updates

parent 79c9ac434a
commit c693937271

Makefile (16 changed lines)
@@ -1,12 +1,24 @@
 PREFIX=/usr/local
 PYTHON_EXE_MSYS=${PREFIX}/bin/python3.sh
 PIP_EXE_MSYS=${PREFIX}/bin/pip3.sh
+PYTHON_MINOR=`python3 --version 2>&1 | sed -e 's@^.* @@' -e 's@\.[0-9]*$$@@'`
 LOCAL_DOCTEST=${PREFIX}/bin/toxcore_run_doctest3.bash
 DOCTEST=${LOCAL_DOCTEST}
 MOD=exclude_badExits
 
+all:: README.md exclude_badExits.hlp
+
+exclude_badExits.hlp:: src/exclude_badExits/__main__.py
+	PYTHONPATH=${PWD}/src \
+	${PYTHON_EXE_MSYS} -m exclude_badExits --help > exclude_badExits.hlp
+
+README.md:: src/exclude_badExits/__main__.py
+	PYTHONPATH=${PWD}/src \
+	${PYTHON_EXE_MSYS} -c 'from exclude_badExits.__main__ import __doc__; print(__doc__)' \
+		> README.md
+
 check::
-	sh python3.sh -c "import ${MOD}"
+	sh ${PYTHON_EXE_MSYS} -c "import ${MOD}"
 
 lint::
 	sh .pylint.sh
@@ -16,6 +28,8 @@ install::
 		--no-deps \
 		--target ${PREFIX}/lib/python${PYTHON_MINOR}/site-packages/ \
 		--upgrade .
+	sed -i -e "1s@/usr/bin/python${PYTHON_MINOR}@${PYTHON_EXE_MSYS}@" \
+		${PREFIX}/lib/python${PYTHON_MINOR}/site-packages/bin/*
 
 rsync::
 	bash .rsync.sh
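In other words, the new README.md and exclude_badExits.hlp targets regenerate both files from the package itself. A minimal Python sketch of what the README.md recipe does (it assumes the repository layout above, with the package importable from src/):

```python
# Sketch of the README.md recipe: print the package docstring so the
# Makefile can redirect it into README.md.  Assumes this runs from the
# repository root, mirroring PYTHONPATH=${PWD}/src in the Makefile.
import sys
sys.path.insert(0, "src")

from exclude_badExits.__main__ import __doc__ as doc  # the module docstring

print(doc)
```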
README.md (14 changed lines)
@@ -89,6 +89,9 @@ Look in tor's notice.log for 'Every introduction point for service'
 ```notice_log``` will parse the notice log for warnings about relays and
 services that will then be whitelisted.
 
+```--torrc``` will read a file like /etc/tor/torrc and make some
+suggestions based on what it finds; it will not edit or change the file.
+
 ```--torrc_output``` will write the torrc ExcludeNodes configuration to a file.
 
 ```--good_contacts``` will write the contact info as a ciiss dictionary
@@ -129,15 +132,10 @@ To be "good" the ContactInfo must:
 ```--wellknown_output``` will make the program write the well-known files
 (```/.well-known/tor-relay/rsa-fingerprint.txt```) to a directory.
 
-```--torrc_output``` will write a file of the commands that it sends to
-the Tor controller, so you can include it in a ```/etc/toc/torrc```.
-
-```--relays_output``` write the download relays in json to a file. The relays
+```--relays_output write the download relays in json to a file. The relays
 are downloaded from https://onionoo.torproject.org/details
 
-For usage, do ```python3 exclude_badExits.py --help```
-
-See [exclude_badExits.hlp](./exclude_badExits.hlp)
-or there's a doctest file in [exclude_badExits.txt](./exclude_badExits.txt)
-
-Up-to-date code is on https://git.plastiras.org/emdee/exclude_badExits
+For usage, do ```python3 exclude_badExits.py --help`
+See [exclude_badExits.txt](./exclude_badExits.txt)
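Taken together, the options documented above can be driven from the command line or from Python. A hedged sketch of a programmatic call, with flag spellings taken from the README text (whether these are the only arguments required is an assumption):

```python
# Hypothetical invocation sketch; flag names come from the README above,
# everything else (values, required arguments) is illustrative only.
from exclude_badExits.__main__ import iMain

iMain([
    '--relays_output', 'relays.json',         # relays downloaded from onionoo
    '--torrc_output', 'torrc.excludes',       # ExcludeNodes configuration
    '--wellknown_output', 'well-known',       # .well-known/tor-relay trees
    '--good_contacts', 'good_contacts.yaml',  # ciiss contact dictionary
])
```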
exclude_badExits-installer.bash (0 changed lines, Normal file → Executable file)
exclude_badExits-pki.bash (0 changed lines, Normal file → Executable file)
exclude_badExits.bash (12 changed lines, Normal file → Executable file)
@@ -6,16 +6,22 @@ SOCKS_PORT=9050
 SOCKS_HOST=127.0.0.1
 CAFILE=/etc/ssl/certs/ca-certificates.crt
 # you may have a special python for installed packages
-EXE=`which python3.bash`
+EXE=`which python3.sh`
 MOD=exclude_badExits
 
-[ -f exclude_badExits.hlp -a exclude_badExits.hlp -nt src/exclude_badExits/exclude_badExits.py] || \
+if [ ! -f exclude_badExits.hlp ] || \
+   [ exclude_badExits.hlp -nt src/exclude_badExits/exclude_badExits.py ] ; then
   PYTHONPATH=$PWD/src \
     $EXE src/${MOD}/exclude_badExits.py --help > exclude_badExits.hlp
-[ -f README.md -a README.md -nt src/exclude_badExits/exclude_badExits.py] || \
+fi
 
+if [ ! -f README.md ] || \
+   [ README.md -nt src/exclude_badExits/exclude_badExits.py ] ; then
   PYTHONPATH=$PWD/src \
     $EXE -c 'from exclude_badExits.exclude_badExits import __doc__; print(__doc__)' \
       > README.md
+fi
 
 # an example of running exclude_badExits with full debugging
 # expected to 20 minutes or so
 declare -a LARGS
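The rewritten guards regenerate exclude_badExits.hlp and README.md when they are missing or fail the `-nt` comparison against the source module. For comparison, a small Python sketch of a conventional rebuild-if-missing-or-stale test (helper name is hypothetical, not from the repo):

```python
# Hypothetical staleness check, the usual intent behind test/-nt guards
# like the ones above (names illustrative only).
import os

def needs_rebuild(output: str, source: str) -> bool:
    if not os.path.exists(output):
        return True
    # stale when the source was modified after the output was written
    return os.path.getmtime(source) > os.path.getmtime(output)

if needs_rebuild('exclude_badExits.hlp',
                 'src/exclude_badExits/exclude_badExits.py'):
    print('regenerate exclude_badExits.hlp')
```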
pyproject.toml
@@ -20,7 +20,7 @@ classifiers = [
 dynamic = ["version", "readme", "dependencies"] # cannot be dynamic ['license']
 
 [project.scripts]
-exclude_badExits = "exclude_badExits.exclude_badExits:iMain"
+exclude_badExits = "exclude_badExits.__main__:iMain"
 
 [tool.setuptools.dynamic]
 version = {attr = "exclude_badExits.__version__"}
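The `[project.scripts]` change points the installed `exclude_badExits` console script at `iMain` in `__main__.py` instead of `exclude_badExits.py`. A rough sketch of what that entry point resolves to (the real wrapper is generated by pip at install time; the calling convention here is inferred from `def iMain(lArgs)` further down in this diff):

```python
# Rough equivalent of the console script declared in [project.scripts].
# Sketch only: pip generates the real wrapper when the package is installed.
import sys

from exclude_badExits.__main__ import iMain

sys.exit(iMain(sys.argv[1:]))
```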
src/exclude_badExits/__init__.py
@@ -1,2 +1,8 @@
+# -*- mode: python; indent-tabs-mode: nil; py-indent-offset: 4; coding: utf-8 -*-
+
 __version__ = "1.0.0"
 
+__all__ = [
+    'exclude_utils',
+    'torcontactinfo',
+    'trustor_poc']
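The new `__all__` mostly documents the public submodules; a small hedged illustration of its effect on wildcard imports (assuming the package is installed):

```python
# With __all__ = ['exclude_utils', 'torcontactinfo', 'trustor_poc'] in
# exclude_badExits/__init__.py, a wildcard import binds exactly those
# submodule names in the importing namespace.
from exclude_badExits import *  # noqa: F401,F403

print(exclude_utils.__name__)   # exclude_badExits.exclude_utils
print(trustor_poc.__name__)     # exclude_badExits.trustor_poc
```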
@@ -197,30 +197,29 @@ from toxygen_wrapper.tests.support_onions import (
     bAreWeConnected, icheck_torrc, lIntroductionPoints,
     yKNOWN_NODNS, zResolveDomain)
 
-from trustor_poc import TrustorError, idns_validate
-try:
-    import xxxhttpx
+from exclude_badExits.trustor_poc import TrustorError, idns_validate
+if False:
+    import httpx
     import asyncio
-    from trustor_poc import oDownloadUrlHttpx
-except:
+    from exclude_badExits.trustor_poc import oDownloadUrlHttpx
+else:
     httpx = None
-    from trustor_poc import oDownloadUrlUrllib3Socks as oDownloadUrl
+    from exclude_badExits.trustor_poc import oDownloadUrlUrllib3Socks as oDownloadUrl
 
-try:
-    from torcontactinfo import TorContactInfoParser
-    oPARSER = TorContactInfoParser()
-except ImportError:
-    oPARSER = None
-
-from exclude_utils import (aCleanContact, sCleanEmail, aParseContact,
+from exclude_badExits.torcontactinfo import TorContactInfoParser
+from exclude_badExits.exclude_utils import (
+    aCleanContact, sCleanEmail, aParseContact,
     oStemController, oMainArgparser,
     vwrite_goodnodes, vwrite_badnodes, vwrite_good_contacts,
-    vwritefinale, vsetup_logging)
+    vwritefinale, vsetup_logging )
 
 warnings.filterwarnings('ignore')
 global LOG
 LOG = logging.getLogger()
 
+oPARSER = TorContactInfoParser()
+
 aGOOD_CONTACTS_DB = {}
 aGOOD_CONTACTS_FPS = {}
 aBAD_CONTACTS_DB = {}
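Besides switching to package-relative imports, the hunk above replaces the optional-httpx `try/except` with an `if False:` switch, so the urllib3-over-SOCKS downloader is always used and httpx is never imported. A self-contained sketch of the difference (generic names, not the project's code):

```python
# Old shape: opportunistic import with a fallback on ImportError.
try:
    import httpx            # may or may not be installed
    downloader = 'httpx'
except ImportError:
    httpx = None
    downloader = 'urllib3-over-SOCKS'

# New shape: the httpx branch is disabled unconditionally, so the fallback
# is always taken and httpx is never imported at all.
if False:
    import httpx
    downloader = 'httpx'
else:
    httpx = None
    downloader = 'urllib3-over-SOCKS'

print(downloader)            # always 'urllib3-over-SOCKS'
```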
@@ -555,10 +554,10 @@ def aContactFps(oargs, a, fp, o, domain):
     if oargs.wellknown_output:
         sdir = os.path.join(oargs.wellknown_output, domain,
                             '.well-known', 'tor-relay')
+        sfile = os.path.join(sdir, "rsa-fingerprint.txt")
         try:
             if not os.path.isdir(sdir):
                 os.makedirs(sdir)
-            sfile = os.path.join(sdir, "rsa-fingerprint.txt")
             with open(sfile, 'wt') as oFd:
                 oFd.write(data)
         except Exception as e:
@@ -665,6 +664,7 @@ def bProcessContact(b, texclude_set, aBadContacts, iFakeContact=0):
 
     return True
 
+lNOT_IN_RELAYS_DB = []
 def bCheckFp(relay, sofar, lConds, texclude_set):
     global aGOOD_CONTACTS_DB
     global aGOOD_CONTACTS_FPS
@@ -853,7 +853,6 @@ def iMain(lArgs):
     iFakeContact = 0
     iTotalContacts = 0
     aBadContacts = {}
-    lNOT_IN_RELAYS_DB = []
     iR = 0
     relays = controller.get_server_descriptors()
     lqueue = []
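The two hunks above move `lNOT_IN_RELAYS_DB` from a local variable inside `iMain` to module scope, so helpers such as `bCheckFp` can append to one shared list. A minimal sketch of that pattern with generic names:

```python
# Generic sketch: a module-level accumulator shared by a helper and a driver.
lMISSING = []          # module scope, like lNOT_IN_RELAYS_DB after this commit

def check_one(fp, known):
    if fp not in known:
        lMISSING.append(fp)   # mutating the list needs no `global` statement
        return False
    return True

def main():
    known = {'A', 'B'}
    for fp in ['A', 'X', 'B', 'Y']:
        check_one(fp, known)
    print(lMISSING)           # ['X', 'Y']

main()
```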
@@ -11,7 +11,7 @@ import logging
 import warnings
 global LOG
 
-from toxygen_wrapper.tests.support_onions import (woGetStemController,
+from toxygen_wrapper.tests.support_onions import (oGetStemController,
      vwait_for_controller,)
 
 try:
@@ -20,8 +20,6 @@ try:
     yaml.indent(mapping=2, sequence=2)
     safe_load = yaml.load
 except:
-    yaml = None
-if yaml is None:
     try:
         import yaml
         safe_load = yaml.safe_load
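The hunk above flattens the YAML-loader fallback so the PyYAML path sits directly under the `except:`. The diff only shows the tail of the first `try:` block; the ruamel.yaml spelling below is an assumption based on the `yaml.indent(...)` call, and the rest is a self-contained sketch:

```python
# Sketch of the loader fallback: prefer ruamel.yaml's loader, fall back to
# PyYAML's safe_load.  Assumes at least one of the two packages is installed;
# the ruamel import is a guess, since the diff omits the top of the try block.
try:
    from ruamel.yaml import YAML
    yaml = YAML()
    yaml.indent(mapping=2, sequence=2)
    safe_load = yaml.load
except ImportError:
    try:
        import yaml
        safe_load = yaml.safe_load
    except ImportError:
        yaml = None
        safe_load = None

print('YAML loader available:', safe_load is not None)
```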
@@ -36,6 +34,15 @@ try:
 except ImportError:
     coloredlogs = False
 
+lMORONS = ['hoster:Quintex Alliance Consulting ']
+oCONTACT_RE = re.compile(r'([^:]*)(\s+)(email|url|proof|ciissversion|abuse|gpg):')
+lINTS = ['ciissversion', 'uplinkbw', 'signingkeylifetime', 'memory']
+lBOOLS = ['dnssec', 'dnsqname', 'aesni', 'autoupdate', 'dnslocalrootzone',
+          'sandbox', 'offlinemasterkey']
+lEMAILS = ['abuse', 'email']
+
+ETC_DIR = '/usr/local/etc/tor/yaml'
+
 def aCleanContact(a, lAT_REPS, lDOT_REPS, lNO_EMAIL) -> dict:
     # cleanups
     for elt in lINTS:
@@ -78,15 +85,6 @@ def sCleanEmail(s, lAT_REPS, lDOT_REPS, lNO_EMAIL) -> str:
         s = s.replace(elt, '?')
     return s
 
-lMORONS = ['hoster:Quintex Alliance Consulting ']
-oCONTACT_RE = re.compile(r'([^:]*)(\s+)(email|url|proof|ciissversion|abuse|gpg):')
-lINTS = ['ciissversion', 'uplinkbw', 'signingkeylifetime', 'memory']
-lBOOLS = ['dnssec', 'dnsqname', 'aesni', 'autoupdate', 'dnslocalrootzone',
-          'sandbox', 'offlinemasterkey']
-lEMAILS = ['abuse', 'email']
-
-ETC_DIR = '/usr/local/etc/tor/yaml'
-
 def oStemController(oargs, sEXCLUDE_EXIT_GROUP):
     if os.path.exists(oargs.proxy_ctl):
         controller = oGetStemController(log_level=oargs.log_level,
@@ -261,7 +259,6 @@ def vsetup_logging(theLOG, log_level, logfile='', stream=sys.stdout) -> None:
     kwargs = dict(level=log_level,
                   force=True,
                   format='%(levelname)s %(message)s')
-
     logging.basicConfig(**kwargs)
     if add and logfile:
         oHandler = logging.StreamHandler(stream)
@@ -29,7 +29,7 @@ import textwrap
 # HAS_RICH = True
 if True:
     def rprint(value='', *args, **kwargs):
-        if value not in [None, False, True] and
+        if value not in [None, False, True] and \
            isinstance(value, (dict, list, set, tuple)):
             value = json.dumps(value, indent=4)
         return LOG.debug(value, *args, **kwargs)
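The one-character change above matters because, without the trailing backslash, the two-line `if` condition does not parse. A tiny self-contained sketch of the corrected shape (`print` stands in for the module's `LOG.debug`):

```python
import json

def rprint(value='', *args, **kwargs):
    # The backslash continues the condition onto the next line; without it
    # this is a SyntaxError.
    if value not in [None, False, True] and \
       isinstance(value, (dict, list, set, tuple)):
        value = json.dumps(value, indent=4)
    print(value, *args, **kwargs)

rprint({'fingerprint': 'ABCD1234'})   # pretty-printed as JSON
rprint('plain string')                # passed through unchanged
```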
@@ -9,7 +9,7 @@ import re
 import sys
 import ipaddress
 import warnings
+import logging
 
 import urllib3.util
 from urllib3.util import parse_url as urlparse
@@ -24,7 +24,6 @@ except:
     ub_ctx = RR_TYPE_TXT = RR_CLASS_IN = None
 
 global LOG
-import logging
 warnings.filterwarnings('ignore')
 LOG = logging.getLogger()
 
@@ -36,7 +35,7 @@ logging.getLogger("urllib3").setLevel(logging.INFO)
 # https://github.com/erans/torcontactinfoparser
 # sys.path.append('/home/....')
 try:
-    from torcontactinfo import TorContactInfoParser
+    from exclude_badExits.torcontactinfo import TorContactInfoParser
 except:
     TorContactInfoParser = None
 
@@ -266,7 +265,10 @@ def oDownloadUrlRequests(uri, sCAfile: str, timeout: int = 30, host: str = '127.
     return oReqResp
 
 # There's no point in using asyncio because of duplicate urls in the tasks
-async def oDownloadUrlHttpx(uri:str, sCAfile:str, timeout;int = 30, host:str = '127.0.0.1', port:int = 9050, content_type:str = 'text/plain'):
+async def oDownloadUrlHttpx(uri:str, sCAfile:str, timeout:int = 30,
+                            host:str = '127.0.0.1', port:int = 9050,
+                            content_type:str = 'text/plain'):
+
     import httpcore
     import asyncio
     import httpx