Improvements

commit 6f3207f02e (parent d11d95aafe)
emdee, 2022-11-09 09:30:43 +00:00
4 changed files with 241 additions and 104 deletions

View File

@@ -83,6 +83,7 @@ import sys
 import os
 import re
+import socket
 import time
 import argparse
 from io import StringIO
@@ -120,6 +121,7 @@ LOG = logging.getLogger()
 ETC_DIR = '/etc/tor/yaml'
 aTRUST_DB = {}
+aTRUST_DB_INDEX = {}
 sDETAILS_URL = "https://metrics.torproject.org/rs.html#details/"
 # You can call this while bootstrapping
 sEXCLUDE_EXIT_KEY = 'ExcludeNodes'
@@ -131,14 +133,30 @@ sINCLUDE_GUARD_KEY = 'EntryNodes'
 lKNOWN_NODNS = [
     '0x0.is',
     'a9.wtf',
+    'artikel5ev.de',
     'arvanode.net',
     'dodo.pm',
+    'dra-family.github.io',
+    'eraldonion.org',
     'galtland.network',
     'interfesse.net',
     'kryptonit.org',
+    'lonet.sh',
+    'moneneis.de',
     'nx42.de',
+    'ormycloud.org',
+    'plied-privacy.net',
+    'rification-for-nusenu.net',
+    'sv.ch',
+    'thingtohide.nl',
+    'tikel10.org',
     'tor-exit-2.aa78i2efsewr0neeknk.xyz',
     'tor-exit-3.aa78i2efsewr0neeknk.xyz',
+    'torix-relays.org',
+    'tse.com',
+    'tuxli.org',
+    'w.digidow.eu',
+    'www.quintex.com',
 ]

 def oMakeController(sSock='', port=9051):
     import getpass
@@ -157,7 +175,6 @@ def lYamlBadNodes(sFile,
                   section=sEXCLUDE_EXIT_KEY,
                   lWanted=['BadExit']):
     global oBAD_NODES
-    root = 'ExcludeNodes'
     l = []
     if not yaml: return l
     if os.path.exists(sFile):
@@ -166,54 +183,19 @@ def lYamlBadNodes(sFile,
         oBAD_NODES = o
     # BROKEN
+    # root = 'ExcludeNodes'
     # for elt in o[oBAD_ROOT][root][section].keys():
     #     if lWanted and elt not in lWanted: continue
     #     # l += o[oBAD_ROOT][root][section][elt]
+    global lKNOWN_NODNS
+    root = 'ExcludeDomains'
+    if root not in o[oBAD_ROOT] or not o[oBAD_ROOT][root]:
+        o[oBAD_ROOT][root] = lKNOWN_NODNS
+    else:
+        lKNOWN_NODNS = o[oBAD_ROOT][root]
     return l

-def icheck_torrc(sFile, oArgs):
-    l = open(sFile, 'rt').readlines()
-    a = {}
-    for elt in l:
-        elt = elt.strip()
-        if not elt or not ' ' in elt: continue
-        k,v = elt.split(' ', 1)
-        a[k] = v
-    keys = a
-
-    if 'HashedControlPassword' not in keys:
-        LOG.info('Add HashedControlPassword for security')
-        print('run: tor --hashcontrolpassword <TopSecretWord>')
-    if 'ExcludeExitNodes' in keys:
-        elt = 'BadNodes.ExcludeExitNodes.BadExit'
-        LOG.warn(f"Remove ExcludeNodes and move then to {oArgs.bad_nodes}")
-        print(f"move to the {elt} section as a list")
-    if 'GuardNodes' in keys:
-        elt = 'GoodNodes.GuardNodes'
-        LOG.warn(f"Remove GuardNodes and move then to {oArgs.good_nodes}")
-        print(f"move to the {elt} section as a list")
-    if 'ExcludeNodes' in keys:
-        elt = 'BadNodes.ExcludeNodes.BadExit'
-        LOG.warn(f"Remove ExcludeNodes and move then to {oArgs.bad_nodes}")
-        print(f"move to the {elt} section as a list")
-    if 'ControlSocket' not in keys and os.path.exists('/run/tor/control'):
-        LOG.info('Add ControlSocket /run/tor/control for us')
-        print('ControlSocket /run/tor/control GroupWritable RelaxDirModeCheck')
-    if 'UseMicrodescriptors' not in keys or keys['UseMicrodescriptors'] != '1':
-        LOG.info('Add UseMicrodescriptors 0 for us')
-        print('UseMicrodescriptors 0')
-    if 'AutomapHostsSuffixes' not in keys:
-        LOG.info('Add AutomapHostsSuffixes for onions')
-        print('AutomapHostsSuffixes .exit,.onion')
-    if 'AutoMapHostsOnResolve' not in keys:
-        LOG.info('Add AutoMapHostsOnResolve for onions')
-        print('AutoMapHostsOnResolve 1')
-    if 'VirtualAddrNetworkIPv4' not in keys:
-        LOG.info('Add VirtualAddrNetworkIPv4 for onions')
-        print('VirtualAddrNetworkIPv4 172.16.0.0/12')
-    return 0

 oGOOD_NODES = {}
 oGOOD_ROOT = 'GoodNodes'
 def lYamlGoodNodes(sFile='/etc/tor/torrc-goodnodes.yaml'):
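Note: the new ExcludeDomains logic makes lYamlBadNodes two-way: a fresh YAML file gets seeded from the built-in lKNOWN_NODNS list, while an existing ExcludeDomains section overrides the built-in. A minimal sketch of the YAML shape this assumes (the layout is inferred from the keys used above):

    import yaml
    o = {'BadNodes': {'ExcludeDomains': ['0x0.is', 'a9.wtf'],
                      'ExcludeNodes': {'BadExit': []}}}
    print(yaml.safe_dump(o, default_flow_style=False))
    # BadNodes:
    #   ExcludeDomains:
    #   - 0x0.is
    #   - a9.wtf
    #   ExcludeNodes:
    #     BadExit: []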
@@ -248,13 +230,14 @@ def lIntroductionPoints(controller, lOnions):
             l += [introduction_point.address]
     return l

-lBAD_URLS = []
+tBAD_URLS = set()
 lATS = ['abuse', 'email']
 lINTS = ['ciissversion', 'uplinkbw', 'signingkeylifetime', 'memory']
 lBOOLS = ['dnssec', 'dnsqname', 'aesni', 'autoupdate', 'dnslocalrootzone',
           'sandbox', 'offlinemasterkey']

 def aVerifyContact(a, fp, https_cafile, timeout=20, host='127.0.0.1', port=9050):
-    global lBAD_URLS
+    global tBAD_URLS
+    global lKNOWN_NODNS
     # cleanups for yaml
     for elt in lINTS:
         if elt in a:
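Note: moving lBAD_URLS from a list to the tBAD_URLS set deduplicates URLs that fail for many relays and turns the later membership tests into average O(1) hash lookups. The difference in a few lines (URL hypothetical):

    tBAD_URLS = set()
    tBAD_URLS.add('https://example.com')  # adding a duplicate is a no-op for a set
    tBAD_URLS.add('https://example.com')
    assert len(tBAD_URLS) == 1 and 'https://example.com' in tBAD_URLS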
@@ -271,12 +254,17 @@ def aVerifyContact(a, fp, https_cafile, timeout=20, host='127.0.0.1', port=9050)
     a.update({'fps': []})
     keys = list(a.keys())

     # test the url for fps and add it to the array
     if 'proof' not in keys:
         LOG.warn(f"{fp} 'proof' not in {keys}")
         return a

+    if aTRUST_DB_INDEX and fp in aTRUST_DB_INDEX.keys():
+        aCachedContact = aTRUST_DB_INDEX[fp]
+        if aCachedContact['email'] == a['email']:
+            return aCachedContact
+
     if 'url' not in keys:
         if 'uri' not in keys:
             a['url'] = ''
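Note: the aTRUST_DB_INDEX short-circuit skips re-downloading a proof when the fingerprint has already been verified and the declared contact email is unchanged. The cache-hit path in isolation, as a sketch with hypothetical data:

    # hypothetical cached entry, keyed by a 40-hex-char relay fingerprint
    aTRUST_DB_INDEX = {'A' * 40: {'email': 'op@example.com', 'fps': ['A' * 40]}}

    def aCacheHit(a, fp):
        # return the cached contact instead of re-verifying, else None
        if fp in aTRUST_DB_INDEX and aTRUST_DB_INDEX[fp].get('email') == a.get('email'):
            return aTRUST_DB_INDEX[fp]
        return None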
@@ -292,12 +280,21 @@ def aVerifyContact(a, fp, https_cafile, timeout=20, host='127.0.0.1', port=9050)
         a['url'] = 'https:' +a['url']

     # domain should be a unique key for contacts
-    domain = a['url'][8:]
+    domain = a['url'][8:].strip('/')
     try:
         ip = sTorResolve(domain)
     except Exception as e:
-        lpair = getaddrinfo(domain, 443)
-        if lpait is None:
+        try:
+            lpair = getaddrinfo(domain, 443)
+        except (socket.gaierror, ) as e:
+            LOG.debug(f"{e}")
+            lpair = None
+            lKNOWN_NODNS.append(domain)
+        except Exception as e:
+            LOG.warn(f"{e}")
+            lpair = None
+            lKNOWN_NODNS.append(domain)
+        if lpair is None:
             LOG.warn(f"TorResolv and getaddrinfo failed for {domain}")
             return a
         ip = lpair[0]
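Note: resolution now degrades in two steps, sTorResolve through the SOCKS port first, then a plain getaddrinfo, and a domain that fails both ways is appended to lKNOWN_NODNS so later passes skip it without another lookup. The same pattern in isolation, a sketch using the stdlib resolver rather than the script's own getaddrinfo wrapper (domain hypothetical):

    import socket

    lKNOWN_NODNS = []

    def sResolveOrFlag(domain):
        try:
            # getaddrinfo returns (family, type, proto, canonname, sockaddr) tuples
            return socket.getaddrinfo(domain, 443)[0][4][0]
        except socket.gaierror:
            # NXDOMAIN and friends: remember the domain so we never retry it
            lKNOWN_NODNS.append(domain)
            return None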
@@ -324,24 +321,24 @@ def aVerifyContact(a, fp, https_cafile, timeout=20, host='127.0.0.1', port=9050)
         LOG.exception(f"AttributeError downloading from {domain} {e}")
     except CertificateError as e:
         LOG.warn(f"CertificateError downloading from {domain} {e}")
-        lBAD_URLS += [a['url']]
+        tBAD_URLS.add(a['url'])
     except TrustorError as e:
         LOG.warn(f"TrustorError downloading from {domain} {e.args}")
-        lBAD_URLS += [a['url']]
+        tBAD_URLS.add(a['url'])
     except (BaseException ) as e:
         LOG.error(f"Exception {type(e)} downloading from {domain} {e}")
     else:
         if o.status_code >= 300:
             LOG.warn(f"Error downloading from {domain} {o.status_code} {o.reason}")
             # any reason retry?
-            lBAD_URLS += [a['url']]
+            tBAD_URLS.add(a['url'])
             return a
         l = o.text.upper().strip().split('\n')
         if not l:
             # already squacked in lD
             LOG.warn(f"Downloading from {domain} empty for {fp}")
-            lBAD_URLS += [a['url']]
+            tBAD_URLS.add(a['url'])
         else:
             a['fps'] = [elt for elt in l if elt and len(elt) == 40
                         and not elt.startswith('#')]
@@ -521,13 +518,23 @@ def iMain(lArgs):
         icheck_torrc(sFile, oArgs)

     global aTRUST_DB
+    global aTRUST_DB_INDEX
     sFile = oArgs.proof_output
     if sFile and os.path.exists(sFile):
         try:
             with open(sFile, 'rt') as oFd:
                 aTRUST_DB = yaml.safe_load(oFd)
-        except:
-            aTRUST_DB = {}
+            # reverse lookup of fps to contacts
+            # but...
+            for k,v in aTRUST_DB.items():
+                aTRUST_DB_INDEX[k] = v
+                if 'fps' in aTRUST_DB[k].keys():
+                    for fp in aTRUST_DB[k]['fps']:
+                        aTRUST_DB_INDEX[fp] = v
+        except Exception as e:
+            LOG.warn(f"Error reading YAML TrustDB {sFile} {e}")

     if os.path.exists(oArgs.proxy_ctl):
         controller = oMakeController(sSock=oArgs.proxy_ctl)
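Note: indexing each contact under its own key and under every fingerprint it has proven yields one flat lookup table for the cache check in aVerifyContact; unpacking k,v requires iterating aTRUST_DB.items(), not the bare dict. A compact equivalent with hypothetical data:

    aTRUST_DB = {'op@example.com': {'email': 'op@example.com',
                                    'fps': ['A' * 40, 'B' * 40]}}
    aTRUST_DB_INDEX = {}
    for k, v in aTRUST_DB.items():      # .items() yields (key, value) pairs
        aTRUST_DB_INDEX[k] = v
        for fp in v.get('fps', []):     # every proven fingerprint maps to its contact
            aTRUST_DB_INDEX[fp] = v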
@@ -615,7 +622,12 @@ def iMain(lArgs):
                 i = c.find(' ')
                 if i >= 0:
                     c = c[:i]
-                LOG.info(f"{relay.fingerprint} skipping 'dns-rsa' {c} {sofar}")
+                if c in lKNOWN_NODNS:
+                    LOG.info(f"{relay.fingerprint} skipping in lKNOWN_NODNS {c} {sofar}")
+                    exit_excludelist.append(relay.fingerprint)
+                    continue
+                LOG.info(f"skipping 'dns-rsa' {relay.fingerprint}.{c} {sofar}")
                 iDnsContact += 1
                 continue
@@ -626,17 +638,18 @@ def iMain(lArgs):
             LOG.warn(f"{relay.fingerprint} did not parse {sofar}")
             exit_excludelist.append(relay.fingerprint)
             continue

-        if 'url' in a and a['url'] and a['url'] in lBAD_URLS:
-            # The fp is using a contact with a URL we know is bad
-            LOG.info(f"{relay.fingerprint} skipping in lBAD_URLS {a['url']} {sofar}")
-            exit_excludelist.append(relay.fingerprint)
-            continue
-        domain = a['url'][8:]
-        if domain in lKNOWN_NODNS:
-            # The fp is using a contact with a URL we know is nonexistent
-            LOG.info(f"{relay.fingerprint} skipping in lKNOWN_NODNS {a['url']} {sofar}")
-            exit_excludelist.append(relay.fingerprint)
-            continue
+        if 'url' in a and a['url']:
+            if a['url'] in tBAD_URLS:
+                # The fp is using a contact with a URL we know is bad
+                LOG.info(f"{relay.fingerprint} skipping in tBAD_URLS {a['url']} {sofar}")
+                exit_excludelist.append(relay.fingerprint)
+                continue
+            domain = a['url'].replace('https://', '').replace('http://', '')
+            if domain in lKNOWN_NODNS:
+                # The fp is using a contact with a URL we know is bogus
+                LOG.info(f"{relay.fingerprint} skipping in lKNOWN_NODNS {a['url']} {sofar}")
+                exit_excludelist.append(relay.fingerprint)
+                continue

         b = aVerifyContact(list(a.values())[0],
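Note: guarding the whole block with the outer "if 'url' in a and a['url']:" also removes a latent crash, since the old code sliced a['url'][8:] even when no url key survived parsing. The chained replace() strips either scheme, where the fixed [8:] slice silently mangled http:// URLs; urlparse would be the more general choice if paths or ports ever appear. A sketch of that alternative (not what this commit uses):

    from urllib.parse import urlparse
    # netloc isolates the host part: 'example.com' for a hypothetical URL
    domain = urlparse('https://example.com/some/path').netloc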
@@ -664,6 +677,10 @@ def iMain(lArgs):
         # great contact had good fps and we are in them
         lProofGoodFps += b['fps']
+        if relay.fingerprint in aProofUri.keys():
+            # a cached entry
+            continue
         LOG.info(f"{relay.fingerprint} verified {b['url']} {sofar}")
         # add our contact info to the trustdb
         aProofUri[relay.fingerprint] = b
@@ -706,7 +723,10 @@ def iMain(lArgs):
     global oBAD_NODES
     oBAD_NODES['BadNodes']['ExcludeNodes']['BadExit'] = exit_excludelist
+    global lKNOWN_NODNS
+    oBAD_NODES[oBAD_ROOT]['ExcludeDomains'] = lKNOWN_NODNS
     vwrite_badnodes(oArgs)

     global oGOOD_NODES
     oGOOD_NODES['GoodNodes']['Relays']['ExitNodes'] = lProofGoodFps
     vwrite_goodnodes(oArgs)
@@ -739,7 +759,7 @@ def iMain(lArgs):
         except stem.SocketClosed as e:
             LOG.errro(f"Failed setting {sINCLUDE_EXIT_KEY} good exit nodes in Tor")
             retval += 1
-            )
     return retval

 except InvalidRequest as e:

View File

@@ -261,3 +261,70 @@ class HTTPSAdapter(HTTPAdapter):
                 raise
         return self.build_response(request, resp)

+from urllib3.util.ssl_match_hostname import match_hostname as _match_hostname
+
+def match_hostname(cert, hostname):
+    """Verify that *cert* (in decoded format as returned by
+    SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
+    rules are followed, but IP addresses are not accepted for *hostname*.
+
+    CertificateError is raised on failure. On success, the function
+    returns nothing.
+    """
+    if not cert:
+        raise ValueError(
+            "empty or no certificate, match_hostname needs a "
+            "SSL socket or SSL context with either "
+            "CERT_OPTIONAL or CERT_REQUIRED"
+        )
+    try:
+        # Divergence from upstream: ipaddress can't handle byte str
+        host_ip = ipaddress.ip_address(_to_unicode(hostname))
+    except (UnicodeError, ValueError):
+        # ValueError: Not an IP address (common case)
+        # UnicodeError: Divergence from upstream: Have to deal with ipaddress not taking
+        # byte strings. Addresses should be all ascii, so we consider it not
+        # an ipaddress in this case
+        host_ip = None
+    except AttributeError:
+        # Divergence from upstream: Make ipaddress library optional
+        if ipaddress is None:
+            host_ip = None
+        else:  # Defensive
+            raise
+    dnsnames = []
+    san = cert.get("subjectAltName", ())
+    for key, value in san:
+        if key == "DNS":
+            if host_ip is None and _dnsname_match(value, hostname):
+                return
+            dnsnames.append(value)
+        elif key == "IP Address":
+            if host_ip is not None and _ipaddress_match(value, host_ip):
+                return
+            dnsnames.append(value)
+    if not dnsnames:
+        # The subject is only checked when there is no dNSName entry
+        # in subjectAltName
+        for sub in cert.get("subject", ()):
+            for key, value in sub:
+                # XXX according to RFC 2818, the most specific Common Name
+                # must be used.
+                if key == "commonName":
+                    if _dnsname_match(value, hostname):
+                        return
+                    dnsnames.append(value)
+    if len(dnsnames) > 1:
+        # soften this to allow subdomain matching
+        raise CertificateError(
+            "hostname %r "
+            "doesn't match any of %s" % (hostname, ", ".join(map(repr, dnsnames)))
+        )
+    elif len(dnsnames) == 1:
+        raise CertificateError("hostname %r doesn't match %r" % (hostname, dnsnames[0]))
+    else:
+        raise CertificateError(
+            "no appropriate commonName or subjectAltName fields were found"
+        )
+
+import urllib3.util.ssl_match_hostname
+urllib3.util.ssl_match_hostname.match_hostname = match_hostname
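Note: assigning the replacement onto the urllib3 module is what lets requests pick up the softened check during TLS verification. What the function accepts, sketched against the dict shape SSLSocket.getpeercert() returns (hostname and SAN values hypothetical):

    cert = {'subjectAltName': (('DNS', 'example.com'), ('DNS', 'www.example.com'))}
    match_hostname(cert, 'example.com')      # silently returns on a SAN match
    # match_hostname(cert, 'other.org')      # would raise CertificateError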

View File

@@ -2,19 +2,18 @@
 import os
 import sys
-import argparse
 import re
 import traceback
-import logging
 import shutil
-import json
 import socket
 import select
-from ctypes import *
-import time, contextlib
-import unittest
-from random import Random
-random = Random()
+import time
+
+global LOG
+import logging
+import warnings
+warnings.filterwarnings('ignore')
+LOG = logging.getLogger()

 bHAVE_TORR = shutil.which('tor-resolve')
@@ -78,6 +77,7 @@ def sTorResolve(target,
                 ):
     MAX_INFO_RESPONSE_PACKET_LENGTH = 8

+    target = target.strip('/')
     seb = b"\o004\o360\o000\o000\o000\o000\o000\o001\o000"
     seb = b"\x04\xf0\x00\x00\x00\x00\x00\x01\x00"
     seb += bytes(target, 'US-ASCII') + b"\x00"
@@ -158,3 +158,45 @@ def getaddrinfo(sHost, sPort):
         return None
     return lPair

+def icheck_torrc(sFile, oArgs):
+    with open(sFile, 'rt') as oFd:
+        l = oFd.readlines()
+    a = {}
+    for elt in l:
+        elt = elt.strip()
+        if not elt or not ' ' in elt: continue
+        k,v = elt.split(' ', 1)
+        a[k] = v
+    keys = a
+
+    if 'HashedControlPassword' not in keys:
+        LOG.info('Add HashedControlPassword for security')
+        print('run: tor --hash-password <TopSecretWord>')
+    if 'ExcludeExitNodes' in keys:
+        elt = 'BadNodes.ExcludeExitNodes.BadExit'
+        LOG.warn(f"Remove ExcludeExitNodes and move them to {oArgs.bad_nodes}")
+        print(f"move to the {elt} section as a list")
+    if 'GuardNodes' in keys:
+        elt = 'GoodNodes.GuardNodes'
+        LOG.warn(f"Remove GuardNodes and move them to {oArgs.good_nodes}")
+        print(f"move to the {elt} section as a list")
+    if 'ExcludeNodes' in keys:
+        elt = 'BadNodes.ExcludeNodes.BadExit'
+        LOG.warn(f"Remove ExcludeNodes and move them to {oArgs.bad_nodes}")
+        print(f"move to the {elt} section as a list")
+    if 'ControlSocket' not in keys and os.path.exists('/run/tor/control'):
+        LOG.info('Add ControlSocket /run/tor/control for us')
+        print('ControlSocket /run/tor/control GroupWritable RelaxDirModeCheck')
+    if 'UseMicrodescriptors' not in keys or keys['UseMicrodescriptors'] != '0':
+        LOG.info('Add UseMicrodescriptors 0 for us')
+        print('UseMicrodescriptors 0')
+    if 'AutomapHostsSuffixes' not in keys:
+        LOG.info('Add AutomapHostsSuffixes for onions')
+        print('AutomapHostsSuffixes .exit,.onion')
+    if 'AutoMapHostsOnResolve' not in keys:
+        LOG.info('Add AutoMapHostsOnResolve for onions')
+        print('AutoMapHostsOnResolve 1')
+    if 'VirtualAddrNetworkIPv4' not in keys:
+        LOG.info('Add VirtualAddrNetworkIPv4 for onions')
+        print('VirtualAddrNetworkIPv4 172.16.0.0/12')
+    return 0
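Note: icheck_torrc only parses flat "Keyword value" torrc lines and prints the directives it would like to see; it never edits the file. A hedged usage sketch (the paths and argument object are hypothetical; the real oArgs comes from argparse in the main script):

    from argparse import Namespace

    oArgs = Namespace(bad_nodes='/etc/tor/yaml/torrc-badnodes.yaml',
                      good_nodes='/etc/tor/yaml/torrc-goodnodes.yaml')
    icheck_torrc('/etc/tor/torrc', oArgs)  # prints e.g. 'UseMicrodescriptors 0'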

View File

@@ -8,6 +8,7 @@ import requests
 from stem.control import Controller
 from stem.util.tor_tools import *
 from urllib.parse import urlparse
+from urllib3.util.retry import Retry

 try:
     # unbound is not on pypi
@@ -29,13 +30,8 @@
 except:
     TorContactInfoParser = None

-# for now we support max_depth = 0 only
-# this PoC version has no support for recursion
-# https://github.com/nusenu/tor-relay-operator-ids-trust-information#trust-information-consumers
-supported_max_depths = ['0']
-
-# https://github.com/nusenu/ContactInfo-Information-Sharing-Specification#ciissversion
-accepted_ciissversions = ['2']
+class TrustorError(Exception): pass

 # https://stackoverflow.com/questions/2532053/validate-a-hostname-string
 # FIXME this check allows non-fqdn names
@@ -47,7 +43,6 @@ def is_valid_hostname(hostname):
     allowed = re.compile("(?!-)[A-Z\d-]{1,63}(?<!-)$", re.IGNORECASE)
     return all(allowed.match(x) for x in hostname.split("."))
-
 def read_local_trust_config(trust_config):
     '''
     reads a local configuration file containing trusted domains
@@ -55,6 +50,12 @@ def read_local_trust_config(trust_config):
     '''
     result = []

+    # for now we support max_depth = 0 only
+    # this PoC version has no support for recursion
+    # https://github.com/nusenu/tor-relay-operator-ids-trust-information#trust-information-consumers
+    supported_max_depths = ['0']
+
     if (os.path.isfile(trust_config)):
         f = open(trust_config)
         for line in f:
@@ -150,6 +151,8 @@ def find_validation_candidates(controller, trusted_domains=[],validation_cache=[
     # https://github.com/nusenu/ContactInfo-Information-Sharing-Specification#proof
     accepted_proof_types = ['uri-rsa','dns-rsa']
+    # https://github.com/nusenu/ContactInfo-Information-Sharing-Specification#ciissversion
+    accepted_ciissversions = ['2']

     result = {}
@@ -202,7 +205,7 @@ def find_validation_candidates(controller, trusted_domains=[],validation_cache=[
             result[domain] = {prooftype : [fingerprint]}
     return result

-def lDownloadUrlFps(domain, sCAfile, timeout=30, host='127.0.0.1', port=9050):
+def oDownloadUrl(domain, sCAfile, timeout=30, host='127.0.0.1', port=9050):
     uri="https://"+domain+"/.well-known/tor-relay/rsa-fingerprint.txt"
     # socks proxy used for outbound web requests (for validation of proofs)
     proxy = {'https': 'socks5h://' +host +':' +str(port)}
@@ -216,28 +219,28 @@ def lDownloadUrlFps(domain, sCAfile, timeout=30, host='127.0.0.1', port=9050):
         # urllib3.connection WARNING Certificate did not match expected hostname:
         head = requests.head(uri, timeout=timeout, proxies=proxy, headers=headers)
     except Exception as e:
-        LOG.warn(f"HTTP HEAD request failed for {uri} {e}")
-        head = None
-        return []
-
-    if head.status_code != 200:
-        return []
-    if not head.headers['Content-Type'].startswith('text/plain'):
-        return []
-
-    assert os.path.exists(sCAfile), sCAfile
+        raise TrustorError(f"HTTP HEAD request failed for {uri} {e}")
+
+    if head.status_code >= 300:
+        raise TrustorError(f"HTTP Errorcode {head.status_code}")
+    if not head.headers['Content-Type'].startswith('text/plain'):
+        raise TrustorError("HTTP Content-Type != text/plain")
+    if not os.path.exists(sCAfile):
+        raise TrustorError(f"File not found CAfile {sCAfile}")

     try:
         from https_adapter import HTTPSAdapter
     except Exception as e:
         LOG.warn(f"Could not import HTTPSAdapter {e}")
         HTTPSAdapter = None
+    HTTPSAdapter = None

     try:
         with requests.sessions.Session() as session:
             if HTTPSAdapter:
+                retries = Retry(connect=3, read=2, redirect=0)
                 # FixMe: upgrade to TLS1.3
                 session.mount("https://", HTTPSAdapter(pool_maxsize=1,
-                                                       max_retries=3,))
+                                                       max_retries=retries,))
-            fullfile = session.request(method="get", url=uri,
+            oReqResp = session.request(method="get", url=uri,
                                        proxies=proxy, timeout=timeout,
                                        headers=headers,
                                        allow_redirects=False,
@@ -245,16 +248,21 @@ def lDownloadUrlFps(domain, sCAfile, timeout=30, host='127.0.0.1', port=9050):
                                        )
     except:
         LOG.warn("HTTP GET request failed for %s" % uri)
-        return []
+        raise

-    if fullfile.status_code != 200 or not fullfile.headers['Content-Type'].startswith('text/plain'):
-        return []
+    if oReqResp.status_code != 200:
+        raise TrustorError(f"HTTP Errorcode {oReqResp.status_code}")
+    if not oReqResp.headers['Content-Type'].startswith('text/plain'):
+        raise TrustorError("HTTP Content-Type != text/plain")

     #check for redirects (not allowed as per spec)
-    if fullfile.url != uri:
-        LOG.error('Redirect detected %s vs %s (final)' % (uri, fullfile.url))
-        return []
+    if oReqResp.url != uri:
+        LOG.error('Redirect detected %s vs %s (final)' % (uri, oReqResp.url))
+        raise TrustorError('Redirect detected %s vs %s (final)' % (uri, oReqResp.url))
+    return oReqResp

-    well_known_content = fullfile.text.upper().strip().split('\n')
+def lDownloadUrlFps(domain, sCAfile, timeout=30, host='127.0.0.1', port=9050):
+    o = oDownloadUrl(domain, sCAfile, timeout=timeout, host=host, port=port)
+    well_known_content = o.text.upper().strip().split('\n')
     well_known_content = [i for i in well_known_content if i and len(i) == 40]
     return well_known_content
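Note: after the split, oDownloadUrl returns the whole response object and signals every failure mode by raising (TrustorError or the original exception), while lDownloadUrlFps keeps the old list-of-fingerprints contract for existing callers. A hedged usage sketch (domain and CA-bundle path hypothetical):

    try:
        lFps = lDownloadUrlFps('example.com', '/etc/ssl/certs/ca-certificates.crt')
    except TrustorError as e:
        LOG.warn(f"well-known proof download failed: {e}")
    else:
        print(f"{len(lFps)} fingerprints published at example.com")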