Compare commits


6 Commits

SHA1 Message Date
c693937271 updates 2024-02-24 07:32:50 +00:00
79c9ac434a update 2024-02-10 23:47:40 +00:00
4017ff1a80 update 2024-02-05 14:30:11 +00:00
17f6264c18 update 2024-02-05 08:14:23 +00:00
9c97bb9acb update 2024-01-17 14:12:46 +00:00
96c453607f improvements 2024-01-16 14:54:24 +00:00
16 changed files with 232 additions and 204 deletions

.gitignore vendored (1 change)

@ -6,6 +6,7 @@ __pycache__/
*~
*.junk
*.bak
*.dst
# C extensions


@ -1,25 +1,48 @@
PREFIX=/usr/local
PYTHON_EXE_MSYS=${PREFIX}/bin/python3.sh
PIP_EXE_MSYS=${PREFIX}/bin/pip3.sh
PYTHON_MINOR=`python3 --version 2>&1 | sed -e 's@^.* @@' -e 's@\.[0-9]*$$@@'`
LOCAL_DOCTEST=${PREFIX}/bin/toxcore_run_doctest3.bash
DOCTEST=${LOCAL_DOCTEST}
MOD=exclude_badExits
all:: README.md exclude_badExits.hlp
exclude_badExits.hlp:: src/exclude_badExits/__main__.py
PYTHONPATH=${PWD}/src \
${PYTHON_EXE_MSYS} -m exclude_badExits --help > exclude_badExits.hlp
README.md:: src/exclude_badExits/__main__.py
PYTHONPATH=${PWD}/src \
${PYTHON_EXE_MSYS} -c 'from exclude_badExits.__main__ import __doc__; print(__doc__)' \
> README.md
check::
sh python3.sh -c "import ${MOD}"
sh ${PYTHON_EXE_MSYS} -c "import ${MOD}"
lint::
sh .pylint.sh
install::
pip3.sh install --target ${PREFIX}/lib/python3.11/site-packages/ --upgrade .
${PIP_EXE_MSYS} --python ${PYTHON_EXE_MSYS} install \
--no-deps \
--target ${PREFIX}/lib/python${PYTHON_MINOR}/site-packages/ \
--upgrade .
sed -i -e "1s@/usr/bin/python${PYTHON_MINOR}@${PYTHON_EXE_MSYS}@" \
${PREFIX}/lib/python${PYTHON_MINOR}/site-packages/bin/*
rsync::
bash .rsync.sh
test:: doctest
test::
env PYTHONPATH=${PWD}/src ${PYTHON_EXE_MSYS} ${PWD}/src/${MOD}/exclude_badExits.py --help
env PYTHONPATH=${PWD}/src TOR_CONTROLLER_PASSWORD=${PASS} ${PYTHON_EXE_MSYS} src/${MOD}/torcontactinfo.py
doctest::
export PYTHONPATH=${PWD}/src/${MOD}
${DOCTEST} ${MOD}.txt
env PYTHONPATH=${PWD}/src ${DOCTEST} ${MOD}.txt
veryclean:: clean
rm -rf build dist src/exclude_badExits.egg-info/
clean::
find * -name \*~ -delete
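
For reference, `PYTHON_MINOR` above reduces `python3 --version` to the major.minor pair used in the `site-packages` path. A small Python sketch of the same computation (illustrative only, not part of the build):

```python
import re
import subprocess

# "Python 3.11.8" -> "3.11"; python3 --version may print to stdout or stderr,
# which is why the Makefile redirects with 2>&1.
out = subprocess.run(["python3", "--version"], capture_output=True, text=True)
full_version = (out.stdout or out.stderr).strip().split()[-1]   # e.g. "3.11.8"
python_minor = re.sub(r"\.\d+$", "", full_version)              # e.g. "3.11"
print(f"/usr/local/lib/python{python_minor}/site-packages/")
```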


@ -89,6 +89,9 @@ Look in tor's notice.log for 'Every introduction point for service'
```notice_log``` will parse the notice log for warnings about relays and
services that will then be whitelisted.
```--torrc``` will read a file like /etc/tor/torrc and make some
suggestions based on what it finds; it will not edit or change the file.
```--torrc_output``` will write the torrc ExcludeNodes configuration to a file.
```--good_contacts``` will write the contact info as a ciiss dictionary
@ -129,9 +132,6 @@ To be "good" the ContactInfo must:
```--wellknown_output``` will make the program write the well-known files
(```/.well-known/tor-relay/rsa-fingerprint.txt```) to a directory.
```--torrc_output``` will write a file of the commands that it sends to
the Tor controller, so you can include it in a ```/etc/tor/torrc```.
```--relays_output``` will write the downloaded relays in JSON to a file. The relays
are downloaded from https://onionoo.torproject.org/details
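
A sketch of how the options documented above combine into one run, using the `iMain` entry point that the doctest later in this diff calls; the module path follows the `src/exclude_badExits/exclude_badExits.py` layout shown here, and all output paths are illustrative:

```python
from exclude_badExits.exclude_badExits import iMain

# Flag names are taken from the help text above; the values are examples only.
lArgs = ['--proxy_ctl', '9051',
         '--torrc', '/etc/tor/torrc',
         '--torrc_output', '/tmp/torrc.excludes',
         '--relays_output', '/tmp/relays.json',
         '--good_contacts', '/tmp/good_contacts.yaml',
         '--wellknown_output', '/tmp/well-known']
iMain(lArgs)
```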

exclude_badExits-installer.bash Normal file → Executable file (0 changes)

exclude_badExits-pki.bash Normal file → Executable file (0 changes)

exclude_badExits.bash Normal file → Executable file (20 changes)

@ -6,12 +6,22 @@ SOCKS_PORT=9050
SOCKS_HOST=127.0.0.1
CAFILE=/etc/ssl/certs/ca-certificates.crt
# you may have a special python for installed packages
EXE=`which python3.bash`
EXE=`which python3.sh`
MOD=exclude_badExits
if [ ! -f exclude_badExits.hlp ] || \
[ exclude_badExits.hlp -nt src/exclude_badExits/exclude_badExits.py ] ; then
PYTHONPATH=$PWD/src \
$EXE src/${MOD}/exclude_badExits.py --help > exclude_badExits.hlp
fi
if [ ! -f README.md ] || \
[ README.md -nt src/exclude_badExits/exclude_badExits.py ] ; then
PYTHONPATH=$PWD/src \
$EXE -c 'from exclude_badExits.exclude_badExits import __doc__; print(__doc__)' \
> README.md
fi
[ -f exclude_badExits.hlp ] || \
$EXE exclude_badExits.py --help > exclude_badExits.hlp
[ -f README.md ] || \
$EXE -c 'from exclude_badExits import __doc__; print(__doc__)' > README.md
# an example of running exclude_badExits with full debugging
# expected to take 20 minutes or so
declare -a LARGS


@ -16,7 +16,26 @@ The basic idea is to exclude Exit nodes that do not have ContactInfo:
That can be extended to relays that do not have an email in the contact,
or to relays that do not have ContactInfo that is verified to include them.
Pass the controller password if needed as an environment variable:
You can see the status of tor relays at https://torstatus.rueckgr.at/
The code for that site is at https://github.com/paulchen/torstatus
You can get a list of exit relays that are marked bad with:
wget --post-data='SR=FBadExit&SO=Asc&FBadExit=1' 'https://torstatus.rueckgr.at/'
It is assumed that you are running a tor that has its torrc configured with:
ControlPort 127.0.0.1:9051
and/or
ControlSocket /run/tor/control
ControlSocketsGroupWritable 1
and
HashedControlPassword 16:B4155E403F37446360B30D0481C3BB03C083F0E3BB689883A3838E4692
so that you have some security on the Control connection.
Pass the controller password to these scripts as an environment variable:
>>> import os
>>> assert os.environ['TOR_CONTROLLER_PASSWORD']
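
A minimal sketch of what that looks like with stem (a declared dependency): authenticate to the ControlPort configured above with the password from the environment; the address and port match the torrc example, everything else is illustrative:

```python
import os
from stem.control import Controller

# Assumes the torrc shown above: ControlPort 127.0.0.1:9051 plus a
# HashedControlPassword whose clear-text value is in TOR_CONTROLLER_PASSWORD.
with Controller.from_port(address='127.0.0.1', port=9051) as controller:
    controller.authenticate(password=os.environ['TOR_CONTROLLER_PASSWORD'])
    print(controller.get_version())
```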
@ -64,3 +83,5 @@ This may take a while:
>>> lArgs = ['--proxy_ctl', '9051']
>>> exclude_badExits.iMain(lArgs)
There is a doctest test document in exclude_badExits.txt
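
That doctest document is normally run through the `toxcore_run_doctest3.bash` wrapper referenced in the Makefile; a rough stand-alone equivalent, assuming the wrapper is essentially a doctest runner:

```python
import doctest

# PYTHONPATH must include src/ so the imports inside the document resolve,
# as in the Makefile's doctest target.
failed, attempted = doctest.testfile("exclude_badExits.txt",
                                     module_relative=False, verbose=True)
print(f"{failed} of {attempted} examples failed")
```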


@ -17,17 +17,15 @@ classifiers = [
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: Implementation :: CPython",
]
dynamic = ["version", "readme", ] # cannot be dynamic ['license']
scripts = { exclude_badExits = "exclude_badExits.exclude_badExits:iMain" }
dependencies = [
'qasync >= 0.27.1',
'cryptography >= 41.0.7',
'rsa >= 4.9',
'stem >= 1.8.2']
dynamic = ["version", "readme", "dependencies"] # cannot be dynamic ['license']
[project.scripts]
exclude_badExits = "exclude_badExits.__main__:iMain"
[tool.setuptools.dynamic]
version = {attr = "exclude_badExits.__version__"}
readme = {file = ["README.md"]}
dependencies = {file = ["requirements.txt"]}
[project.license]
file = "LICENSE.md"
@ -40,8 +38,6 @@ requires = ["setuptools >= 61.0"]
build-backend = "setuptools.build_meta"
# Either or both of these don't work
#[tool.setuptools]
#packages = ["exclude_badExits"]
[tool.setuptools]
packages = ["exclude_badExits"]
#[tool.setuptools.packages.find]
#include = ["src"]
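
The `[project.scripts]` table above replaces the earlier inline `scripts` assignment; once installed, the generated `exclude_badExits` command behaves roughly like this hand-written wrapper (a sketch, not the actual script setuptools emits):

```python
import sys
from exclude_badExits.__main__ import iMain

if __name__ == '__main__':
    # The real console script calls iMain(); passing argv explicitly here
    # just makes the sketch self-contained.
    sys.exit(iMain(sys.argv[1:]))
```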

setup.cfg (new file, 64 additions)

@ -0,0 +1,64 @@
[metadata]
classifiers =
License :: OSI Approved
Intended Audience :: Web Developers
Operating System :: POSIX :: BSD :: FreeBSD
Operating System :: POSIX :: Linux
Programming Language :: Python :: 3 :: Only
Programming Language :: Python :: 3.6
Programming Language :: Python :: 3.7
Programming Language :: Python :: 3.8
Programming Language :: Python :: 3.9
Programming Language :: Python :: 3.10
Programming Language :: Python :: 3.11
Programming Language :: Python :: Implementation :: CPython
Framework :: AsyncIO
description='Tox ctypes wrapping into Python'
long_description='A program to exclude bad exits on the Tor network'
url='https://git.plastiras.org/emdee/exclude_badExits/'
keywords='rotten-onions tor'
[options]
zip_safe = false
#python_requires = >=3.6
include_package_data =
"*" = ["*.txt"]
package_dir=
=src
packages = ["exclude_badExits"]
[options.packages.find]
where=src
[options.entry_points]
console_scripts =
exclude_badExits = exclude_badExits.exclude_badExits:iMain
[easy_install]
zip_ok = false
[flake8]
jobs = 1
max-line-length = 88
ignore =
E111
E114
E128
E225
E261
E302
E305
E402
E501
E502
E541
E701
E702
E704
E722
E741
F508
F541
W503
W601


@ -1,47 +0,0 @@
# -*-mode: python; indent-tabs-mode: nil; py-indent-offset: 4; coding: utf-8 -*
import re
from setuptools import setup, find_packages
with open("qasync/__init__.py") as f:
version = re.search(r'__version__\s+=\s+"(.*)"', f.read()).group(1)
long_description = "\n\n".join([
open("README.md").read(),
])
if __name__ == '__main__':
setup(
name="exclude_badExits",
version=__version__,
description="""A program to exclude bad exits on the Tor network""",
long_description=long_description,
author="Nusenu (originally)",
author_email='',
license="1clause BSD",
packages = find_packages(exclude=['test*']),
# url="",
# download_url="https://",
keywords=['exit nodes', 'Tor', 'tor onion controller'],
# maybe less - nothing fancy
python_requires="~=3.6",
# probably works on PyQt6 and PySide2 but untested
# https://github.com/CabbageDevelopment/qasync/
install_requires=['cryptography',
'rsa',
'stem',
'urllib3',
'yaml'],
entry_points={
'console_scripts': ['exclude_badExits = exclude_badExits.__main__:iMain', ]},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Topic :: Security',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)


@ -1,2 +1,8 @@
# -*- mode: python; indent-tabs-mode: nil; py-indent-offset: 4; coding: utf-8 -*-
__version__ = "1.0.0"
__all__ = [
'exclude_utils',
'torcontactinfo',
'trustor_poc']


@ -193,25 +193,23 @@ try:
except:
ub_ctx = RR_TYPE_TXT = RR_CLASS_IN = None
from support_onions import (bAreWeConnected, icheck_torrc, lIntroductionPoints,
from toxygen_wrapper.tests.support_onions import (
bAreWeConnected, icheck_torrc, lIntroductionPoints,
yKNOWN_NODNS, zResolveDomain)
from trustor_poc import TrustorError, idns_validate
try:
import xxxhttpx
from exclude_badExits.trustor_poc import TrustorError, idns_validate
if False:
import httpx
import asyncio
from trustor_poc import oDownloadUrlHttpx
except:
from exclude_badExits.trustor_poc import oDownloadUrlHttpx
else:
httpx = None
from trustor_poc import oDownloadUrlUrllib3Socks as oDownloadUrl
from exclude_badExits.trustor_poc import oDownloadUrlUrllib3Socks as oDownloadUrl
try:
from torcontactinfo import TorContactInfoParser
oPARSER = TorContactInfoParser()
except ImportError:
oPARSER = None
from exclude_badExits.torcontactinfo import TorContactInfoParser
from exclude_utils import (aCleanContact, sCleanEmail, aParseContact,
from exclude_badExits.exclude_utils import (
aCleanContact, sCleanEmail, aParseContact,
oStemController, oMainArgparser,
vwrite_goodnodes, vwrite_badnodes, vwrite_good_contacts,
vwritefinale, vsetup_logging )
@ -220,6 +218,8 @@ warnings.filterwarnings('ignore')
global LOG
LOG = logging.getLogger()
oPARSER = TorContactInfoParser()
aGOOD_CONTACTS_DB = {}
aGOOD_CONTACTS_FPS = {}
aBAD_CONTACTS_DB = {}
@ -554,10 +554,10 @@ def aContactFps(oargs, a, fp, o, domain):
if oargs.wellknown_output:
sdir = os.path.join(oargs.wellknown_output, domain,
'.well-known', 'tor-relay')
sfile = os.path.join(sdir, "rsa-fingerprint.txt")
try:
if not os.path.isdir(sdir):
os.makedirs(sdir)
sfile = os.path.join(sdir, "rsa-fingerprint.txt")
with open(sfile, 'wt') as oFd:
oFd.write(data)
except Exception as e:
@ -664,6 +664,7 @@ def bProcessContact(b, texclude_set, aBadContacts, iFakeContact=0):
return True
lNOT_IN_RELAYS_DB = []
def bCheckFp(relay, sofar, lConds, texclude_set):
global aGOOD_CONTACTS_DB
global aGOOD_CONTACTS_FPS
@ -852,7 +853,6 @@ def iMain(lArgs):
iFakeContact = 0
iTotalContacts = 0
aBadContacts = {}
lNOT_IN_RELAYS_DB = []
iR = 0
relays = controller.get_server_descriptors()
lqueue = []


@ -11,7 +11,7 @@ import logging
import warnings
global LOG
from support_onions import (oGetStemController,
from toxygen_wrapper.tests.support_onions import (oGetStemController,
vwait_for_controller,)
try:
@ -20,8 +20,6 @@ try:
yaml.indent(mapping=2, sequence=2)
safe_load = yaml.load
except:
yaml = None
if yaml is None:
try:
import yaml
safe_load = yaml.safe_load
@ -36,6 +34,15 @@ try:
except ImportError:
coloredlogs = False
lMORONS = ['hoster:Quintex Alliance Consulting ']
oCONTACT_RE = re.compile(r'([^:]*)(\s+)(email|url|proof|ciissversion|abuse|gpg):')
lINTS = ['ciissversion', 'uplinkbw', 'signingkeylifetime', 'memory']
lBOOLS = ['dnssec', 'dnsqname', 'aesni', 'autoupdate', 'dnslocalrootzone',
'sandbox', 'offlinemasterkey']
lEMAILS = ['abuse', 'email']
ETC_DIR = '/usr/local/etc/tor/yaml'
def aCleanContact(a, lAT_REPS, lDOT_REPS, lNO_EMAIL) -> dict:
# cleanups
for elt in lINTS:
@ -78,15 +85,6 @@ def sCleanEmail(s, lAT_REPS, lDOT_REPS, lNO_EMAIL) -> str:
s = s.replace(elt, '?')
return s
lMORONS = ['hoster:Quintex Alliance Consulting ']
oCONTACT_RE = re.compile(r'([^:]*)(\s+)(email|url|proof|ciissversion|abuse|gpg):')
lINTS = ['ciissversion', 'uplinkbw', 'signingkeylifetime', 'memory']
lBOOLS = ['dnssec', 'dnsqname', 'aesni', 'autoupdate', 'dnslocalrootzone',
'sandbox', 'offlinemasterkey']
lEMAILS = ['abuse', 'email']
ETC_DIR = '/usr/local/etc/tor/yaml'
def oStemController(oargs, sEXCLUDE_EXIT_GROUP):
if os.path.exists(oargs.proxy_ctl):
controller = oGetStemController(log_level=oargs.log_level,
@ -224,51 +222,6 @@ def vwritefinale(oargs, lNOT_IN_RELAYS_DB) -> None:
LOG.info(f"For info on relays, try: https://onionoo.torproject.org/details")
# https://onionoo.torproject.org/details
def alt_vsetup_logging(theLOG, log_level, logfile='', stream=sys.stderr) -> None:
global LOG
LOG = theLOG
add = True
logging._defaultFormatter = logging.Formatter(datefmt='%m-%d %H:%M:%S')
logging._defaultFormatter.default_time_format = '%m-%d %H:%M:%S'
logging._defaultFormatter.default_msec_format = ''
if logfile:
add = logfile.startswith('+')
sub = logfile.startswith('-')
if add or sub:
logfile = logfile[1:]
kwargs['filename'] = logfile
if coloredlogs:
coloredlogs.DEFAULT_LEVEL_STYLES['info']=dict(color='white',bold=True)
coloredlogs.DEFAULT_LEVEL_STYLES['debug']=dict(color='cyan')
coloredlogs.DEFAULT_LEVEL_STYLES['warn']=dict(color='yellow',bold=True)
coloredlogs.DEFAULT_LEVEL_STYLES['error']=dict(color='red',bold=True)
coloredlogs.DEFAULT_FIELD_STYLES['levelname=']=dict(color='green', bold=True),
# https://pypi.org/project/coloredlogs/
aKw = dict(level=log_level,
logger=LOG,
stream=stream,
fmt='%(levelname)s %(message)s',
isatty=True,
milliseconds=False,
)
coloredlogs.install(**aKw)
if logfile:
oHandler = logging.FileHandler(logfile)
LOG.addHandler(oHandler)
LOG.info(f"Setting coloured log_level to {log_level} {stream}")
else:
kwargs = dict(level=log_level,
force=True,
format='%(levelname)s %(message)s')
logging.basicConfig(**kwargs)
if add and logfile:
oHandler = logging.StreamHandler(stream)
LOG.addHandler(oHandler)
LOG.info(f"SSetting log_level to {log_level!s}")
def vsetup_logging(theLOG, log_level, logfile='', stream=sys.stdout) -> None:
global LOG
LOG = theLOG
@ -306,7 +259,6 @@ def vsetup_logging(theLOG, log_level, logfile='', stream=sys.stdout) -> None:
kwargs = dict(level=log_level,
force=True,
format='%(levelname)s %(message)s')
logging.basicConfig(**kwargs)
if add and logfile:
oHandler = logging.StreamHandler(stream)


@ -9,12 +9,6 @@ import socket
import sys
import time
if False:
import cepa as stem
from cepa.connection import MissingPassword
from cepa.control import Controller
from cepa.util.tor_tools import is_valid_fingerprint
else:
import stem
from stem.connection import MissingPassword
from stem.control import Controller


@ -20,24 +20,24 @@ import os
import re
import sys
import json
import requests
import textwrap
try:
from rich import print as rprint
HAS_RICH = True
except ImportError:
def rprint(value='', *args, **kwargs):
if value not in [None, False, True] and isinstance(value, (dict, list, set, tuple)):
value = json.dumps(value, indent=4)
return print(value, *args, **kwargs)
# rprint = print
HAS_RICH = False
import logging
import warnings
warnings.filterwarnings('ignore')
import requests
import textwrap
from exclude_utils import vsetup_logging
# from rich import print as rprintxxx
# HAS_RICH = True
if True:
def rprint(value='', *args, **kwargs):
if value not in [None, False, True] and \
isinstance(value, (dict, list, set, tuple)):
value = json.dumps(value, indent=4)
return LOG.debug(value, *args, **kwargs)
# rprint = print
# HAS_RICH = False
warnings.filterwarnings('ignore')
LOG = logging.getLogger()
class TorContactInfoParser(object):
email_regex = "^[a-zA-Z0-9.!#$%&*+/=?^_`{|}~-]+@[a-zA-Z0-9-]+(?:\\.[a-zA-Z0-9-]+)*$"
@ -448,7 +448,7 @@ def cmd_scan(opts: argparse.Namespace, adata=None) -> int:
if opts.pretty:
rprint(result)
else:
print(result)
LOG.debug(result)
return 0
ETC_DIR = '/etc/tor/yaml'
@ -518,6 +518,13 @@ def oparser():
{sys.argv[0]} parse -np -j "Privex Inc. email:noc[]privex.io url:https://www.privex.io proof:uri-rsa pgp:288DD1632F6E8951 keybase:privexinc twitter:PrivexInc"
{{"email": "noc@privex.io", "url": "https://www.privex.io", "proof": "uri-rsa", "pgp": null, "keybase": "privexinc", "twitter": "PrivexInc"}}
"""))
cparser.add_argument('--relays_output', type=str,
dest='relays_output',
default=os.path.join(ETC_DIR, 'relays.json'),
help="Write the download relays in json to a file")
cparser.add_argument('-j', '--json', action='store_true',
default=False, dest='json',
help="Output real JSON, not Python dict format.")
cparser.set_defaults(func=cmd_scan, json=False, pretty=False)
subparse = cparser.add_subparsers()
subparse.required = False
@ -527,13 +534,6 @@ def oparser():
sp_parse.add_argument('-np', '--no-pretty',
action='store_false', default=False, dest='pretty',
help="Disable pretty printing JSON")
sp_parse.add_argument('--relays_output', type=str,
dest='relays_output',
default=os.path.join(ETC_DIR, 'relays.json'),
help="Write the download relays in json to a file")
sp_parse.add_argument('-j', '--json', action='store_true',
default=False, dest='json',
help="Output real JSON, not Python dict format.")
sp_parse.set_defaults(func=cmd_parse)
sp_scan = subparse.add_parser('scan', help="Parse all contacts from https://onionoo.torproject.org/details")
@ -544,24 +544,28 @@ def oparser():
return cparser
if __name__ == "__main__":
if os.environ.get('DEBUG', ''):
log_level = 10
else:
log_level = 20
LOG = logging.getLogger()
vsetup_logging(LOG, log_level)
try:
def iMain(lArgs=None):
cparser = oparser()
opts = cparser.parse_args(sys.argv[1:])
opts = cparser.parse_args(lArgs)
data = None
if opts.relays_output and os.path.exists(opts.relays_output):
data = open(opts.relays_output, 'rt').read()
i = cmd_scan(opts, data)
return i
if __name__ == "__main__":
from exclude_utils import vsetup_logging
if os.environ.get('DEBUG', ''):
log_level = 10 # logging.DEBUG
else:
log_level = 20 # logging.INFO
vsetup_logging(LOG, log_level)
try:
i = iMain(sys.argv[1:])
except KeyboardInterrupt as e:
i = 0
except (requests.exceptions.ProxyError, Exception,) as e:
LOG.error(f"{e}")
LOG.exception(f"Exception: {e}", exc_info=True)
i = 0
sys.exit(i)


@ -9,7 +9,7 @@ import re
import sys
import ipaddress
import warnings
import logging
import urllib3.util
from urllib3.util import parse_url as urlparse
@ -24,7 +24,6 @@ except:
ub_ctx = RR_TYPE_TXT = RR_CLASS_IN = None
global LOG
import logging
warnings.filterwarnings('ignore')
LOG = logging.getLogger()
@ -36,7 +35,7 @@ logging.getLogger("urllib3").setLevel(logging.INFO)
# https://github.com/erans/torcontactinfoparser
# sys.path.append('/home/....')
try:
from torcontactinfo import TorContactInfoParser
from exclude_badExits.torcontactinfo import TorContactInfoParser
except:
TorContactInfoParser = None
@ -218,7 +217,7 @@ def find_validation_candidates(controller,
result[domain] = {prooftype: [fingerprint]}
return result
def oDownloadUrlRequests(uri, sCAfile, timeout=30, host='127.0.0.1', port=9050, content_type='text/plain', session=None):
def oDownloadUrlRequests(uri, sCAfile: str, timeout: int = 30, host: str = '127.0.0.1', port:int = 9050, content_type: str = 'text/plain', session=None):
import requests
# socks proxy used for outbound web requests (for validation of proofs)
proxy = {'https': "socks5h://{host}:{port}"}
@ -266,7 +265,10 @@ def oDownloadUrlRequests(uri, sCAfile, timeout=30, host='127.0.0.1', port=9050,
return oReqResp
# There's no point in using asyncio because of duplicate urls in the tasks
async def oDownloadUrlHttpx(uri, sCAfile, timeout=30, host='127.0.0.1', port=9050, content_type='text/plain'):
async def oDownloadUrlHttpx(uri:str, sCAfile:str, timeout:int = 30,
host:str = '127.0.0.1', port:int = 9050,
content_type:str = 'text/plain'):
import httpcore
import asyncio
import httpx
@ -502,7 +504,8 @@ def lDownloadUrlFps(domain, sCAfile, timeout=30, host='127.0.0.1', port=9050):
well_known_content = [i for i in well_known_content if i and len(i) == 40]
return well_known_content
def validate_proofs(candidates, validation_cache_file, timeout=20, host='127.0.0.1', port=9050):
def validate_proofs(candidates, validation_cache_file, timeout=20, host='127.0.0.1', port=9050, CAfile:str = '/etc/ssl/certs/ca-certificates.crt'):
'''
This function takes the return value of find_validation_candidates()
and validated them according to their proof type (uri-rsa, dns-rsa)
@ -619,7 +622,8 @@ if __name__ == '__main__':
validate_proofs(r, validation_cache_file,
timeout=timeout,
host=controller_address,
port=port)
port=port,
CAfile=CAfile)
# refresh list with newly validated fingerprints
trusted_fingerprints = read_local_validation_cache(validation_cache_file,