emdee 2022-10-01 19:01:20 +00:00
parent 4f1f68e438
commit 8018e5a89e
5 changed files with 21 additions and 222 deletions

.gitignore

@@ -3,6 +3,7 @@
 __pycache__/
 *.py[cod]
 *$py.class
+*.diff
 
 # C extensions
 *.so


@@ -16,7 +16,8 @@ that every call in ```tox.py``` has the right signature, but it runs
 It has been tested with UDP and TCP proxy (Tor). It has ***not*** been
 tested on Windows, and there may be some minor breakage, which should be
-easy to fix. There is a good coverage integration testsuite in ```tests```.
+easy to fix. There is a good coverage integration testsuite in ```wrapper_tests```.
+Change to that directory and run ```tests_wrapper.py --help```.
 
 ## Install
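
For reference, a minimal sketch of invoking the relocated suite from the repository root, assuming the script lives at wrapper_tests/tests_wrapper.py as the updated README implies:

```python
# Hypothetical invocation, not part of this commit: runs the relocated
# test suite the way the README describes, from the repository root.
import subprocess
import sys

subprocess.run(
    [sys.executable, "tests_wrapper.py", "--help"],  # print the suite's options
    cwd="wrapper_tests",  # the directory the README now points to
    check=True,
)
```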


@@ -5,6 +5,7 @@ import sys
 import logging
 from io import BytesIO
 import urllib
+import traceback
 
 global LOG
 LOG = logging.getLogger('app.'+'ts')
@@ -109,6 +110,7 @@ def download_url(url, settings=None):
             return result
         except Exception as ex:
             LOG.error('TOX nodes loading error with pycurl: ' + str(ex))
+            LOG.error('\n' + traceback.format_exc())
         # drop through
 
     if requests:
@@ -141,7 +143,7 @@
         # drop through
 
     if not settings['proxy_type']: # no proxy
-        LOG.debug('nodes loading with no proxy: ' + str(url))
+        LOG.debug('nodes loading with urllib no proxy: ' + str(url))
         try:
             req = urllib.request.Request(url)
             req.add_header('Content-Type', 'application/json')
@@ -152,33 +154,6 @@
         except Exception as ex:
             LOG.error('TOX nodes loading ' + str(ex))
             return ''
-    else: # proxy
-        try:
-            from PyQt5 import QtNetwork
-            from PyQt5 import QtCore
-        except:
-            pass
-        else:
-            LOG.debug(f"TOX nodes loading with QT proxy: {url}")
-            netman = QtNetwork.QNetworkAccessManager()
-            proxy = QtNetwork.QNetworkProxy()
-            proxy.setType(
-                QtNetwork.QNetworkProxy.Socks5Proxy if settings['proxy_type'] == 2 \
-                else QtNetwork.QNetworkProxy.HttpProxy )
-            proxy.setHostName(settings['proxy_host'])
-            proxy.setPort(int(settings['proxy_port']))
-            netman.setProxy(proxy)
-            try:
-                request = QtNetwork.QNetworkRequest()
-                request.setUrl(QtCore.QUrl(url))
-                reply = netman.get(request)
-                while not reply.isFinished():
-                    QtCore.QThread.msleep(1)
-                    QtCore.QCoreApplication.processEvents()
-                result = bytes(reply.readAll().data())
-                LOG.info('TOX nodes loading with QT proxy: ' + str(url))
-                return result
-            except Exception as ex:
-                LOG.error('TOX nodes loading error with proxy: ' + str(ex))
     return ''


@@ -5,24 +5,13 @@ import sys
 import argparse
 import re
 import logging
-import urllib
 import json
 from ctypes import *
-from io import BytesIO
 import time, contextlib
 import unittest
 from random import Random
 random = Random()
-try:
-    import pycurl
-except ImportError:
-    pycurl = None
-try:
-    import requests
-except ImportError:
-    requests = None
 
 from PyQt5 import QtCore, QtWidgets
 from qtpy.QtWidgets import QApplication
@@ -33,6 +22,8 @@ try:
 except ImportError:
     get_user_config_path = None
+from wrapper_tests.support_http import pick_up_proxy_from_environ, download_url, bAreWeConnected
 
 # LOG=util.log
 global LOG
 LOG = logging.getLogger('app.'+'ts')
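
The import above replaces the local copies that are removed further down in this file. A rough usage sketch, under the assumption that the relocated helpers keep the removed functions' behaviour (a dict of proxy settings parsed from the environment, download_url(url, settings), and a truthy bAreWeConnected() when a non-loopback route exists); the nodes URL below is only an illustration:

```python
# Sketch only; assumes wrapper_tests.support_http mirrors the removed helpers.
from wrapper_tests.support_http import (
    bAreWeConnected, download_url, pick_up_proxy_from_environ)

settings = {'proxy_type': 0, 'proxy_host': '', 'proxy_port': 0,
            'udp_enabled': True}
# socks_proxy takes precedence over https_proxy, which takes precedence over http_proxy
settings.update(pick_up_proxy_from_environ())

if bAreWeConnected():
    # Illustrative URL only; not taken from this commit.
    result = download_url('https://nodes.tox.chat/json', settings)
```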
@@ -62,7 +53,6 @@ else:
 iTHREAD_TIMEOUT = 1
 iTHREAD_SLEEP = 1
 iTHREAD_JOINS = 5
-CONNECT_TIMEOUT = 20.0
 
 lToxSamplerates = [8000, 12000, 16000, 24000, 48000]
 lToxSampleratesK = [8, 12, 16, 24, 48]
@@ -78,34 +68,11 @@
     'hole_punching_enabled',
     'dht_announcements_enabled',
     'save_history',
-    'download_nodes_list']
-lBOOLEANS = [
-    'local_discovery_enabled',
-    'udp_enabled',
-    'ipv6_enabled',
-    'compact_mode',
-    'allow_inline',
-    'notifications',
-    'sound_notifications',
-    'hole_punching_enabled',
-    'dht_announcements_enabled',
-    'save_history',
     'download_nodes_list'
     'core_logging',
     ]
 
-def bAreWeConnected():
-    # FixMe: Linux
-    sFile = f"/proc/{os.getpid()}/net/route"
-    if not os.path.isfile(sFile): return None
-    i = 0
-    for elt in open(sFile, "r").readlines():
-        if elt.startswith('Iface'): continue
-        if elt.startswith('lo'): continue
-        i += 1
-    return i > 0
-
 lNEW = [ # ngc_jfreeg2:
     ('104.244.74.69', 38445, # tox.plastiras.org
      '5E47BA1DC3913EB2CBF2D64CE4F23D8BFE5391BFABE5C43C5BAD13F0A414CD77'),
@@ -285,8 +252,6 @@ lLOCAL = [# /etc/init.d/tox-bootstrapd.conf
      'A22E68642917F424E5B38E98CACE38A4906B67228D83E507084400B597D5722E'),
     ]
 
-lNO_PROXY = ['localhost', '127.0.0.1']
 
 def assert_main_thread():
     # this "instance" method is very useful!
     app_thread = QtWidgets.QApplication.instance().thread()
@@ -373,7 +338,8 @@ def tox_log_cb(level, filename, line, func, message, *args):
             level = 20 # LOG.info
         o = LOG.makeRecord(filename, level, func, line, message, list(), None)
-        LOG.handle(o)
+        # LOG.handle(o)
+        LOG_TRACE(f"{level}: {func}{line} {message}")
         return
 
     elif level == 1:
@@ -387,7 +353,7 @@ def tox_log_cb(level, filename, line, func, message, *args):
     elif level == 5:
         LOG.debug(f"{level}: {message}")
     else:
-        LOG.trace(f"{level}: {message}")
+        LOG_TRACE(f"{level}: {message}")
 
 def vAddLoggerCallback(tox_options, callback=None):
     if callback is None:
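
LOG_TRACE is called by the new code but is not defined anywhere in the hunks shown here. A plausible module-level helper is sketched below; the exact definition in the repository may differ:

```python
# Not part of this diff: LOG.trace() does not exist on stdlib loggers,
# which is presumably why the callback now calls LOG_TRACE instead.
# A plausible helper (assumption), logging below DEBUG:
import logging

LOG = logging.getLogger('app.' + 'ts')
iTRACE = logging.DEBUG - 5  # assumed custom level 5

def LOG_TRACE(msg, *args):
    # Emitted only when the logger/handlers are configured at level <= 5.
    LOG.log(iTRACE, msg, *args)
```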
@@ -542,151 +508,6 @@ def lSdSamplerates(iDev):
             supported_samplerates.append(fs)
     return supported_samplerates
 
-def should_we_pick_up_proxy_from_environ():
-    retval = dict()
-    if os.environ.get('socks_proxy', ''):
-        # socks_proxy takes precedence over https/http
-        proxy = os.environ.get('socks_proxy', '')
-        i = proxy.find('//')
-        if i >= 0: proxy = proxy[i+2:]
-        retval['proxy_host'] = proxy.split(':')[0]
-        retval['proxy_port'] = proxy.split(':')[-1]
-        retval['proxy_type'] = 2
-        retval['udp_enabled'] = False
-    elif os.environ.get('https_proxy', ''):
-        # https takes precedence over http
-        proxy = os.environ.get('https_proxy', '')
-        i = proxy.find('//')
-        if i >= 0: proxy = proxy[i+2:]
-        retval['proxy_host'] = proxy.split(':')[0]
-        retval['proxy_port'] = proxy.split(':')[-1]
-        retval['proxy_type'] = 1
-        retval['udp_enabled'] = False
-    elif os.environ.get('http_proxy', ''):
-        proxy = os.environ.get('http_proxy', '')
-        i = proxy.find('//')
-        if i >= 0: proxy = proxy[i+2:]
-        retval['proxy_host'] = proxy.split(':')[0]
-        retval['proxy_port'] = proxy.split(':')[-1]
-        retval['proxy_type'] = 1
-        retval['udp_enabled'] = False
-    return retval
-
-def download_url(url, app):
-    if not bAreWeConnected(): return ''
-    settings = app._settings
-    if pycurl:
-        LOG.debug('nodes loading with pycurl: ' + str(url))
-        buffer = BytesIO()
-        c = pycurl.Curl()
-        c.setopt(c.URL, url)
-        c.setopt(c.WRITEDATA, buffer)
-        # Follow redirect.
-        c.setopt(c.FOLLOWLOCATION, True)
-        # cookie jar
-        cjar = os.path.join(os.environ['HOME'], '.local', 'jar.cookie')
-        if os.path.isfile(cjar):
-            c.setopt(c.COOKIEFILE, cjar)
-            # LARGS+=( --cookie-jar --junk-session-cookies )
-        #? c.setopt(c.ALTSVC_CTRL, 16)
-        c.setopt(c.NOPROXY, ','.join(lNO_PROXY))
-        #? c.setopt(c.CAINFO, certifi.where())
-        if settings['proxy_type'] == 2 and settings['proxy_host']:
-            socks_proxy = 'socks5h://'+settings['proxy_host']+':'+str(settings['proxy_port'])
-            settings['udp_enabled'] = False
-            c.setopt(c.PROXY, socks_proxy)
-            c.setopt(c.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5_HOSTNAME)
-        elif settings['proxy_type'] == 1 and settings['proxy_host']:
-            https_proxy = 'https://'+settings['proxy_host']+':'+str(settings['proxy_port'])
-            c.setopt(c.PROXY, https_proxy)
-        elif settings['proxy_type'] == 1 and settings['proxy_host']:
-            http_proxy = 'http://'+settings['proxy_host']+':'+str(settings['proxy_port'])
-            c.setopt(c.PROXY, http_proxy)
-        c.setopt(c.PROTOCOLS, c.PROTO_HTTPS)
-        try:
-            c.perform()
-            c.close()
-            #? assert c.getinfo(c.RESPONSE_CODE) < 300
-            result = buffer.getvalue()
-            # Body is a byte string.
-            LOG_INFO('nodes loaded with pycurl: ' + str(url))
-            return result
-        except Exception as ex:
-            LOG_ERROR('TOX nodes loading error with pycurl: ' + str(ex))
-        # drop through
-
-    if requests:
-        LOG_DEBUG('nodes loading with requests: ' + str(url))
-        try:
-            headers = dict()
-            headers['Content-Type'] = 'application/json'
-            proxies = dict()
-            if settings['proxy_type'] == 2 and settings['proxy_host']:
-                socks_proxy = 'socks5://'+settings['proxy_host']+':'+str(settings['proxy_port'])
-                settings['udp_enabled'] = False
-                proxies['https'] = socks_proxy
-            elif settings['proxy_type'] == 1 and settings['proxy_host']:
-                https_proxy = 'https://'+settings['proxy_host']+':'+str(settings['proxy_port'])
-                proxies['https'] = https_proxy
-            elif settings['proxy_type'] == 1 and settings['proxy_host']:
-                http_proxy = 'http://'+settings['proxy_host']+':'+str(settings['proxy_port'])
-                proxies['http'] = http_proxy
-            req = requests.get(url,
-                               headers=headers,
-                               proxies=proxies,
-                               timeout=CONNECT_TIMEOUT)
-            # max_retries=3
-            assert req.status_code < 300
-            result = req.content
-            LOG_INFO('nodes loaded with requests: ' + str(url))
-            return result
-        except Exception as ex:
-            LOG_ERROR('TOX nodes loading error with requests: ' + str(ex))
-        # drop through
-
-    if not settings['proxy_type']: # no proxy
-        LOG_DEBUG('nodes loading with no proxy: ' + str(url))
-        try:
-            req = urllib.request.Request(url)
-            req.add_header('Content-Type', 'application/json')
-            response = urllib.request.urlopen(req)
-            result = response.read()
-            LOG_INFO('nodes loaded with no proxy: ' + str(url))
-            return result
-        except Exception as ex:
-            LOG_ERROR('TOX nodes loading ' + str(ex))
-            return ''
-    else: # proxy
-        from PyQt5 import QtNetwork
-        LOG_DEBUG(f"TOX nodes loading with QT proxy: {url}")
-        netman = QtNetwork.QNetworkAccessManager()
-        proxy = QtNetwork.QNetworkProxy()
-        proxy.setType(
-            QtNetwork.QNetworkProxy.Socks5Proxy if settings['proxy_type'] == 2 \
-            else QtNetwork.QNetworkProxy.HttpProxy )
-        proxy.setHostName(settings['proxy_host'])
-        proxy.setPort(settings['proxy_port'])
-        netman.setProxy(proxy)
-        try:
-            request = QtNetwork.QNetworkRequest()
-            request.setUrl(QtCore.QUrl(url))
-            reply = netman.get(request)
-            while not reply.isFinished():
-                QtCore.QThread.msleep(1)
-                QtCore.QCoreApplication.processEvents()
-            result = bytes(reply.readAll().data())
-            LOG_INFO('TOX nodes loading with QT proxy: ' + str(url))
-            return result
-        except Exception as ex:
-            LOG_ERROR('TOX nodes loading error with proxy: ' + str(ex))
-    return ''
-
 def _get_nodes_path(oArgs=None):
     if oArgs and hasattr(oArgs, 'nodes_json') and oArgs.nodes_json:
         LOG.debug("_get_nodes_path: " +oArgs.nodes_json)


@@ -74,8 +74,8 @@ try:
     import support_testing as ts
     from support_testing import lGOOD, lLOCAL
 except ImportError:
-    import tests.support_testing as ts
-    from tests.support_testing import lGOOD, lLOCAL
+    import wrapper_tests.support_testing as ts
+    from wrapper_tests.support_testing import lGOOD, lLOCAL
 
 try:
     from toxygen_tests import test_sound_notification
@@ -125,7 +125,7 @@ if not hasattr(unittest, 'skip'):
 def iNodeInfo(sProt, sHost, sPort, key=None, environ=None, bTest=False):
     sFile = os.path.join("/tmp", f"{sHost}.{os.getpid()}.nmap")
-    if True or sProt in ['socks', 'socks5', 'tcp4']:
+    if sProt in ['socks', 'socks5', 'tcp4']:
         cmd = f"nmap -Pn -n -sT -p T:{sPort} {sHost} | grep /tcp >{sFile}"
     else:
         cmd = f"nmap -Pn -n -sU -p U:{sPort} {sHost} | grep /tcp >{sFile}"
@@ -153,13 +153,13 @@ def bootstrap_iNodeInfo(lElts):
     else:
         protocol='ipv4'
     env = os.environ
+    lRetval = []
     for elts in lElts[:8]:
         iRet = -1
         try:
             iRet = iNodeInfo(protocol, *elts)
             if iRet != 0:
-                LOG.warn('iNodeInfo to ' +repr(elts[0]) +' : ' +str(iRet))
+                LOG.warn('iNodeInfo to ' +repr(elts[0]) +' retval=' +str(iRet))
                 lRetval += [False]
             else:
                 LOG.info(f'bootstrap_iNodeInfo '
@@ -826,7 +826,8 @@ class ToxSuite(unittest.TestCase):
         lElts = self.lUdp
         lRetval = []
         random.shuffle(lElts)
-        assert bootstrap_iNodeInfo(lElts)
+        # assert
+        bootstrap_iNodeInfo(lElts)
 
     def test_self_get_secret_key(self): # works
         """