Compare commits

..

14 Commits

Author SHA1 Message Date
a1bad4139e update 2024-02-18 18:21:28 +00:00
7150957574 update 2024-02-14 03:03:29 +00:00
8bf481e6fc tox_wrapper 2024-02-13 18:21:08 +00:00
510e790155 update 2024-02-05 13:04:15 +00:00
7cf2f66783 update 2024-02-02 19:30:57 +00:00
84afbe61b8 update 2024-02-02 19:29:33 +00:00
c5aff5fd3b setup.py 2023-12-26 06:17:14 +00:00
ae6b8f443c update 2023-12-18 06:30:54 +00:00
3051db8a7a bugfix 2023-12-10 22:11:09 +00:00
57c316974b bugfix 2023-12-10 21:32:08 +00:00
c1b379e922 update 2023-12-07 19:48:39 +00:00
85d6715854 Minor fix 2023-07-14 14:10:11 +00:00
b3d46abb91 pep8 2022-11-17 11:49:31 +00:00
79d3270a8d stem 2022-10-29 05:47:03 +00:00
14 changed files with 785 additions and 335 deletions

198
.gitignore vendored
View File

@ -1,166 +1,34 @@
# ---> Python
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
*.diff
.pylint.*
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
.pylint.err .pylint.err
.pylint.log .pylint.out
*.pyc
*.pyo
libs/
*.egg-info
*.log
*.out
*.bak
.idea
*~
#*
*.iml
*.junk
*.so
*.log
toxygen/build
toxygen/dist
*.spec
dist
toxygen/avatars
toxygen/__pycache__
/*.egg-info
/*.egg
html
Toxygen.egg-info
*.tox
.cache
*.db
*~
Makefile

7
.rsync.sh Normal file
View File

@ -0,0 +1,7 @@
#!/bin/sh
# Mirror this working tree into ../tox_profile.git/, excluding build
# artifacts, logs, compiled files and editor backups.
# Step 1: strip trailing whitespace in-place from every *.py that has any.
# NOTE(review): 'find *' skips dotfiles and errors in an empty directory,
# and the unquoted xargs pipeline breaks on paths with spaces -- TODO confirm
# that no such paths occur in this tree.
find * -name \*.py | xargs grep -l '[ ]*$' | xargs sed -i -e 's/[ ]*$//'
# Step 2: sync. Extra rsync options may be passed through on the command
# line via "$@" (-vax: verbose, archive mode, stay on one filesystem).
# The final 'grep -v /$' hides directory-only lines from the verbose output.
rsync "$@" -vax --include \*.py --exclude \*.log --exclude \*.out \
--exclude \*.egg-info --exclude libs --exclude dist --exclude build \
--exclude \*.pyc --exclude .pyl\* --exclude \*~ --exclude \*.so \
./ ../tox_profile.git/|grep -v /$

View File

@ -7,7 +7,7 @@ Read and manipulate tox profile files. It started as a simple script from
things that it finds. Then can write what it found in JSON/YAML/REPR/PPRINT things that it finds. Then can write what it found in JSON/YAML/REPR/PPRINT
to a file. It can also test the nodes in a profile using ```nmap```. to a file. It can also test the nodes in a profile using ```nmap```.
( There are somtimes problems with the json info dump of bytes keys: ( There are sometimes problems with the json info dump of bytes keys:
```TypeError: Object of type bytes is not JSON serializable```) ```TypeError: Object of type bytes is not JSON serializable```)
It can also download, select, or test nodes in a ```DHTnode.json``` file. It can also download, select, or test nodes in a ```DHTnode.json``` file.
@ -222,10 +222,33 @@ Because it's written in Python it is easy to extend to, for example,
supporting multidevices: supporting multidevices:
<https://git.plastiras.org/emdee/tox_profile/wiki/MultiDevice-Announcements-POC> <https://git.plastiras.org/emdee/tox_profile/wiki/MultiDevice-Announcements-POC>
There are a couple of bash scripts to show usage:
* tox_profile_examples.bash - simple example usage
* tox_profile_test.bash - a real test runner that still needs documenting.
## Specification ## Specification
There is a copy of the Tox [spec](https://toktok.ltd/spec.html) There is a copy of the Tox [spec](https://toktok.ltd/spec.html)
in the repo - it is missing any description of the groups section. in the repo - it is missing any description of the groups section.
## Updates
Although Tox works over Tor, we do not recommend its usage for
anonymity as it leaks DNS requests due to a 6-year old known security
issue: https://github.com/TokTok/c-toxcore/issues/469 unless your Tox
client does hostname lookups before calling Tox (like
[toxygen](https://git.plastiras.org/emdee/toxygen) does).
Otherwise, do not use it for anonymous communication unless you have a
TCP and UDP firewall in place.
The Tox project does not follow semantic versioning so the project may
break the underlying ctypes wrapper at any time; it's not possible to
use Tox version numbers to tell what the API will be. The last git version
this code was tested with is ``1623e3ee5c3a5837a92f959f289fcef18bfa9c959``
of Feb 12 10:06:37 2024. If a later toxcore release breaks the wrapper,
you'll have to go into the tox.py file in
https://git.plastiras.org/emdee/toxygen_wrapper to fix it yourself.
The up-to-date version of this code is at https://git.plastiras.org/emdee/tox_profile
Work on this project is suspended until the Work on this project is suspended until the
[MultiDevice](https://git.plastiras.org/emdee/tox_profile/wiki/MultiDevice-Announcements-POC) problem is solved. Fork me! [MultiDevice](https://git.plastiras.org/emdee/tox_profile/wiki/MultiDevice-Announcements-POC) problem is solved. Fork me!

0
__init__.py Normal file
View File

48
pyproject.toml Normal file
View File

@ -0,0 +1,48 @@
[project]
name = 'tox_profile'
requires-python = ">= 3.7"
description = "Read and manipulate tox profile files"
keywords = ["tox", "tox_profile"]
classifiers = [
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
"Development Status :: 4 - Beta",
# Indicate who your project is intended for
"Intended Audience :: Developers",
# Specify the Python versions you support here.
"Programming Language :: Python :: 3",
"License :: OSI Approved",
"Operating System :: POSIX :: BSD :: FreeBSD",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: Implementation :: CPython",
]
dynamic = ["version", "readme", "dependencies"] # cannot be dynamic ['license']
[project.scripts]
tox_profile = "tox_profile:iMain"
[project.urls]
repository = "https://git.plastiras.org/emdee/tox_profile"
homepage = "https://git.plastiras.org/emdee/tox_profile"
[build-system]
requires = ["setuptools >= 61.0"]
build-backend = "setuptools.build_meta"
[tool.setuptools.dynamic]
version = {attr = "tox_profile.__version__"}
readme = {file = ["README.md"]}
dependencies = {file = ["requirements.txt"]}
#[tool.setuptools.packages.find]
#where = "src"

9
requirements.txt Normal file
View File

@ -0,0 +1,9 @@
# the versions are the current ones tested - may work with earlier versions
ruamel.yaml >= 0.18.5
msgpack >= 1.0.7
coloredlogs >= 15.0.1
# optional
# nmap
# this is not on pypi yet - get it from
# https://git.plastiras.org/emdee/toxygen_wrapper
# toxygen_wrapper >= 1.0.0

55
setup.cfg Normal file
View File

@ -0,0 +1,55 @@
[metadata]
classifiers =
License :: OSI Approved
Intended Audience :: Developers
Operating System :: POSIX :: BSD :: FreeBSD
Operating System :: POSIX :: Linux
Programming Language :: Python :: 3 :: Only
Programming Language :: Python :: 3.7
Programming Language :: Python :: 3.8
Programming Language :: Python :: 3.9
Programming Language :: Python :: 3.10
Programming Language :: Python :: 3.11
Programming Language :: Python :: Implementation :: CPython
description = Read and manipulate tox profile files
long_description = Read and manipulate tox profile files
url = https://git.plastiras.org/emdee/tox_profile/
keywords = ctypes Tox
[options]
zip_safe = false
python_requires = >=3.7
include_package_data =
"*" = ["*.txt", "*.bash"]
[options.entry_points]
console_scripts =
tox_profile = tox_profile.tox_profile:iMain
[easy_install]
zip_ok = false
[flake8]
jobs = 1
max-line-length = 88
ignore =
E111
E114
E128
E225
E261
E302
E305
E402
E501
E502
E541
E701
E702
E704
E722
E741
F508
F541
W503
W601

0
src/__init__.py Normal file
View File

View File

@ -0,0 +1,2 @@
__version__ = "1.0.0"

View File

@ -0,0 +1,5 @@
# Package entry point (``python -m tox_profile``): forward the command-line
# arguments to iMain and propagate its integer result as the exit status.
import sys
from tox_profile.tox_profile import iMain

if __name__ == '__main__':
    # argv[0] (the program name) is dropped; iMain returns the exit code.
    sys.exit(iMain(sys.argv[1:]))

View File

@ -60,49 +60,62 @@ commands, or the filename of the nodes file for the nodes command.
# originally from: # originally from:
# https://stackoverflow.com/questions/30901873/what-format-are-tox-files-stored-in # https://stackoverflow.com/questions/30901873/what-format-are-tox-files-stored-in
import sys
import os
import time
import struct
from socket import inet_ntop, AF_INET6, AF_INET
import logging
import argparse import argparse
from pprint import pprint
import shutil
import json import json
import logging
import os
import shutil
import struct
import sys
import time
import warnings import warnings
from pprint import pprint
from socket import AF_INET, AF_INET6, inet_ntop
warnings.filterwarnings('ignore') warnings.filterwarnings('ignore')
from toxygen_wrapper.tests import support_testing as ts
try: try:
# https://pypi.org/project/msgpack/ # https://pypi.org/project/msgpack/
import msgpack import msgpack
except ImportError as e: except ImportError as e: # noqa
msgpack = None msgpack = None
try: try:
import yaml import yaml
except ImportError as e: except ImportError as e: # noqa
yaml = None yaml = None
try:
import stem
except ImportError as e: # noqa
stem = None
try:
import nmap
except ImportError as e: # noqa
nmap = None
try: try:
# https://pypi.org/project/coloredlogs/ # https://pypi.org/project/coloredlogs/
import coloredlogs import coloredlogs
if 'COLOREDLOGS_LEVEL_STYLES' not in os.environ: if 'COLOREDLOGS_LEVEL_STYLES' not in os.environ:
os.environ['COLOREDLOGS_LEVEL_STYLES'] = 'spam=22;debug=28;verbose=34;notice=220;warning=202;success=118,bold;error=124;critical=background=red' os.environ['COLOREDLOGS_LEVEL_STYLES'] = 'spam=22;debug=28;verbose=34;notice=220;warning=202;success=118,bold;error=124;critical=background=red'
except ImportError as e: except ImportError as e: # noqa
coloredlogs = False coloredlogs = False
__version__ = "1.0.0"
try: try:
# https://git.plastiras.org/emdee/toxygen_wrapper # https://git.plastiras.org/emdee/toxygen_wrapper
from wrapper.toxencryptsave import ToxEncryptSave from toxygen_wrapper.toxencryptsave import ToxEncryptSave
from wrapper_tests.support_http import download_url, bAreWeConnected from toxygen_wrapper.tests import support_testing as ts
from wrapper_tests.support_testing import sTorResolve from toxygen_wrapper.tests.support_http import bAreWeConnected, download_url
from wrapper_tests import support_testing as ts from toxygen_wrapper.tests.support_testing import sTorResolve
except ImportError as e: except ImportError as e:
print(f"Import Warning {e}") print(f"Import Warning {e}")
print("Download toxygen_wrapper to deal with encrypted tox files, from:") print("Download toxygen_wrapper to deal with encrypted tox files, from:")
print("https://git.plastiras.org/emdee/toxygen_wrapper") print("https://git.plastiras.org/emdee/toxygen_wrapper")
print("Just put the parent of the wrapper directory on your PYTHONPATH") print("Just put the parent of the toxygen_wrapper directory on your PYTHONPATH")
print("You also need to link your libtoxcore.so and libtoxav.so") print("You also need to link your libtoxcore.so and libtoxav.so")
print("and libtoxencryptsave.so into wrapper/../libs/") print("and libtoxencryptsave.so into toxygen_wrapper/../libs/")
print("Link all 3 from libtoxcore.so if you have only libtoxcore.so") print("Link all 3 from libtoxcore.so if you have only libtoxcore.so")
ToxEncryptSave = None ToxEncryptSave = None
download_url = None download_url = None
@ -112,7 +125,20 @@ except ImportError as e:
LOG = logging.getLogger('TSF') LOG = logging.getLogger('TSF')
# Fix for Windows def LOG_error(a): print('EROR> '+a)
def LOG_warn(a): print('WARN> '+a)
def LOG_info(a):
bVERBOSE = iLOG_LEVEL <= 20
if bVERBOSE: print('INFO> '+a)
def LOG_debug(a):
bVERBOSE = iLOG_LEVEL <= 10-1
if bVERBOSE: print('DBUG> '+a)
def LOG_trace(a):
bVERBOSE = iLOG_LEVEL < 10
if bVERBOSE: print('TRAC> '+a)
__version__ = "0.1.0"
# FixMe for Windows
sDIR = os.environ.get('TMPDIR', '/tmp') sDIR = os.environ.get('TMPDIR', '/tmp')
sTOX_VERSION = "1000002018" sTOX_VERSION = "1000002018"
sVER_MIN = "1000002013" sVER_MIN = "1000002013"
@ -131,6 +157,7 @@ LOG.trace = trace
global bOUT, aOUT, sENC global bOUT, aOUT, sENC
aOUT = {} aOUT = {}
bOUT = b'' bOUT = b''
lLABELS = []
sENC = sys.getdefaultencoding() # 'utf-8' sENC = sys.getdefaultencoding() # 'utf-8'
lNULLS = ['', '[]', 'null'] lNULLS = ['', '[]', 'null']
lNONES = ['', '-', 'NONE'] lNONES = ['', '-', 'NONE']
@ -251,15 +278,15 @@ Length Contents
8 uint64_t Last seen time 8 uint64_t Last seen time
""" """
dStatus = { # Status Meaning dStatus = { # Status Meaning
0: 'Not a friend', 0: 'Not a friend',
1: 'Friend added', 1: 'Friend added',
2: 'Friend request sent', 2: 'Friend request sent',
3: 'Confirmed friend', 3: 'Confirmed friend',
4: 'Friend online' 4: 'Friend online'
} }
slen = 1+32+1024+1+2+128+2+1007+1+2+1+3+4+8 # 2216 slen = 1+32+1024+1+2+128+2+1007+1+2+1+3+4+8 # 2216
assert length % slen == 0 assert length % slen == 0, length
lIN = [] lIN = []
for i in range(length // slen): for i in range(length // slen):
delta = i*slen delta = i*slen
@ -516,7 +543,7 @@ def lProcessDHTnodes(state, index, length, result, label="DHTnode"):
return lIN return lIN
def process_chunk(index, state, oArgs=None): def process_chunk(index, state, oArgs=None):
global bOUT, aOUT global bOUT, aOUT, lLABELS
global sENC global sENC
length = struct.unpack_from("<I", state, index)[0] length = struct.unpack_from("<I", state, index)[0]
@ -537,6 +564,7 @@ def process_chunk(index, state, oArgs=None):
LOG.trace(f"PROCESS_CHUNK {label} index={index} bOUT={len(bOUT)} delta={diff} length={length}") LOG.trace(f"PROCESS_CHUNK {label} index={index} bOUT={len(bOUT)} delta={diff} length={length}")
if data_type == MESSENGER_STATE_TYPE_NOSPAMKEYS: if data_type == MESSENGER_STATE_TYPE_NOSPAMKEYS:
lLABELS += [label]
nospam = bin_to_hex(result[0:4]) nospam = bin_to_hex(result[0:4])
public_key = bin_to_hex(result[4:36]) public_key = bin_to_hex(result[4:36])
private_key = bin_to_hex(result[36:68]) private_key = bin_to_hex(result[36:68])
@ -565,6 +593,7 @@ def process_chunk(index, state, oArgs=None):
LOG.info(f"{label} {key} EDITED to {val}") LOG.info(f"{label} {key} EDITED to {val}")
elif data_type == MESSENGER_STATE_TYPE_DHT: elif data_type == MESSENGER_STATE_TYPE_DHT:
lLABELS += [label]
LOG.debug(f"process_chunk {label} length={length}") LOG.debug(f"process_chunk {label} length={length}")
if length > 4: if length > 4:
lIN = lProcessDHTnodes(state, index, length, result, "DHTnode") lIN = lProcessDHTnodes(state, index, length, result, "DHTnode")
@ -583,7 +612,8 @@ def process_chunk(index, state, oArgs=None):
LOG.info(f"{label} {key} EDITED to {val}") LOG.info(f"{label} {key} EDITED to {val}")
elif data_type == MESSENGER_STATE_TYPE_FRIENDS: elif data_type == MESSENGER_STATE_TYPE_FRIENDS:
LOG.info(f"{label} {length // 2216} FRIENDS {length % 2216}") lLABELS += [label]
LOG.info(f"{label} {length // 2216} friends mod={length % 2216}")
if length > 0: if length > 0:
lIN = lProcessFriends(state, index, length, result) lIN = lProcessFriends(state, index, length, result)
else: else:
@ -592,6 +622,7 @@ def process_chunk(index, state, oArgs=None):
aOUT.update({label: lIN}) aOUT.update({label: lIN})
elif data_type == MESSENGER_STATE_TYPE_NAME: elif data_type == MESSENGER_STATE_TYPE_NAME:
lLABELS += [label]
name = str(result, sENC) name = str(result, sENC)
LOG.info(f"{label} Nick_name = " +name) LOG.info(f"{label} Nick_name = " +name)
aIN = {"Nick_name": name} aIN = {"Nick_name": name}
@ -604,6 +635,7 @@ def process_chunk(index, state, oArgs=None):
LOG.info(f"{label} {key} EDITED to {val}") LOG.info(f"{label} {key} EDITED to {val}")
elif data_type == MESSENGER_STATE_TYPE_STATUSMESSAGE: elif data_type == MESSENGER_STATE_TYPE_STATUSMESSAGE:
lLABELS += [label]
mess = str(result, sENC) mess = str(result, sENC)
LOG.info(f"{label} StatusMessage = " +mess) LOG.info(f"{label} StatusMessage = " +mess)
aIN = {"Status_message": mess} aIN = {"Status_message": mess}
@ -616,6 +648,7 @@ def process_chunk(index, state, oArgs=None):
LOG.info(f"{label} {key} EDITED to {val}") LOG.info(f"{label} {key} EDITED to {val}")
elif data_type == MESSENGER_STATE_TYPE_STATUS: elif data_type == MESSENGER_STATE_TYPE_STATUS:
lLABELS += [label]
# 1 uint8_t status (0 = online, 1 = away, 2 = busy) # 1 uint8_t status (0 = online, 1 = away, 2 = busy)
dStatus = {0: 'online', 1: 'away', 2: 'busy'} dStatus = {0: 'online', 1: 'away', 2: 'busy'}
status = struct.unpack_from(">b", state, index)[0] status = struct.unpack_from(">b", state, index)[0]
@ -631,6 +664,7 @@ def process_chunk(index, state, oArgs=None):
LOG.info(f"{label} {key} EDITED to {val}") LOG.info(f"{label} {key} EDITED to {val}")
elif data_type == MESSENGER_STATE_TYPE_GROUPS: elif data_type == MESSENGER_STATE_TYPE_GROUPS:
lLABELS += [label]
if length > 0: if length > 0:
lIN = lProcessGroups(state, index, length, result, label) lIN = lProcessGroups(state, index, length, result, label)
else: else:
@ -639,6 +673,7 @@ def process_chunk(index, state, oArgs=None):
aOUT.update({label: lIN}) aOUT.update({label: lIN})
elif data_type == MESSENGER_STATE_TYPE_TCP_RELAY: elif data_type == MESSENGER_STATE_TYPE_TCP_RELAY:
lLABELS += [label]
if length > 0: if length > 0:
lIN = lProcessNodeInfo(state, index, length, result, "TCPnode") lIN = lProcessNodeInfo(state, index, length, result, "TCPnode")
LOG.info(f"TYPE_TCP_RELAY {len(lIN)} nodes {length} length") LOG.info(f"TYPE_TCP_RELAY {len(lIN)} nodes {length} length")
@ -654,6 +689,7 @@ def process_chunk(index, state, oArgs=None):
LOG.info(f"{label} {key} EDITED to {val}") LOG.info(f"{label} {key} EDITED to {val}")
elif data_type == MESSENGER_STATE_TYPE_PATH_NODE: elif data_type == MESSENGER_STATE_TYPE_PATH_NODE:
lLABELS += [label]
#define NUM_SAVED_PATH_NODES 8 #define NUM_SAVED_PATH_NODES 8
if not length % 8 == 0: if not length % 8 == 0:
# this should be an assert? # this should be an assert?
@ -670,6 +706,7 @@ def process_chunk(index, state, oArgs=None):
LOG.info(f"{label} {key} EDITED to {val}") LOG.info(f"{label} {key} EDITED to {val}")
elif data_type == MESSENGER_STATE_TYPE_CONFERENCES: elif data_type == MESSENGER_STATE_TYPE_CONFERENCES:
lLABELS += [label]
lIN = [] lIN = []
if length > 0: if length > 0:
LOG.debug(f"TODO process_chunk {label} bytes={length}") LOG.debug(f"TODO process_chunk {label} bytes={length}")
@ -680,10 +717,13 @@ def process_chunk(index, state, oArgs=None):
elif data_type != MESSENGER_STATE_TYPE_END: elif data_type != MESSENGER_STATE_TYPE_END:
LOG.error("UNRECOGNIZED datatype={datatype}") LOG.error("UNRECOGNIZED datatype={datatype}")
sys.exit(1) sys.exit(1)
else: else:
LOG.info("END") # That's all folks... LOG.info("END") # That's all folks...
# drop through # drop through
if len(lLABELS) == len(dSTATE_TYPE.keys()) - 1:
LOG.info(f"{len(lLABELS)} sections") # That's all folks...
else:
LOG.warn(f"{10 - len(lLABELS)} sections missing {lLABELS}") # That's all folks...
# We repack as we read: or edit as we parse; simply edit result and length. # We repack as we read: or edit as we parse; simply edit result and length.
# We'll add the results back to bOUT to see if we get what we started with. # We'll add the results back to bOUT to see if we get what we started with.
@ -708,31 +748,31 @@ ip=""
declare -a ports declare -a ports
jq '.|with_entries(select(.key|match("nodes"))).nodes[]|select(.status_tcp)|select(.ipv4|match("."))|.ipv4,.tcp_ports' | while read line ; do jq '.|with_entries(select(.key|match("nodes"))).nodes[]|select(.status_tcp)|select(.ipv4|match("."))|.ipv4,.tcp_ports' | while read line ; do
if [ -z "$ip" ] ; then if [ -z "$ip" ] ; then
ip=`echo $line|sed -e 's/"//g'` ip=`echo $line|sed -e 's/"//g'`
ports=() ports=()
continue continue
elif [ "$line" = '[' ] ; then elif [ "$line" = '[' ] ; then
continue continue
elif [ "$line" = ']' ] ; then elif [ "$line" = ']' ] ; then
if ! route | grep -q ^def ; then if ! route | grep -q ^def ; then
echo ERROR no route echo ERROR no route
exit 3 exit 3
fi fi
if [ "$ip" = '"NONE"' -o "$ip" = 'NONE' ] ; then if [ "$ip" = '"NONE"' -o "$ip" = 'NONE' ] ; then
: :
elif ping -c 1 $ip | grep '100% packet loss' ; then elif ping -c 1 $ip | grep '100% packet loss' ; then
echo WARN failed ping $ip echo WARN failed ping $ip
else
echo INFO $ip "${ports[*]}"
cmd="nmap -Pn -n -sT -p T:"`echo "${ports[*]}" |sed -e 's/ /,/g'`
echo DBUG $cmd $ip
$cmd $ip | grep /tcp
fi
ip=""
continue
else else
port=`echo $line|sed -e 's/,//'` echo INFO $ip "${ports[*]}"
ports+=($port) cmd="nmap -Pn -n -sT -p T:"`echo "${ports[*]}" |sed -e 's/ /,/g'`
echo DBUG $cmd $ip
$cmd $ip | grep /tcp
fi
ip=""
continue
else
port=`echo $line|sed -e 's/,//'`
ports+=($port)
fi fi
done""" done"""
@ -786,14 +826,14 @@ def lNodesCheckNodes(json_nodes, oArgs, bClean=False):
Checking NODES.json Checking NODES.json
""" """
lErrs = [] lErrs = []
iErrs = 0 ierrs = 0
nth = 0 nth = 0
if bClean: lNew=[] if bClean: lNew=[]
# assert type(json_nodes) == dict # assert type(json_nodes) == dict
bRUNNING_TOR = False bRUNNING_TOR = False
if bHAVE_TOR: if bHAVE_TOR:
iRet = os.system("netstat -nle4|grep -q :9050") iret = os.system("netstat -nle4|grep -q :9050")
if iRet == 0: if iret == 0:
bRUNNING_TOR = True bRUNNING_TOR = True
lOnions = [] lOnions = []
@ -831,8 +871,8 @@ def lNodesCheckNodes(json_nodes, oArgs, bClean=False):
elif True: elif True:
if not node[ipv] in lNONES and ipv == 'ipv4': if not node[ipv] in lNONES and ipv == 'ipv4':
# just ping for now # just ping for now
iRet = os.system(f"ping -c 1 {node[ipv]} > /dev/null") iret = os.system(f"ping -c 1 {node[ipv]} > /dev/null")
if iRet == 0: if iret == 0:
LOG.info(f"Pinged {node[ipv]}") LOG.info(f"Pinged {node[ipv]}")
else: else:
LOG.warn(f"Failed ping {node[ipv]}") LOG.warn(f"Failed ping {node[ipv]}")
@ -852,22 +892,22 @@ def lNodesCheckNodes(json_nodes, oArgs, bClean=False):
if node["version"] and node["version"] < "1000002013": if node["version"] and node["version"] < "1000002013":
lErrs += [nth] lErrs += [nth]
LOG.error(f"vulnerable version {node['version']} < 1000002013") LOG.error(f"{node['ipv4']}: vulnerable version {node['version']} < 1000002013")
elif node["version"] and node["version"] < sVER_MIN: elif node["version"] and node["version"] < sVER_MIN:
LOG.warn(f"outdated version {node['version']} < {sVER_MIN}") LOG.warn(f"{node['ipv4']}: outdated version {node['version']} < {sVER_MIN}")
# Put the onion address in the location after the country code # Put the onion address in the location after the country code
if len(node["location"]) not in [2, 65]: if len(node["location"]) not in [2, 65]:
LOG.warn(f"location {location} should be a 2 digit country code, or 'code onion'") LOG.warn(f"{node['ipv4']}: location {node['location']} should be a 2 digit country code, or 'code onion'")
elif len(node["location"]) == 65 and \ elif len(node["location"]) == 65 and \
not node["location"].endswith('.onion') : not node["location"].endswith('.onion'):
LOG.warn(f"location {location} should be a 2 digit country code 'code onion'") LOG.warn(f"{node['ipv4']}: location {node['location']} should be a 2 digit country code 'code onion'")
elif len(node["location"]) == 65 and \ elif len(node["location"]) == 65 and \
node["location"].endswith('.onion') and bHAVE_TOR: node["location"].endswith('.onion') and bHAVE_TOR:
onion = node["location"][3:] onion = node["location"][3:]
if bHAVE_TOR and bAreWeConnected and bAreWeConnected() \ if bHAVE_TOR and bAreWeConnected and bAreWeConnected() \
and (not node[ipv] in lNONES and and (not node[ipv] in lNONES and
not node[ipv] in lNONES ): not node[ipv] in lNONES):
# torresolve the onion # torresolve the onion
# Fixme - see if tor is running # Fixme - see if tor is running
try: try:
@ -880,12 +920,12 @@ def lNodesCheckNodes(json_nodes, oArgs, bClean=False):
pass pass
else: else:
if s: if s:
LOG.info(f"Found an onion that resolves to {s}") LOG.info(f"{node['ipv4']}: Found an onion that resolves to {s}")
else: else:
LOG.warn(f"Found an onion that resolves to {s}") LOG.warn(f"{node['ipv4']}: Found an onion that resolves to {s}")
if node['last_ping'] and time.time() - node['last_ping'] > iOLD_SECS: if node['last_ping'] and time.time() - node['last_ping'] > iOLD_SECS:
LOG.debug(f"node has not been pinged in more than 3 months") LOG.debug(f"{node['ipv4']}: node has not pinged in more than 3 months")
# suggestions YMMV # suggestions YMMV
@ -895,22 +935,25 @@ def lNodesCheckNodes(json_nodes, oArgs, bClean=False):
if not node['motd']: if not node['motd']:
# LOG.info(f"Maybe put a ToxID: in motd so people can contact you.") # LOG.info(f"Maybe put a ToxID: in motd so people can contact you.")
pass pass
if bClean and not nth in lErrs: if bClean and nth not in lErrs:
lNew+=[new_node] lNew += [new_node]
nth += 1 nth += 1
# fixme look for /etc/tor/torrc but it may not be readable # fixme look for /etc/tor/torrc but it may not be readable
if bHAVE_TOR and os.path.exists('/etc/tor/torrc'): if bHAVE_TOR and os.path.exists('/etc/tor/torrc'):
# get from torrc
address_range = '172.16.1' # 127.192.0
# print(sBLURB) # print(sBLURB)
LOG.info("Add this section to your /etc/tor/torrc") LOG.info("Add this section to your /etc/tor/torrc")
for line in lONION_CONFIG['vadr']: for line in lONION_CONFIG['vadr']:
print(line) print(line)
if lOnions: if lOnions:
LOG.info("Add this section to your /etc/tor/torrc") LOG.info("Add this section to your /etc/tor/torrc")
print('VirtualAddrNetwork {address_range}.0/10')
i = 1 i = 1
for line in lOnions: for line in lOnions:
hosts = line.split(':') hosts = line.split(':')
print(f"MapAddress {hosts[0]} 172.16.1.{i}") print(f"MapAddress {hosts[0]} {address_range}.{i}")
i += 1 i += 1
if bClean: if bClean:
@ -925,7 +968,7 @@ def iNodesFileCheck(sProOrNodes, oArgs, bClean=False):
with open(sProOrNodes, 'rt') as fl: with open(sProOrNodes, 'rt') as fl:
json_all = json.loads(fl.read()) json_all = json.loads(fl.read())
json_nodes = json_all['nodes'] json_nodes = json_all['nodes']
except Exception as e: except Exception as e: # noqa
LOG.exception(f"{oArgs.command} error reading {sProOrNodes}") LOG.exception(f"{oArgs.command} error reading {sProOrNodes}")
return 1 return 1
@ -940,17 +983,17 @@ def iNodesFileCheck(sProOrNodes, oArgs, bClean=False):
aOut = dict(last_scan=json_all['last_scan'], aOut = dict(last_scan=json_all['last_scan'],
last_refresh=now, last_refresh=now,
nodes=al) nodes=al)
sOut = oArgs.output sout = oArgs.output
try: try:
LOG.debug(f"iNodesFileClean saving to {sOut}") LOG.debug(f"iNodesFileClean saving to {sout}")
oStream = open(sOut, 'wt', encoding=sENC) oStream = open(sout, 'wt', encoding=sENC)
json.dump(aOut, oStream, indent=oArgs.indent) json.dump(aOut, oStream, indent=oArgs.indent)
if oStream.write('\n') > 0: i = 0 if oStream.write('\n') > 0: i = 0
except Exception as e: except Exception as e: # noqa
LOG.exception(f"iNodesFileClean error dumping JSON to {sOUT}") LOG.exception(f"iNodesFileClean error dumping JSON to {sout}")
return 3 return 3
except Exception as e: except Exception as e: # noqa
LOG.exception(f"iNodesFileCheck error checking JSON") LOG.exception(f"iNodesFileCheck error checking JSON")
i = -2 i = -2
else: else:
@ -965,67 +1008,67 @@ def iNodesFileClean(sProOrNodes):
return 0 return 0
f = "DHTNodes.clean" f = "DHTNodes.clean"
if not oArgs.output: if not oArgs.output:
sOut = os.path.join(sDIR, f) sout = os.path.join(sDIR, f)
else: else:
sOut = oArgs.output sout = oArgs.output
try: try:
LOG.debug(f"iNodesFileClean saving to {sOut}") LOG.debug(f"iNodesFileClean saving to {sout}")
oStream = open(sOut, 'wt', encoding=sENC) oStream = open(sout, 'wt', encoding=sENC)
json.dump(aOUT, oStream, indent=oArgs.indent) json.dump(aOUT, oStream, indent=oArgs.indent)
if oStream.write('\n') > 0: iRet = 0 if oStream.write('\n') > 0: iret = 0
except Exception as e: except Exception as e: # noqa
LOG.exception(f"iNodesFileClean error dumping JSON to {sOUT}") LOG.exception(f"iNodesFileClean error dumping JSON to {sout}")
return 3 return 3
LOG.info(f"{oArgs.info}ing iRet={iRet} to {oArgs.output}") LOG.info(f"{oArgs.info}ing iret={iret} to {oArgs.output}")
return 0 return 0
def iOsSystemNmapUdp(l, oArgs): def iOsSystemNmapUdp(l, oArgs):
iErrs = 0 ierrs = 0
for elt in l: for elt in l:
cmd = f"sudo nmap -Pn -n -sU -p U:{elt['Port']} {elt['Ip']}" cmd = f"sudo nmap -Pn -n -sU -p U:{elt['Port']} {elt['Ip']}"
LOG.debug(f"{oArgs.info} {cmd} to {oArgs.output}") LOG.debug(f"{oArgs.info} {cmd} to {oArgs.output}")
iErrs += os.system(cmd +f" >> {oArgs.output} 2>&1") ierrs += os.system(cmd +f" >> {oArgs.output} 2>&1")
if iErrs: if ierrs:
LOG.warn(f"{oArgs.info} {iErrs} ERRORs to {oArgs.output}") LOG.warn(f"{oArgs.info} {ierrs} ERRORs to {oArgs.output}")
else: else:
LOG.info(f"{oArgs.info} NO errors to {oArgs.output}") LOG.info(f"{oArgs.info} NO errors to {oArgs.output}")
lRet = lParseNapOutput(oArgs.output) lRet = lParseNapOutput(oArgs.output)
if lRet: if lRet:
for sLine in lRet: for sLine in lRet:
LOG.warn(f"{oArgs.nodes} {sLine}") LOG.warn(f"{oArgs.nodes} {sLine}")
iErr = len(lRet) ierr = len(lRet)
iErrs += iErr ierrs += ierr
return iErrs return ierrs
def iOsSystemNmapTcp(l, oArgs): def iOsSystemNmapTcp(l, oArgs):
iErrs = 0 ierrs = 0
LOG.debug(f"{len(l)} nodes to {oArgs.output}") LOG.debug(f"{len(l)} nodes to {oArgs.output}")
for elt in l: for elt in l:
cmd = f"sudo nmap -Pn -n -sT -p T:{elt['Port']} {elt['Ip']}" cmd = f"sudo nmap -Pn -n -sT -p T:{elt['Port']} {elt['Ip']}"
LOG.debug(f"iOsSystemNmapTcp {cmd} to {oArgs.output}") LOG.debug(f"iOsSystemNmapTcp {cmd} to {oArgs.output}")
iErr += os.system(cmd +f" >> {oArgs.output} 2>&1") ierr = os.system(cmd +f" >> {oArgs.output} 2>&1")
if iErr: if ierr:
LOG.warn(f"iOsSystemNmapTcp {iErrs} ERRORs to {oArgs.output}") LOG.warn(f"iOsSystemNmapTcp {ierrs} ERRORs to {oArgs.output}")
else: else:
lRet = lParseNapOutput(oArgs.output) lRet = lParseNapOutput(oArgs.output)
if lRet: if lRet:
for sLine in lRet: for sLine in lRet:
LOG.warn(f"{oArgs.nodes} {sLine}") LOG.warn(f"{oArgs.nodes} {sLine}")
iErr = len(lRet) ierr = len(lRet)
iErrs += iErr ierrs += ierr
return iErrs return ierrs
def vSetupLogging(loglevel=logging.DEBUG): def vSetupLogging(log_level=logging.DEBUG):
global LOG global LOG
if coloredlogs: if coloredlogs:
aKw = dict(level=loglevel, aKw = dict(level=log_level,
logger=LOG, logger=LOG,
fmt='%(name)s %(levelname)s %(message)s') fmt='%(name)s %(levelname)s %(message)s')
coloredlogs.install(**aKw) coloredlogs.install(**aKw)
else: else:
aKw = dict(level=loglevel, aKw = dict(level=log_level,
format='%(name)s %(levelname)-4s %(message)s') format='%(name)s %(levelname)-4s %(message)s')
logging.basicConfig(**aKw) logging.basicConfig(**aKw)
@ -1042,7 +1085,7 @@ def iTestTorConfig(sProOrNodes, oArgs, bClean=False):
for key,val in lONION_CONFIG.items(): for key,val in lONION_CONFIG.items():
for line in val: for line in val:
if line.startswith('#'): continue if line.startswith('#'): continue
if not line in lEtcTorrc: if line not in lEtcTorrc:
print(line) print(line)
# add_mapaddress # add_mapaddress
if bClean == False: if bClean == False:
@ -1057,6 +1100,23 @@ def iTestTorConfig(sProOrNodes, oArgs, bClean=False):
# add_bootstrap # add_bootstrap
return 0 return 0
def iTestTorExits(sProOrNodes, oArgs, bClean=False):
LOG.info(f"iTestTorExits")
# import pdb; pdb.set_trace()
# sProOrNodes
try:
if hasattr(ts, 'oSTEM_CONTROLER') and ts.oSTEM_CONTROLER \
and ts.oSTEM_CONTROLER.is_set('ExcludeExitNodes'):
LOG_info(f"ExcludeExitNodes is set so we cant continue")
return 0
LOG_info(f"ExcludeExitNodes is not set so we can continue")
l = ts.lExitExcluder(iPort=9051)
except Exception as e:
LOG.error(f"ExcludeExitNodes errored {e}")
return 1
return 0
def iTestTorTest(sProOrNodes, oArgs, bClean=False): def iTestTorTest(sProOrNodes, oArgs, bClean=False):
# test_onion # test_onion
# check_mapaddress # check_mapaddress
@ -1064,7 +1124,7 @@ def iTestTorTest(sProOrNodes, oArgs, bClean=False):
LOG.info(f"iTestTorTest {sProOrNodes}") LOG.info(f"iTestTorTest {sProOrNodes}")
for elt in lONION_NODES: for elt in lONION_NODES:
for line in elt['onions']: for line in elt['onions']:
host,port = line.split(':') (host, port,) = line.split(':')
LOG.debug(f"iTestTorTest resolving {host}") LOG.debug(f"iTestTorTest resolving {host}")
ip = ts.sTorResolve(host) ip = ts.sTorResolve(host)
if ip: LOG.info(f"{host} resolved to {ip}") if ip: LOG.info(f"{host} resolved to {ip}")
@ -1076,7 +1136,7 @@ def iTestTorTest(sProOrNodes, oArgs, bClean=False):
def iTestOnionNodes(): def iTestOnionNodes():
return 0 return 0
def iMain(sProOrNodes, oArgs): def iMainFun(sProOrNodes, oArgs):
global bOUT, aOUT, sENC global bOUT, aOUT, sENC
global bSAVE global bSAVE
@ -1099,12 +1159,12 @@ def iMain(sProOrNodes, oArgs):
if oArgs.command == 'decrypt': if oArgs.command == 'decrypt':
assert oArgs.output, "--output required for this command" assert oArgs.output, "--output required for this command"
oStream = open(oArgs.output, 'wb') oStream = open(oArgs.output, 'wb')
iRet = oStream.write(bSAVE) iret = oStream.write(bSAVE)
LOG.info(f"Wrote {iRet} to {oArgs.output}") LOG.info(f"Wrote {iret} to {oArgs.output}")
iRet = 0 iret = 0
elif oArgs.command == 'nodes': elif oArgs.command == 'nodes':
iRet = -1 iret = -1
ep_sec = str(int(time.time())) ep_sec = str(int(time.time()))
json_head = '{"last_scan":' +ep_sec \ json_head = '{"last_scan":' +ep_sec \
+',"last_refresh":' +ep_sec \ +',"last_refresh":' +ep_sec \
@ -1115,7 +1175,7 @@ def iMain(sProOrNodes, oArgs):
with open(oArgs.output, 'wt') as oFd: with open(oArgs.output, 'wt') as oFd:
oFd.write(json_head) oFd.write(json_head)
cmd = f"cat '{sProOrNodes}' | jq '.|with_entries(select(.key|match(\"nodes\"))).nodes[]|select(.status_tcp)|select(.ipv4|match(\".\"))' " cmd = f"cat '{sProOrNodes}' | jq '.|with_entries(select(.key|match(\"nodes\"))).nodes[]|select(.status_tcp)|select(.ipv4|match(\".\"))' "
iRet = os.system(cmd +"| sed -e '2,$s/^{/,{/'" +f" >>{oArgs.output}") iret = os.system(cmd +"| sed -e '2,$s/^{/,{/'" +f" >>{oArgs.output}")
with open(oArgs.output, 'at') as oFd: oFd.write(']}\n') with open(oArgs.output, 'at') as oFd: oFd.write(']}\n')
elif oArgs.nodes == 'select_udp': elif oArgs.nodes == 'select_udp':
@ -1124,7 +1184,7 @@ def iMain(sProOrNodes, oArgs):
with open(oArgs.output, 'wt') as oFd: with open(oArgs.output, 'wt') as oFd:
oFd.write(json_head) oFd.write(json_head)
cmd = f"cat '{sProOrNodes}' | jq '.|with_entries(select(.key|match(\"nodes\"))).nodes[]|select(.status_udp)|select(.ipv4|match(\".\"))' " cmd = f"cat '{sProOrNodes}' | jq '.|with_entries(select(.key|match(\"nodes\"))).nodes[]|select(.status_udp)|select(.ipv4|match(\".\"))' "
iRet = os.system(cmd +"| sed -e '2,$s/^{/,{/'" +f" >>{oArgs.output}") iret = os.system(cmd +"| sed -e '2,$s/^{/,{/'" +f" >>{oArgs.output}")
with open(oArgs.output, 'at') as oFd: oFd.write(']}\n') with open(oArgs.output, 'at') as oFd: oFd.write(']}\n')
elif oArgs.nodes == 'select_version': elif oArgs.nodes == 'select_version':
@ -1134,7 +1194,7 @@ def iMain(sProOrNodes, oArgs):
oFd.write(json_head) oFd.write(json_head)
cmd = f"cat '{sProOrNodes}' | jq '.|with_entries(select(.key|match(\"nodes\"))).nodes[]|select(.status_udp)|select(.version|match(\"{sTOX_VERSION}\"))'" cmd = f"cat '{sProOrNodes}' | jq '.|with_entries(select(.key|match(\"nodes\"))).nodes[]|select(.status_udp)|select(.version|match(\"{sTOX_VERSION}\"))'"
iRet = os.system(cmd +"| sed -e '2,$s/^{/,{/'" +f" >>{oArgs.output}") iret = os.system(cmd +"| sed -e '2,$s/^{/,{/'" +f" >>{oArgs.output}")
with open(oArgs.output, 'at') as oFd: with open(oArgs.output, 'at') as oFd:
oFd.write(']}\n') oFd.write(']}\n')
@ -1146,13 +1206,13 @@ def iMain(sProOrNodes, oArgs):
cmd = sBashFileNmapTcp() cmd = sBashFileNmapTcp()
cmd = f"sudo bash {cmd} < '{sProOrNodes}' >'{oArgs.output}' 2>&1" cmd = f"sudo bash {cmd} < '{sProOrNodes}' >'{oArgs.output}' 2>&1"
LOG.debug(cmd) LOG.debug(cmd)
iRet = os.system(cmd) iret = os.system(cmd)
if iRet == 0: if iret == 0:
lRet = lParseNapOutput(oArgs.output) lRet = lParseNapOutput(oArgs.output)
if lRet: if lRet:
for sLine in lRet: for sLine in lRet:
LOG.warn(f"{oArgs.nodes} {sLine}") LOG.warn(f"{oArgs.nodes} {sLine}")
iRet = len(lRet) iret = len(lRet)
elif oArgs.nodes == 'nmap_udp': elif oArgs.nodes == 'nmap_udp':
assert oArgs.output, "--output required for this command" assert oArgs.output, "--output required for this command"
@ -1163,13 +1223,13 @@ def iMain(sProOrNodes, oArgs):
cmd = vBashFileNmapUdp() cmd = vBashFileNmapUdp()
cmd = f"sudo bash {cmd} < '{sProOrNodes}'" +f" >'{oArgs.output}' 2>&1" cmd = f"sudo bash {cmd} < '{sProOrNodes}'" +f" >'{oArgs.output}' 2>&1"
LOG.debug(cmd) LOG.debug(cmd)
iRet = os.system(cmd) iret = os.system(cmd)
if iRet == 0: if iret == 0:
lRet = lParseNapOutput(oArgs.output) lRet = lParseNapOutput(oArgs.output)
if lRet: if lRet:
for sLine in lRet: for sLine in lRet:
LOG.warn(f"{oArgs.nodes} {sLine}") LOG.warn(f"{oArgs.nodes} {sLine}")
iRet = len(lRet) iret = len(lRet)
elif oArgs.nodes == 'download' and download_url: elif oArgs.nodes == 'download' and download_url:
if not bAreWeConnected(): if not bAreWeConnected():
@ -1178,7 +1238,7 @@ def iMain(sProOrNodes, oArgs):
b = download_url(url) b = download_url(url)
if not b: if not b:
LOG.warn("failed downloading list of nodes") LOG.warn("failed downloading list of nodes")
iRet = -1 iret = -1
else: else:
if oArgs.output: if oArgs.output:
oStream = open(oArgs.output, 'wb') oStream = open(oArgs.output, 'wb')
@ -1186,23 +1246,23 @@ def iMain(sProOrNodes, oArgs):
else: else:
oStream = sys.stdout oStream = sys.stdout
oStream.write(str(b, sENC)) oStream.write(str(b, sENC))
iRet = 0 iret = 0
LOG.info(f"downloaded list of nodes to {oStream}") LOG.info(f"downloaded list of nodes to {oStream}")
elif oArgs.nodes == 'check': elif oArgs.nodes == 'check':
i = iNodesFileCheck(sProOrNodes, oArgs, bClean=False) i = iNodesFileCheck(sProOrNodes, oArgs, bClean=False)
iRet = i iret = i
elif oArgs.nodes == 'clean': elif oArgs.nodes == 'clean':
assert oArgs.output, "--output required for this command" assert oArgs.output, "--output required for this command"
i = iNodesFileCheck(sProOrNodes, oArgs, bClean=True) i = iNodesFileCheck(sProOrNodes, oArgs, bClean=True)
iRet = i iret = i
if iRet > 0: if iret > 0:
LOG.warn(f"{oArgs.nodes} iRet={iRet} to {oArgs.output}") LOG.warn(f"{oArgs.nodes} iret={iret} to {oArgs.output}")
elif iRet == 0: elif iret == 0:
LOG.info(f"{oArgs.nodes} iRet={iRet} to {oArgs.output}") LOG.info(f"{oArgs.nodes} iret={iret} to {oArgs.output}")
elif oArgs.command == 'onions': elif oArgs.command == 'onions':
@ -1210,11 +1270,17 @@ def iMain(sProOrNodes, oArgs):
if oArgs.onions == 'config': if oArgs.onions == 'config':
i = iTestTorConfig(sProOrNodes, oArgs) i = iTestTorConfig(sProOrNodes, oArgs)
iRet = i iret = i
elif oArgs.onions == 'test': elif oArgs.onions == 'test':
i = iTestTorTest(sProOrNodes, oArgs) i = iTestTorTest(sProOrNodes, oArgs)
iRet = i iret = i
elif oArgs.onions == 'exits':
i = iTestTorExits(sProOrNodes, oArgs)
iret = i
else:
RuntimeError(oArgs.onions)
elif oArgs.command in ['info', 'edit']: elif oArgs.command in ['info', 'edit']:
@ -1231,11 +1297,11 @@ def iMain(sProOrNodes, oArgs):
assert bSAVE[:8] == bMARK, "Not a Tox profile" assert bSAVE[:8] == bMARK, "Not a Tox profile"
bOUT = bMARK bOUT = bMARK
iErrs = 0 iret = 0
process_chunk(len(bOUT), bSAVE, oArgs) process_chunk(len(bOUT), bSAVE, oArgs)
if not bOUT: if not bOUT:
LOG.error(f"{oArgs.command} NO bOUT results") LOG.error(f"{oArgs.command} NO bOUT results")
iRet = 1 iret = 1
else: else:
oStream = None oStream = None
LOG.debug(f"command={oArgs.command} len bOUT={len(bOUT)} results") LOG.debug(f"command={oArgs.command} len bOUT={len(bOUT)} results")
@ -1243,15 +1309,15 @@ def iMain(sProOrNodes, oArgs):
if oArgs.command in ['edit'] or oArgs.info in ['save']: if oArgs.command in ['edit'] or oArgs.info in ['save']:
LOG.debug(f"{oArgs.command} saving to {oArgs.output}") LOG.debug(f"{oArgs.command} saving to {oArgs.output}")
oStream = open(oArgs.output, 'wb', encoding=None) oStream = open(oArgs.output, 'wb', encoding=None)
if oStream.write(bOUT) > 0: iRet = 0 if oStream.write(bOUT) > 0: iret = 0
LOG.info(f"{oArgs.info}ed iRet={iRet} to {oArgs.output}") LOG.info(f"{oArgs.info}ed iret={iret} to {oArgs.output}")
elif oArgs.info == 'info': elif oArgs.info == 'info':
pass pass
iRet = 0 iret = 0
elif oArgs.info == 'yaml': elif oArgs.info == 'yaml':
if not yaml: if not yaml:
LOG.warn(f"{oArgs.command} no yaml support") LOG.warn(f"{oArgs.command} no yaml support")
iRet = -1 iret = -1
else: else:
LOG.debug(f"{oArgs.command} saving to {oArgs.output}") LOG.debug(f"{oArgs.command} saving to {oArgs.output}")
oStream = open(oArgs.output, 'wt', encoding=sENC) oStream = open(oArgs.output, 'wt', encoding=sENC)
@ -1262,13 +1328,13 @@ def iMain(sProOrNodes, oArgs):
LOG.warn(f'WARN: {e}') LOG.warn(f'WARN: {e}')
else: else:
oStream.write('\n') oStream.write('\n')
iRet = 0 iret = 0
LOG.info(f"{oArgs.info}ing iRet={iRet} to {oArgs.output}") LOG.info(f"{oArgs.info}ing iret={iret} to {oArgs.output}")
elif oArgs.info == 'json': elif oArgs.info == 'json':
if not yaml: if not yaml:
LOG.warn(f"{oArgs.command} no json support") LOG.warn(f"{oArgs.command} no json support")
iRet = -1 iret = -1
else: else:
LOG.debug(f"{oArgs.command} saving to {oArgs.output}") LOG.debug(f"{oArgs.command} saving to {oArgs.output}")
oStream = open(oArgs.output, 'wt', encoding=sENC) oStream = open(oArgs.output, 'wt', encoding=sENC)
@ -1277,56 +1343,56 @@ def iMain(sProOrNodes, oArgs):
except: except:
LOG.warn("There are somtimes problems with the json info dump of bytes keys: ```TypeError: Object of type bytes is not JSON serializable```") LOG.warn("There are somtimes problems with the json info dump of bytes keys: ```TypeError: Object of type bytes is not JSON serializable```")
oStream.write('\n') > 0 oStream.write('\n') > 0
iRet = 0 iret = 0
LOG.info(f"{oArgs.info}ing iRet={iRet} to {oArgs.output}") LOG.info(f"{oArgs.info}ing iret={iret} to {oArgs.output}")
elif oArgs.info == 'repr': elif oArgs.info == 'repr':
LOG.debug(f"{oArgs.command} saving to {oArgs.output}") LOG.debug(f"{oArgs.command} saving to {oArgs.output}")
oStream = open(oArgs.output, 'wt', encoding=sENC) oStream = open(oArgs.output, 'wt', encoding=sENC)
if oStream.write(repr(bOUT)) > 0: iRet = 0 if oStream.write(repr(bOUT)) > 0: iret = 0
if oStream.write('\n') > 0: iRet = 0 if oStream.write('\n') > 0: iret = 0
LOG.info(f"{oArgs.info}ing iRet={iRet} to {oArgs.output}") LOG.info(f"{oArgs.info}ing iret={iret} to {oArgs.output}")
elif oArgs.info == 'pprint': elif oArgs.info == 'pprint':
LOG.debug(f"{oArgs.command} saving to {oArgs.output}") LOG.debug(f"{oArgs.command} saving to {oArgs.output}")
oStream = open(oArgs.output, 'wt', encoding=sENC) oStream = open(oArgs.output, 'wt', encoding=sENC)
pprint(aOUT, stream=oStream, indent=oArgs.indent, width=80) pprint(aOUT, stream=oStream, indent=oArgs.indent, width=80)
iRet = 0 iret = 0
LOG.info(f"{oArgs.info}ing iRet={iRet} to {oArgs.output}") LOG.info(f"{oArgs.info}ing iret={iret} to {oArgs.output}")
elif oArgs.info == 'nmap_relay': elif oArgs.info == 'nmap_relay':
assert bHAVE_NMAP, "nmap is required for this command" assert bHAVE_NMAP, "nmap is required for this command"
assert oArgs.output, "--output required for this command" assert oArgs.output, "--output required for this command"
if aOUT["TCP_RELAY"]: if aOUT["TCP_RELAY"]:
iRet = iOsSystemNmapTcp(aOUT["TCP_RELAY"], oArgs) iret = iOsSystemNmapTcp(aOUT["TCP_RELAY"], oArgs)
else: else:
LOG.warn(f"{oArgs.info} no TCP_RELAY") LOG.warn(f"{oArgs.info} no TCP_RELAY")
iRet = 0 iret = 0
elif oArgs.info == 'nmap_dht': elif oArgs.info == 'nmap_dht':
assert bHAVE_NMAP, "nmap is required for this command" assert bHAVE_NMAP, "nmap is required for this command"
assert oArgs.output, "--output required for this command" assert oArgs.output, "--output required for this command"
if aOUT["DHT"]: if aOUT["DHT"]:
iRet = iOsSystemNmapUdp(aOUT["DHT"], oArgs) iret = iOsSystemNmapUdp(aOUT["DHT"], oArgs)
else: else:
LOG.warn(f"{oArgs.info} no DHT") LOG.warn(f"{oArgs.info} no DHT")
iRet = 0 iret = 0
elif oArgs.info == 'nmap_path': elif oArgs.info == 'nmap_path':
assert bHAVE_NMAP, "nmap is required for this command" assert bHAVE_NMAP, "nmap is required for this command"
assert oArgs.output, "--output required for this command" assert oArgs.output, "--output required for this command"
if aOUT["PATH_NODE"]: if aOUT["PATH_NODE"]:
iRet = iOsSystemNmapUdp(aOUT["PATH_NODE"], oArgs) iret = iOsSystemNmapUdp(aOUT["PATH_NODE"], oArgs)
else: else:
LOG.warn(f"{oArgs.info} no PATH_NODE") LOG.warn(f"{oArgs.info} no PATH_NODE")
iRet = 0 iret = 0
else: else:
LOG.warn(f"{oArgs.command} UNREGOGNIZED") LOG.warn(f"{oArgs.command} UNREGOGNIZED")
if oStream and oStream != sys.stdout and oStream != sys.stderr: if oStream and oStream != sys.stdout and oStream != sys.stderr:
oStream.close() oStream.close()
return iRet return iret
def oMainArgparser(_=None): def oMainArgparser(_=None):
if not os.path.exists('/proc/sys/net/ipv6'): if not os.path.exists('/proc/sys/net/ipv6'):
@ -1349,7 +1415,7 @@ def oMainArgparser(_=None):
help='comma seperated SECTION,num,key,value - or help for ') help='comma seperated SECTION,num,key,value - or help for ')
parser.add_argument('--indent', type=int, default=2, parser.add_argument('--indent', type=int, default=2,
help='Indent for yaml/json/pprint') help='Indent for yaml/json/pprint')
choices=['info', 'save', 'repr', 'yaml','json', 'pprint'] choices = ['info', 'save', 'repr', 'yaml','json', 'pprint']
if bHAVE_NMAP: if bHAVE_NMAP:
choices += ['nmap_relay', 'nmap_dht', 'nmap_path'] choices += ['nmap_relay', 'nmap_dht', 'nmap_path']
parser.add_argument('--info', type=str, default='info', parser.add_argument('--info', type=str, default='info',
@ -1389,8 +1455,9 @@ def oMainArgparser(_=None):
return parser return parser
if __name__ == '__main__': def iMain(lArgv=None):
lArgv = sys.argv[1:] global iLOG_LEVEL
if lArgv is None: lArgv = sys.argv[1:]
parser = oMainArgparser() parser = oMainArgparser()
oArgs = parser.parse_args(lArgv) oArgs = parser.parse_args(lArgv)
if oArgs.command in ['edit'] and oArgs.edit == 'help': if oArgs.command in ['edit'] and oArgs.edit == 'help':
@ -1401,8 +1468,10 @@ if __name__ == '__main__':
sys.exit(0) sys.exit(0)
vSetupLogging(oArgs.log_level) vSetupLogging(oArgs.log_level)
iLOG_LEVEL = oArgs.log_level
i = 0 i = 0
for sProOrNodes in oArgs.lprofile: for sProOrNodes in oArgs.lprofile:
i = iMain(sProOrNodes, oArgs) i = iMainFun(sProOrNodes, oArgs)
sys.exit(i) if __name__ == '__main__':
sys.exit(iMain(sys.argv[1:]))

24
tox_profile_examples.bash Normal file
View File

@ -0,0 +1,24 @@
#!/bin/sh
# -*- mode: sh; fill-column: 75; tab-width: 8; coding: utf-8-unix -*-
# some examples of tox-profile usage
# Requires: python3, tox_profile.py in the cwd, the toxygen_wrapper
# sources on PYTHONPATH, and (for the nmap step) nmap + sudo.
# was '#!/bin/sh -e': options on the shebang line are lost when the file
# is run as 'sh tox_profile_examples.bash', so set -e explicitly here
set -e

export PYTHONPATH=/mnt/o/var/local/src/toxygen_wrapper.git
TOX_HOME=$HOME/.config/tox
# multi-word command: must stay quoted wherever it is expanded so that
# argparse receives it as a single --nmap_cmd value
NMAP_CMD='sudo -u debian-tor nmap'

echo INFO: check the download json file
python3 tox_profile.py --command nodes --nodes check \
    "$TOX_HOME/DHTnodes.json.new" \
    2>&1 | tee /tmp/DHTnodes.json.log

echo INFO: get the tcp nodes/ports from the downloaded json file
python3 tox_profile.py --command nodes --nodes select_tcp \
    --output /tmp/DHTnodes.json.tcp \
    "$TOX_HOME/DHTnodes.json.new"

echo INFO: run ping/nmap on the tcp nodes/ports from the downloaded json file
# was: --nmap_cmd $NMAP_CMD -- unquoted, the value word-split into four
# separate arguments and broke option parsing
python3 tox_profile.py --command nodes --nodes nmap_tcp \
    --nmap_cmd "$NMAP_CMD" \
    --output /tmp/DHTnodes.json.tcp.out \
    /tmp/DHTnodes.json.tcp

340
tox_profile_test.bash Executable file
View File

@ -0,0 +1,340 @@
#!/bin/sh
# -*- mode: sh; fill-column: 75; tab-width: 8; coding: utf-8-unix -*-
# tox_profile.py has a lot of features so it needs test coverage
# Integration-test driver: numbered sections (i=0..7) exercise the
# decrypt/info/edit/nodes/onions commands.  Exit codes are the section
# number followed by the step number (e.g. exit 23 = section 2, step 3).
# Optionally pass a single section number as $1 to run only that section.
PREFIX=/mnt/o/var/local
ROLE=text
DEBUG=1
# EXE is a local python3 wrapper script; target is the script under test
EXE=/var/local/bin/python3.bash
WRAPPER=$PREFIX/src/toxygen_wrapper.git
# an existing (encrypted) toxic profile is the primary test input
tox=$HOME/.config/tox/toxic_profile.tox
[ -s $tox ] || exit 2
target=$PREFIX/src/tox_profile/tox_profile.py
OUT=/tmp/toxic_profile
# HAVE_TOR=1 when a tor process appears to run AND something listens on :9050
# NOTE(review): 'ps ax | grep -q tor' can match the grep process itself;
# the netstat :9050 check is the effective gate -- confirm
ps ax | grep -q tor && netstat -n4le | grep -q :9050
[ $? -eq 0 ] && HAVE_TOR=1 || HAVE_TOR=0
# use the colorized DBUG/INFO/WARN/ERROR helpers when installed,
# otherwise fall back to plain echo equivalents
[ -f /usr/local/bin/usr_local_tput.bash ] && \
    . /usr/local/bin/usr_local_tput.bash || {
	DBUG() { echo DEBUG $* ; }
	INFO() { echo INFO $* ; }
	WARN() { echo WARN $* ; }
	ERROR() { echo ERROR $* ; }
    }
# Locate libtoxcore when TOXCORE_LIBS is not already set: symlink the
# shared libraries of an installed tox client (qtox or toxic) into ./libs
# and export TOXCORE_LIBS so the wrapper can dlopen them.
if [ -z "$TOXCORE_LIBS" ] && [ ! -d libs ] ; then
    mkdir libs
    cd libs || exit 1
    # e.g. /lib/x86_64-linux-gnu/libtoxcore.so.2
    for pro in qtox toxic ; do
	if command -v $pro > /dev/null 2>&1 ; then
	    DBUG linking to $pro libtoxcore
	    # pull the resolved libtoxcore path out of the client's ldd output
	    lib=$( ldd "$(command -v $pro)" | grep libtoxcore | sed -e 's/.* => //' -e 's/ .*//' )
	    # was: WARN $Lib -- '$Lib' was an undefined variable (case typo)
	    [ -n "$lib" ] && [ -f "$lib" ] || { WARN "$lib" ; continue ; }
	    INFO linking to "$lib"
	    # all three wrapper libraries point at the same resolved file
	    for elt in libtoxcore.so libtoxav.so libtoxencryptsave.so ; do
		ln -s "$lib" "$elt"
	    done
	    export TOXCORE_LIBS=$PWD
	    break
	fi
    done
    cd ..
elif [ -z "$TOXCORE_LIBS" ] && [ -d libs ] ; then
    # a previous run already populated ./libs
    export TOXCORE_LIBS=$PWD/libs
fi
# Sanity checks: the script under test, the toxygen wrapper, and a nodes
# json file must all exist before any section runs.
# set -- -e
[ -s "$target" ] || exit 1
[ -d "$WRAPPER" ] || {
    ERROR wrapper is required https://git.plastiras.org/emdee/toxygen_wrapper
    exit 3
}
export PYTHONPATH=$WRAPPER
json=$HOME/.config/tox/DHTnodes.json
[ -s "$json" ] || exit 4
# record optional tool availability; sections guard on these flags
# (command -v is the portable replacement for 'which')
command -v jq > /dev/null && HAVE_JQ=1 || HAVE_JQ=0
command -v nmap > /dev/null && HAVE_NMAP=1 || HAVE_NMAP=0
# start from a clean slate of output files ($OUT.* glob intentionally unquoted)
sudo rm -f $OUT.* /tmp/toxic_nodes.*
#######################################
# Validate a JSON file by round-tripping it through jq.
# Globals:   i (read, only for log prefixes), json (read, in error text)
# Arguments: $1 input JSON file, $2 output file, $3 error file
# Outputs:   pretty-printed JSON to $2; jq diagnostics to $3 ($3 is
#            removed again when it ends up empty)
# Returns:   0 on success; 3 bad arg count, 4 empty input, 5 jq failed,
#            6 jq printed "error:", 7 empty output
#######################################
test_jq () {
    [ $# -eq 3 ] || {
	ERROR test_jq '#' "$@"
	return 3
    }
    in=$1
    out=$2
    err=$3
    # refuse to "validate" a missing or empty input file
    [ -s "$in" ] || {
	ERROR $i test_jq null "$in"
	return 4
    }
    # identity filter: parses (validates) and pretty-prints in one pass
    jq . < "$in" > "$out" 2> "$err" || {
	ERROR $i test_jq $json
	return 5
    }
    # jq can exit 0 yet still emit "error:" diagnostics on stderr
    grep error: "$err" && {
	ERROR $i test_jq $json
	return 6
    }
    [ -s "$out" ] || {
	ERROR $i null "$out"
	return 7
    }
    # keep the error file only when it has content
    [ -s "$err" ] || rm -f "$err"
    return 0
}
i=0
# Section 0: validate the source nodes file with jq (when available),
# then work on a copy under /tmp so the original is never modified.
if [ "$HAVE_JQ" != 0 ] ; then
    test_jq "$json" /tmp/toxic_nodes.json /tmp/toxic_nodes.err || {
	rv=$?
	ERROR test_jq failed on "$json"
	# was: exit ${i}$? -- by then $? held ERROR's (successful) status,
	# so a validation failure exited 00, i.e. success; capture first
	exit "${i}${rv}"
    }
fi
[ -f /tmp/toxic_nodes.json ] || cp -p "$json" /tmp/toxic_nodes.json
json=/tmp/toxic_nodes.json
i=1
# Section 1: decrypt the profile (tox_profile.py prompts for the
# password), then dump its info and write a re-saved copy; every later
# section operates on the decrypted $OUT.bin.
# required password
INFO $i decrypt $OUT.bin
$EXE $target --command decrypt --output $OUT.bin $tox || exit ${i}1
[ -s $OUT.bin ] || exit ${i}2
# from here on, the decrypted file is the profile under test
tox=$OUT.bin
INFO $i info $tox
$EXE $target --command info --info info $tox 2>$OUT.info || {
    ERROR $i $EXE $target --command info --info info $tox
    exit ${i}3
}
[ -s $OUT.info ] || exit ${i}4
INFO $i $EXE $target --command info --info save --output $OUT.save $tox
$EXE $target --command info --info save --output $OUT.save $tox 2>/dev/null || {
    ERROR $?
    exit ${i}5
}
[ -s $OUT.save ] || exit ${i}6
i=2
# Section 2: dump the profile in every info format (json/yaml/pprint/repr)
# and exercise --command edit round-trips, on both the decrypted profile
# and its re-saved copy.
# Section-select guard idiom used below: the for loop runs only when no
# section number was passed, or when $1 selects this section; '! INFO'
# logs the section name and, negated, forces the next '||' branch to run.
# NOTE(review): '-a $1 -ne $i' is a *numeric* comparison and [ emits an
# error when $1 is empty/non-numeric; later sections use the string form
# "$1" != "$i" -- confirm which is intended
[ $# -ne 0 -a $1 -ne $i ] || \
    ! INFO $i Info and editing || \
    for the_tox in $tox $OUT.save ; do
	DBUG $i $the_tox
	# strip the extension to build per-format output file names
	the_base=`echo $the_tox | sed -e 's/.save$//' -e 's/.tox$//'`
	for elt in json yaml pprint repr ; do
	    if [ $elt = yaml -o $elt = json ] ; then
		# ModuleNotFoundError
		# skip formats whose python module is not installed
		python3 -c "import $elt" 2>/dev/null || continue
	    fi
	    INFO $i $the_base.$elt
	    DBUG $EXE $target \
		 --command info --info $elt \
		 --output $the_base.$elt $the_tox '2>'$the_base.$elt.err
	    $EXE $target --command info --info $elt \
		 --output $the_base.$elt $the_tox 2>$the_base.$elt.err || {
		tail $the_base.$elt.err
		# yaml/json failures are tolerated; other formats are fatal
		if [ $elt != yaml -a $elt != json ] ; then
		    exit ${i}0
		else
		    WARN $elt
		fi
	    }
	    [ -s $the_base.$elt ] || {
		WARN no output $the_base.$elt
		# exit ${i}1
	    }
	done
	DBUG $EXE $target --command edit --edit help $the_tox
	$EXE $target --command edit --edit help $the_tox 2>/dev/null || exit ${i}2
	# edit the status message
	INFO $i $the_base.Status_message 'STATUSMESSAGE,.,Status_message,Toxxed on Toxic'
	$EXE $target --command edit --edit 'STATUSMESSAGE,.,Status_message,Toxxed on Toxic' \
	     --output $the_base.Status_message.tox $the_tox 2>&1|grep EDIT || exit ${i}3
	[ -s $the_base.Status_message.tox ] || exit ${i}3
	# verify the edit took by re-reading the edited profile
	$EXE $target --command info $the_base.Status_message.tox 2>&1|grep Toxxed || exit ${i}4
	# edit the nick_name
	INFO $i $the_base.Nick_name 'NAME,.,Nick_name,FooBar'
	$EXE $target --command edit --edit 'NAME,.,Nick_name,FooBar' \
	     --output $the_base.Nick_name.tox $the_tox 2>&1|grep EDIT || exit ${i}5
	[ -s $the_base.Nick_name.tox ] || exit ${i}5
	$EXE $target --command info $the_base.Nick_name.tox 2>&1|grep FooBar || exit ${i}6
	# set the DHTnodes to empty
	INFO $i $the_base.noDHT 'DHT,.,DHTnode,'
	$EXE $target --command edit --edit 'DHT,.,DHTnode,' \
	     --output $the_base.noDHT.tox $the_tox 2>&1|grep EDIT || exit ${i}7
	[ -s $the_base.noDHT.tox ] || exit ${i}7
	$EXE $target --command info $the_base.noDHT.tox 2>&1 | grep 'NO DHT' || exit ${i}8
done
i=3
# Section 3: run each --nodes subcommand over the nodes json and
# jq-validate whatever it produces (section skipped without jq).
[ "$#" -ne 0 -a "$1" != "$i" ] || \
    [ "$HAVE_JQ" = 0 ] || \
    ! INFO $i Nodes || \
    for the_json in $json ; do
	DBUG $i $the_json
	the_base=`echo $the_json | sed -e 's/.json$//' -e 's/.tox$//'`
	for nmap in clean check select_tcp select_udp select_version; do
	    $EXE $target --command nodes --nodes $nmap \
		 --output $the_base.$nmap.json $the_json || {
		WARN $i $the_json $nmap ${i}1
		continue
	    }
	    [ -s $the_base.$nmap.json ] || {
		WARN $i $the_json $nmap ${i}2
		continue
	    }
	    # select_tcp output must not contain nodes whose tcp is down
	    [ $nmap = select_tcp ] && \
		grep '"status_tcp": false' $the_base.$nmap.json && {
		    WARN $i $the_json $nmap ${i}3
		    continue
		}
	    # select_udp output must not contain nodes whose udp is down
	    [ $nmap = select_udp ] && \
		grep '"status_udp": false' $the_base.$nmap.json && {
		    WARN $i $the_json $nmap ${i}4
		    continue
		}
	    test_jq $the_base.$nmap.json $the_base.$nmap.json.out /tmp/toxic_nodes.err || {
		retval=$?
		# NOTE(review): '3$?' reads $? *after* the retval= assignment
		# (which resets it to 0), so this always logs 30;
		# '3$retval' was probably intended
		WARN $i $the_base.$nmap.json 3$?
	    }
	    INFO $i $the_base.$nmap
	done
done
i=4
##
# Section 4: --command onions against the system torrc (requires tor to
# be running and /etc/tor/torrc to exist).
[ $# -ne 0 -a "$1" -ne $i ] || \
    [ "$HAVE_TOR" = 0 ] || \
    [ ! -f /etc/tor/torrc ] || \
    ! INFO $i Onions || \
    for the_tox in /etc/tor/torrc ; do
	DBUG $i $the_tox
	the_base=`echo $OUT.save | sed -e 's/.save$//' -e 's/.tox$//'`
	# exits
	for slot in config test; do
	    # NOTE(review): dead guard -- 'exits' is not in the slot list
	    # above, so this branch can never fire
	    if [ $slot = exits ] && ! netstat -nle4 | grep -q :9050 ; then
		WARN Tor not running
		continue
	    fi
	    INFO $target --command onions --onions $slot \
		 --output $the_base.$slot.out $the_tox
	    DBUG=1 $EXE $target --command onions --onions $slot \
		--log_level 10 \
		--output $the_base.$slot.out $the_tox|| {
		WARN $i $?
		continue
	    }
	    # NOTE(review): '[ true -o ... ]' is always true, so the
	    # empty-output warning below is currently disabled -- confirm
	    # this is deliberate
	    [ true -o -s $the_base.$slot.out ] || {
		WARN $i empty $the_base.$slot.out
		continue
	    }
	done
done
# ls -l $OUT.* /tmp/toxic_nodes.*
# DEBUG=0 /usr/local/bin/proxy_ping_test.bash tor || exit 0
# everything below needs network access; bail out cleanly when there is
# no default route
ip route | grep ^def || exit 0
i=5
##
# Section 5: "Making dogfood" -- nmap-scan the relay/dht/path nodes found
# inside the profiles via --command info (requires jq and nmap).
the_tox=$tox
[ $# -ne 0 -a "$1" != "$i" ] || \
    [ "$HAVE_JQ" = 0 ] || \
    [ "$HAVE_NMAP" = 0 ] || \
    ! INFO $i Making dogfood || \
    for the_tox in $tox $OUT.save ; do
	DBUG $i $the_tox
	the_base=`echo $the_tox | sed -e 's/.save$//' -e 's/.tox$//'`
	for nmap in nmap_relay nmap_dht nmap_path ; do
	    # [ $nmap = select_tcp ] && continue
	    # skip the udp-based nmap_dht scan when tor is up -- presumably
	    # because udp does not traverse tor; TODO confirm
	    if [ $nmap = nmap_dht ] && [ $HAVE_TOR = 1 ] ; then
		INFO skipping $nmap because HAVE_TOR
		continue
	    fi
	    INFO $i $the_base.$nmap
	    DBUG $target --command info --info $nmap \
		 --output $the_base.$nmap.out $the_tox
	    $EXE $target --command info --info $nmap \
		 --output $the_base.$nmap.out $the_tox 2>$the_base.$nmap.err || {
		# select_tcp may be empty and jq errors
		# exit ${i}1
		WARN $i $? $the_base.$nmap.err
		tail $the_base.$nmap.err
		continue
	    }
	    [ -s $the_base.$nmap.out ] || {
		WARN $i empty $the_base.$nmap.out
		continue
	    }
	done
done
i=6
##
# Section 6: "Eating dogfood" -- feed the nodes json back through the
# nmap_tcp/nmap_udp node scans (section skipped without jq).
[ $# -ne 0 -a "$1" != "$i" ] || \
    [ "$HAVE_JQ" = 0 ] || \
    ! INFO $i Eating dogfood || \
    for the_json in $json ; do
	DBUG $i $the_json
	the_base=`echo $the_json | sed -e 's/.save$//' -e 's/.json$//'`
	for nmap in nmap_tcp nmap_udp ; do
	    # skip the udp scan when tor is up -- presumably because udp
	    # does not traverse tor; TODO confirm
	    if [ $nmap = nmap_udp ] && [ $HAVE_TOR = 1 ] ; then
		INFO skipping $nmap because HAVE_TOR
		continue
	    fi
	    INFO $i $target --command nodes --nodes $nmap --output $the_base.$nmap
	    $EXE $target --command nodes --nodes $nmap \
		 --output $the_base.$nmap $the_json 2>$the_base.$nmap.err || {
		WARN $i $the_json $nmap ${i}1
		continue
	    }
	    # empty output here is fatal, unlike the soft warnings above
	    [ -s $the_base.$nmap ] || {
		ERROR $i $the_json $nmap ${i}2
		exit ${i}2
	    }
	done
done
i=7
# Section 7: download a fresh nodes file and jq-validate it.
DBUG $i
$EXE $target --command nodes --nodes download \
     --output /tmp/toxic_nodes.new "$json" || {
    ERROR $i $EXE $target --command nodes --nodes download "$json"
    exit ${i}1
}
[ -s /tmp/toxic_nodes.new ] || exit ${i}4
INFO $i downloaded /tmp/toxic_nodes.new
json=/tmp/toxic_nodes.new
# run_section mirrors the guard idiom of the earlier sections: run unless
# a different section number was passed as $1
run_section=1
[ $# -ne 0 ] && [ "$1" != "$i" ] && run_section=0
if [ "$run_section" = 1 ] && [ "$HAVE_JQ" != 0 ] ; then
    jq . < "$json" > /tmp/toxic_nodes.new.json 2>>/tmp/toxic_nodes.new.json.err || {
	ERROR $i jq "$json"
	exit ${i}2
    }
fi
INFO $i jq from /tmp/toxic_nodes.new.json
# was: [ guard ] || [ guard ] || grep error: file && { ...; exit ${i}3 }
# '&&' bound to the whole '||' chain, so a *successful* skip-guard (args
# given, $1 != 7) also took the error exit; an explicit if has the
# intended meaning (exit only when jq actually reported errors)
if [ "$run_section" = 1 ] && [ "$HAVE_JQ" != 0 ] && \
       grep error: /tmp/toxic_nodes.new.json.err ; then
    ERROR $i jq "$json"
    exit ${i}3
fi
INFO $i no errors in /tmp/toxic_nodes.new.json.err
exit 0