This commit is contained in:
SG
2023-05-01 15:10:23 +02:00
commit 34038da82e
6 changed files with 579 additions and 0 deletions

296
.gitignore vendored Normal file
View File

@@ -0,0 +1,296 @@
# ---> Python
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
# ---> Node
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
.cache
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
# iso files
*.iso

2
README.md Normal file
View File

@@ -0,0 +1,2 @@
# Transmat

16
algorithm.md Normal file
View File

@@ -0,0 +1,16 @@
0: **Server** knows no peers
1: **Sender** connects, sends _ANNOUNCE_, waits for **Receiver**, **Sender** is ready to send data
1.1 **Server** knows 1 peer - **Sender**
2: **Receiver** connects, sends _ANNOUNCE_, waits for **Sender**, **Receiver** is ready to receive data
2.2 **Server** knows 2 peers
2.3 **Server** relays _ANNOUNCE_ to **Sender**
3: **Sender** receives _ANNOUNCE_
3.1 **Sender** sends _DATA_ to **Server**
3.2 **Server** relays _DATA_ from **Sender** to **Receiver**
3.3 **Receiver** saves DATA
4: File transfer is complete, **Sender** sends _COMPLETE_ to **Server**
4.1: **Server** relays _COMPLETE_ to **Receiver**
4.2: **Receiver** sends _COMPLETE_ to **Server**
5.1: **Server** removes peers
5.2 **Sender** exits
5.3 **Receiver** exits

9
requirements.txt Normal file
View File

@@ -0,0 +1,9 @@
# NOTE: do not pin "asyncio" from PyPI — it is an obsolete Python 3.3 backport
# that shadows the standard-library asyncio module on modern interpreters.
# asyncio==3.4.3
cffi==1.15.1
cryptography==40.0.2
lz4==4.3.2
progress==1.6
pycparser==2.21
pyjson==1.3.0
websockets==11.0.2
xkcdpass==1.19.3

198
transmat.py Executable file
View File

@@ -0,0 +1,198 @@
#!/usr/bin/env python
import argparse
import asyncio
import base64
import hashlib
import json
import os
import pickle
import sys

import websockets
from cryptography.fernet import Fernet
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
from progress.bar import Bar
from xkcdpass import xkcd_password as xp
VERSION_NUMBER="0.1"
async def read_chunks(filename, chunk_size = 1024):
    """Asynchronously yield the contents of *filename* in byte chunks.

    Yields chunks of at most *chunk_size* bytes until EOF.
    NOTE(review): the file reads themselves are blocking; acceptable here
    because the event loop only interleaves them with short network awaits.
    """
    with open(filename, "rb") as fh:
        # iter() with a b"" sentinel stops exactly when read() returns empty.
        for chunk in iter(lambda: fh.read(chunk_size), b""):
            yield chunk
def derive_key_from_password(password):
    """Derive a urlsafe-base64 Fernet key from the shared xkcd-style password.

    The word before the first hyphen is hashed (SHA-256) into the PBKDF2
    salt; the remainder of the password is the KDF input.  1,000,000
    iterations of PBKDF2-HMAC-SHA256 yield a 32-byte key, returned
    urlsafe-base64-encoded as Fernet requires.

    Uses stdlib hashlib.pbkdf2_hmac, which produces byte-identical output
    to cryptography's PBKDF2HMAC(SHA256) with the same parameters.
    """
    salt_part, _, key_part = password.partition('-')
    salt = hashlib.sha256(salt_part.encode('utf-8')).digest()
    key = hashlib.pbkdf2_hmac(
        'sha256',
        key_part.encode('utf-8'),
        salt,
        1000000,  # deliberately slow to resist brute-forcing the password
        dklen=32,
    )
    return base64.urlsafe_b64encode(key)
def get_peer_group_id(password):
    """Return the hex SHA-256 of the password's first word.

    Both peers derive the same id from the shared password; the relay
    server uses it to match a sender with its receiver.  Only the first
    hyphen-separated word is hashed, so the full password never reaches
    the server in any recoverable form.
    """
    peer_group_id_part = password.split('-')[0]
    return hashlib.sha256(peer_group_id_part.encode('utf-8')).hexdigest()
def encrypt_chunk(key, chunk):
    """Encrypt *chunk* (bytes) with Fernet and return the token as a str."""
    token = Fernet(key).encrypt(chunk)
    return token.decode('utf-8')
def decrypt_chunk(key, chunk):
    """Decrypt a utf-8 Fernet token string back into the original bytes."""
    return Fernet(key).decrypt(chunk.encode('utf-8'))
async def send_message(websocket, msg):
    """Send *msg* over *websocket*.

    NOTE(review): functional duplicate of send_msg below; retained so any
    external caller keeps working.
    """
    await websocket.send(msg)
async def send_msg(ws, msg):
    """Send *msg* over the websocket connection *ws*."""
    await ws.send(msg)
async def send_encrypted_msg(ws, k, data):
    """Pack, encrypt and send one protocol message.

    *data* is the 8-tuple (msgtype, peer_group_id, role, filename,
    chunk_size, chunk_id, number_of_chunks, chunk).  The file-related
    fields are pickled and Fernet-encrypted with key *k*; the routing
    fields travel in the clear so the relay server can dispatch them.
    """
    (msgtype, peer_group_id, role,
     filename, chunk_size, chunk_id, number_of_chunks, chunk) = data
    payload = pickle.dumps({
        "filename": filename,
        "chunk_size": chunk_size,
        "chunk_id": chunk_id,
        "number_of_chunks": number_of_chunks,
        "chunk": chunk,
    })
    envelope = json.dumps({
        "msgtype": msgtype,
        "peer_group_id": peer_group_id,
        "role": role,
        "payload": encrypt_chunk(k, payload),
    })
    await send_msg(ws, envelope)
async def main():
    """Command-line entry point.

    Parses arguments, derives the encryption key and peer-group id from the
    shared password, then either streams an encrypted file through the relay
    server (--send) or receives and reassembles one (--receive).
    """
    WS_RELAY_SERVER = "wss://transmat.exocortex.ru"
    parser = argparse.ArgumentParser()
    arg_group = parser.add_mutually_exclusive_group(required=True)
    arg_group.add_argument('--receive', '--recv', action='store_true', help='Receive a file from the remote party (mutually exclusive with --send and --relay)')
    arg_group.add_argument('--send', type=str, help='Send a file to the remote party (mutually exclusive with --receive and --relay)')
    arg_group.add_argument('--version', action='store_true', help="Show version")
    #arg_group.add_argument('--relay', action='store_true', help='Run as a Relay server (mutually exclusive with --receive and --send)')
    parser.add_argument('--server', type=str, help="Specify the Relay server URL (ignored with --relay)")
    parser.add_argument('--password', type=str, help="Specify the shared password (for --receive)")
    args = parser.parse_args()
    # Pieces of the command line we print for the remote party to copy.
    passwd_part = server_part = ""
    send_part = f'{sys.argv[0]} --receive '
    if args.receive:
        role = 'receive'
        password = args.password
    if args.send and args.password is None:
        # Sender without an explicit password: generate a memorable xkcd-style one.
        wordlist = xp.generate_wordlist(wordfile = xp.locate_wordfile(), min_length = 5, max_length = 9)
        password = xp.generate_xkcdpassword(wordlist, numwords=4, delimiter = "-", case='capitalize')
        passwd_part = f"--password {password}"
    if args.receive and args.password is None:
        print("Error: --password required when receiving files.")
        sys.exit(1)
    if args.password:
        password = args.password
        passwd_part = f"--password {password}"
    if args.send:
        role = 'send'
        file_path = args.send
    if args.server:
        WS_RELAY_SERVER = args.server
        server_part = f'--server {WS_RELAY_SERVER}'
    if args.version:
        print(f"{sys.argv[0]} ver {VERSION_NUMBER}")
        sys.exit(0)
    # Accept http(s):// relay URLs for BOTH roles.  (Previously the scheme
    # was only rewritten on the send path, so --receive with an http URL
    # failed.)  A no-op for the default wss:// URL.
    WS_RELAY_SERVER = WS_RELAY_SERVER.replace('http', 'ws', 1)
    k = derive_key_from_password(password)
    peer_group_id = get_peer_group_id(password)
    if role == 'send':
        print('Run the following command on the remote party:\n\n', send_part, server_part, passwd_part, "\n")
        filename = os.path.basename(file_path)
        file_size = os.path.getsize(file_path)
        chunk_id = 0
        chunk_size = 1024 * 512
        # Ceiling division.  round() under-counts whenever the final chunk is
        # less than half full, which made the receiver break out before the
        # last chunk arrived and truncate the file.
        number_of_chunks = -(-file_size // chunk_size)
        async with websockets.connect(WS_RELAY_SERVER) as ws:
            # Announce ourselves, then wait for the receiver's announce
            # (relayed by the server) before streaming data.
            msgtype = "announce"
            await send_encrypted_msg(ws, k, (msgtype, peer_group_id, role, filename, "", "", number_of_chunks, ""))
            while True:
                message = await ws.recv()
                message = json.loads(message)
                if message["msgtype"] == "announce" and message["peer_group_id"] == peer_group_id:
                    break
            bar = Bar('Transferring', max=number_of_chunks, suffix='%(percent).1f%% complete - %(eta_td)s remaining')
            msgtype = "data"
            async for chunk in read_chunks(file_path, chunk_size):
                msg = (msgtype, peer_group_id, role, filename, chunk_size, chunk_id, number_of_chunks, chunk)
                await send_encrypted_msg(ws, k, msg)
                await asyncio.sleep(0.05)
                # Flow control: wait for the receiver's "proceed" ack before
                # sending the next chunk.
                proceed = await ws.recv()
                bar.next()
                chunk_id += 1
            bar.suffix = '%(percent).1f%% done'
            print("\n")
    if role == 'receive':
        async with websockets.connect(WS_RELAY_SERVER) as ws:
            msgtype = "announce"
            await send_encrypted_msg(ws, k, (msgtype, peer_group_id, role, "", "", "", "", ""))
            bar = None
            f = None
            i = 1  # 1-based count of chunks received so far
            while True:
                message = await ws.recv()
                message = json.loads(message)
                payload = message["payload"]
                # NOTE(review): pickle.loads on network data — acceptable only
                # because the payload must decrypt under the shared key first.
                msg = pickle.loads(decrypt_chunk(k, payload))
                if bar is None:
                    # First data message: learn the filename and total count.
                    filename = msg["filename"]
                    number_of_chunks = msg["number_of_chunks"]
                    bar = Bar('Receiving', max=number_of_chunks, suffix='%(percent).1f%% complete - %(eta_td)s remaining')
                if f is None:
                    f = open(msg["filename"], "wb")
                f.write(msg["chunk"])
                msgtype = "proceed"
                await send_encrypted_msg(ws, k, (msgtype, peer_group_id, role, "", "", "", "", ""))  # request the next chunk from sender
                i += 1
                bar.next()
                if i > msg["number_of_chunks"]:
                    bar.suffix = '%(percent).1f%% complete'
                    bar.update()
                    f.close()
                    print("\n")
                    break
# Guard so the module can be imported (e.g. for testing) without running.
if __name__ == "__main__":
    asyncio.run(main())

58
transphase.py Executable file
View File

@@ -0,0 +1,58 @@
#!/usr/bin/env python
import asyncio
import websockets
import logging
import json
# Timestamped, INFO-level logging for the relay process.
logging.basicConfig(
    format="%(asctime)s %(message)s",
    level=logging.INFO,
)
# Maps peer_group_id (hex str) -> set of connected websockets to relay between.
peer_list = {}
class LoggerAdapter(logging.LoggerAdapter):
    """Add connection ID and client IP address to websockets logs."""

    def process(self, msg, kwargs):
        """Prefix *msg* with connection id, remote IP and X-Forwarded-For.

        Records without a websocket in kwargs["extra"] pass through
        unchanged.
        """
        try:
            websocket = kwargs["extra"]["websocket"]
        except KeyError:
            # No websocket context attached to this record.
            return msg, kwargs
        rip = websocket.remote_address[0]
        try:
            xff = websocket.request_headers.get("X-Forwarded-For")
        except Exception:  # narrowed from bare except: don't mask SystemExit etc.
            xff = "None"
        # Keep the original message text — the previous version returned only
        # the connection info, silently dropping what was being logged.
        return f"{websocket.id} {rip} {xff} {msg}", kwargs
async def handler(websocket):
    """Relay every message from one peer to all other peers in its group.

    Peers self-identify via the "peer_group_id" field of each JSON message;
    a peer's socket is registered on its first message and unregistered
    (with empty groups removed) when the connection closes.
    """
    peer_group_id = None
    try:
        while not websocket.closed:
            message = await websocket.recv()
            msg = json.loads(message)
            if "peer_group_id" in msg:
                peer_group_id = msg["peer_group_id"]
                if peer_group_id not in peer_list:
                    peer_list[peer_group_id] = set()  # Create new set for all peers in peer_group_id
                peer_list[peer_group_id].add(websocket)  # Add peer's socket to peer_group
            # Relay the raw message to every other member of the group.
            for peer in peer_list[peer_group_id]:
                if peer != websocket:
                    await peer.send(message)
    except websockets.exceptions.ConnectionClosed:
        pass
    finally:
        # The connection may close before any message carried a
        # peer_group_id; previously this raised KeyError on peer_list[None].
        if peer_group_id is not None and peer_group_id in peer_list:
            peer_list[peer_group_id].discard(websocket)
            if not peer_list[peer_group_id]:
                peer_list.pop(peer_group_id)
async def main():
    # Serve the relay on all interfaces, port 8001.  LoggerAdapter enriches
    # the websockets server logs with per-connection information.
    async with websockets.serve(
        handler, "", 8001, ping_timeout=30,
        logger=LoggerAdapter(logging.getLogger("websockets.server"), None),
    ):
        await asyncio.Future()  # run forever
if __name__ == "__main__":
    # Run the relay server until interrupted.
    asyncio.run(main())