Removed a bunch of build stuff that shouldn't be there.
This commit is contained in:
@@ -1,172 +0,0 @@
|
|||||||
import datetime
|
|
||||||
import pe.app
|
|
||||||
from ZEO.asyncio.server import new_connection
|
|
||||||
from packetserver.common.testing import SimpleDirectoryConnection
|
|
||||||
from packetserver.common import Response, Message, Request, PacketServerConnection, send_response, send_blank_response
|
|
||||||
import ax25
|
|
||||||
import logging
|
|
||||||
import signal
|
|
||||||
import time
|
|
||||||
from threading import Lock
|
|
||||||
from msgpack import Unpacker
|
|
||||||
from msgpack.exceptions import OutOfData
|
|
||||||
from typing import Callable, Self, Union, Optional
|
|
||||||
from traceback import format_exc
|
|
||||||
from os import linesep
|
|
||||||
from shutil import rmtree
|
|
||||||
from threading import Thread
|
|
||||||
|
|
||||||
class Client:
    """Packet-radio client that talks to a PacketServer BBS through an AGWPE TNC.

    Wraps a ``pe.app.Application`` engine, managing per-destination connection
    locks, an optional request/response log, and graceful shutdown on
    SIGINT/SIGTERM.
    """

    def __init__(self, pe_server: str, port: int, client_callsign: str, keep_log=False):
        """Create a client for the AGWPE server at pe_server:port.

        Raises ValueError if client_callsign is not a valid AX.25 callsign.
        """
        if not ax25.Address.valid_call(client_callsign):
            raise ValueError(f"Provided callsign '{client_callsign}' is invalid.")
        self.pe_server = pe_server
        self.pe_port = port
        self.callsign = client_callsign
        self.app = pe.app.Application()
        self.started = False
        # One Lock per destination callsign so only one thread opens/uses a
        # connection to a given remote at a time.
        self._connection_locks = {}
        # Guards creation of entries in _connection_locks.
        self.lock_locker = Lock()
        self.keep_log = keep_log
        self.request_log = []
        # Shut down cleanly on Ctrl-C / termination.
        signal.signal(signal.SIGINT, self.exit_gracefully)
        signal.signal(signal.SIGTERM, self.exit_gracefully)

    def exit_gracefully(self, signum, frame):
        """Signal handler: stop the client."""
        self.stop()

    def __del__(self):
        self.stop()

    @property
    def connections(self) -> dict:
        """Mapping of active connections (empty until start() is called).

        NOTE(review): reaches into pe.app private internals — fragile across
        pe versions.
        """
        if not self.started:
            return {}
        else:
            return self.app._engine._active_handler._handlers[1]._connection_map._connections

    def connection_exists(self, callsign: str):
        """Return True if an active connection to callsign exists."""
        if not ax25.Address.valid_call(callsign):
            raise ValueError("Must supply a valid callsign.")
        callsign = callsign.upper().strip()
        # Connection keys look like "<something>:<remote callsign>" —
        # compare only the callsign part.
        for key in self.connections.keys():
            if key.split(":")[1] == callsign:
                return True
        return False

    def connection_callsign(self, callsign: str):
        """Return the active connection to callsign, or None."""
        if not ax25.Address.valid_call(callsign):
            raise ValueError("Must supply a valid callsign.")
        callsign = callsign.upper().strip()
        for key in self.connections.keys():
            if key.split(":")[1] == callsign:
                return self.connections[key]
        return None

    def connection_for(self, callsign: str) -> Union[PacketServerConnection, SimpleDirectoryConnection]:
        """Return an existing connection to callsign or open a new one."""
        if not ax25.Address.valid_call(callsign):
            raise ValueError("Must supply a valid callsign.")
        callsign = callsign.upper().strip()
        if self.connection_exists(callsign):
            return self.connection_callsign(callsign)
        else:
            return self.new_connection(callsign)

    def stop(self):
        """Close all connections and stop the underlying engine."""
        self.started = False
        self.clear_connections()
        self.app.stop()
        self.connection_map = None

    def start(self):
        """Connect to the AGWPE server and register our callsign."""
        self.app.start(self.pe_server, self.pe_port)
        self.app.register_callsigns(self.callsign)
        self.connection_map = self.app._engine._active_handler._handlers[1]._connection_map
        self.started = True

    def clear_connections(self):
        """Close every open connection (no-op if engine never started)."""
        if self.app._engine is not None:
            cm = self.app._engine._active_handler._handlers[1]._connection_map
            for key in cm._connections.keys():
                cm._connections[key].close()

    def new_connection(self, dest: str) -> Union[PacketServerConnection, SimpleDirectoryConnection]:
        """Open a connection to dest, blocking until it reaches CONNECTED.

        Reuses an existing connection if one appears while waiting for the
        per-destination lock. Raises RuntimeError if the client is not
        started or the connection drops while connecting.
        """
        if not self.started:
            raise RuntimeError("Must start client before creating connections.")
        if not ax25.Address.valid_call(dest):
            raise ValueError(f"Provided destination callsign '{dest}' is invalid.")
        with self.lock_locker:
            if dest.upper() not in self._connection_locks:
                self._connection_locks[dest.upper()] = Lock()
        with self._connection_locks[dest.upper()]:
            conn = self.connection_callsign(dest.upper())
            if conn is not None:
                return conn

            conn = self.app.open_connection(0, self.callsign, dest.upper())
            while conn.state.name != "CONNECTED":
                if conn.state.name in ['DISCONNECTED', 'DISCONNECTING']:
                    raise RuntimeError("Connection disconnected unexpectedly.")
                time.sleep(.1)
            logging.debug(f"Connection to {dest} ready.")
            logging.debug("Allowing connection to stabilize for 8 seconds")
            time.sleep(8)
            return conn

    def receive(self, req: Request, conn: Union[PacketServerConnection,SimpleDirectoryConnection], timeout: int = 300):
        """Poll conn for a complete Response to req until timeout.

        Returns the Response, or None on disconnect/timeout.
        """
        cutoff_date = datetime.datetime.now() + datetime.timedelta(seconds=timeout)
        logging.debug(f"{datetime.datetime.now()}: Request timeout date is {cutoff_date}")
        while datetime.datetime.now() < cutoff_date:
            if conn.state.name != "CONNECTED":
                logging.error(f"Connection {conn} disconnected.")
                if self.keep_log:
                    self.request_log.append((req, None))
                return None
            try:
                unpacked = conn.data.unpack()
            # FIX: was a bare `except:` — only OutOfData (incomplete msgpack
            # object buffered so far) means "poll again"; anything else
            # should propagate.
            except OutOfData:
                time.sleep(.1)
                continue
            msg = Message.partial_unpack(unpacked)
            resp = Response(msg)
            return resp
        logging.warning(f"{datetime.datetime.now()}: Request {req} timed out.")
        return None

    def send_and_receive(self, req: Request, conn: Union[PacketServerConnection,SimpleDirectoryConnection],
                         timeout: int = 300) -> Optional[Response]:
        """Send req over conn and wait for its Response (None on timeout).

        Raises RuntimeError if conn is not connected. Serialized per
        destination via _connection_locks.
        """
        if conn.state.name != "CONNECTED":
            raise RuntimeError("Connection is not connected.")
        logging.debug(f"Sending request {req}")
        dest = conn.remote_callsign.upper()
        with self.lock_locker:
            if dest not in self._connection_locks:
                self._connection_locks[dest] = Lock()
        with self._connection_locks[dest]:
            # Fresh Unpacker so stale buffered bytes can't be mistaken for
            # this request's response.
            conn.data = Unpacker()
            conn.send_data(req.pack())
            resp = self.receive(req, conn, timeout=timeout)
            # FIX: honor keep_log here for consistency with receive(), which
            # already guards its disconnect-path append on the flag.
            if self.keep_log:
                self.request_log.append((req, resp))
            return resp

    def send_receive_callsign(self, req: Request, callsign: str, timeout: int = 300) -> Optional[Response]:
        """Send req to callsign (opening a connection if needed)."""
        return self.send_and_receive(req, self.connection_for(callsign), timeout=timeout)

    def single_connect_send_receive(self, dest: str, req: Request, timeout: int = 300) -> Optional[Response]:
        """Connect to dest, send one request, close, and return the Response.

        Returns None on disconnect or timeout.
        """
        conn = self.new_connection(dest)
        logging.debug("Waiting for connection to be ready.")
        cutoff_date = datetime.datetime.now() + datetime.timedelta(seconds=timeout)

        while (datetime.datetime.now() < cutoff_date) and (conn.state.name != "CONNECTED"):
            if conn.state.name in ["DISCONNECTED", "DISCONNECTING"]:
                logging.error(f"Connection {conn} disconnected.")
                return None
            # FIX: original loop spun at 100% CPU; brief sleep matches the
            # polling cadence used in new_connection().
            time.sleep(.1)

        remaining_time = int((cutoff_date - datetime.datetime.now()).total_seconds()) + 1
        if remaining_time <= 0:
            logging.debug("Connection attempt timed out.")
            conn.close()
            return None
        response = self.send_and_receive(req, conn, timeout=int(remaining_time))
        conn.close()
        return response
|
|
||||||
@@ -1,73 +0,0 @@
|
|||||||
from packetserver.client import Client
|
|
||||||
from packetserver.common import Request, Response, PacketServerConnection
|
|
||||||
from typing import Union, Optional
|
|
||||||
import datetime
|
|
||||||
import time
|
|
||||||
|
|
||||||
class BulletinWrapper:
    """Read-only convenience wrapper over a bulletin dict from the server."""

    def __init__(self, data: dict):
        """Wrap *data*; raise ValueError unless all bulletin keys are present."""
        required = ('author', 'id', 'subject', 'body', 'created_at', 'updated_at')
        if any(key not in data for key in required):
            raise ValueError("Was not given a bulletin dictionary.")
        self.data = data

    def __repr__(self):
        return f"<Bulletin {self.id} - {self.author}>"

    @property
    def id(self) -> int:
        """Server-assigned bulletin id."""
        return self.data['id']

    @property
    def created(self) -> datetime.datetime:
        """Creation timestamp, parsed from the ISO-8601 'created_at' field."""
        return datetime.datetime.fromisoformat(self.data['created_at'])

    @property
    def updated(self) -> datetime.datetime:
        """Last-update timestamp, parsed from the ISO-8601 'updated_at' field."""
        return datetime.datetime.fromisoformat(self.data['updated_at'])

    @property
    def author(self) -> str:
        """Callsign/username of the bulletin's author."""
        return self.data['author']

    @property
    def subject(self) -> str:
        """Bulletin subject line."""
        return self.data['subject']

    @property
    def body(self) -> str:
        """Bulletin body text."""
        return self.data['body']
|
|
||||||
|
|
||||||
def post_bulletin(client: Client, bbs_callsign: str, subject: str, body: str) -> int:
    """Post a bulletin to the BBS and return the new bulletin's id.

    Raises RuntimeError if the server does not respond or rejects the post.
    """
    req = Request.blank()
    req.path = "bulletin"
    req.payload = {'subject': subject, 'body': body}
    req.method = Request.Method.POST
    response = client.send_receive_callsign(req, bbs_callsign)
    # FIX: send_receive_callsign returns None on timeout/disconnect; the
    # original dereferenced response.status_code and crashed with
    # AttributeError in that case.
    if response is None:
        raise RuntimeError("Posting bulletin failed: no response from server.")
    if response.status_code != 201:
        raise RuntimeError(f"Posting bulletin failed: {response.status_code}: {response.payload}")
    return response.payload['bulletin_id']
|
|
||||||
|
|
||||||
def get_bulletin_by_id(client: Client, bbs_callsign: str, bid: int) -> BulletinWrapper:
    """Fetch a single bulletin by id from the BBS.

    Raises RuntimeError if the server does not respond or returns non-200.
    """
    req = Request.blank()
    req.path = "bulletin"
    req.set_var('id', bid)
    req.method = Request.Method.GET
    response = client.send_receive_callsign(req, bbs_callsign)
    # FIX: guard the None (timeout/disconnect) return before reading
    # status_code — original raised AttributeError instead.
    if response is None:
        raise RuntimeError(f"GET bulletin {bid} failed: no response from server.")
    if response.status_code != 200:
        raise RuntimeError(f"GET bulletin {bid} failed: {response.status_code}: {response.payload}")
    return BulletinWrapper(response.payload)
|
|
||||||
|
|
||||||
def get_bulletins_recent(client: Client, bbs_callsign: str, limit: Optional[int] = None) -> list[BulletinWrapper]:
    """Fetch the most recent bulletins (optionally at most *limit* of them).

    Raises RuntimeError if the server does not respond or returns non-200.
    """
    req = Request.blank()
    req.path = "bulletin"
    req.method = Request.Method.GET
    if limit is not None:
        req.set_var('limit', limit)
    response = client.send_receive_callsign(req, bbs_callsign)
    # FIX: guard the None (timeout/disconnect) return before reading
    # status_code — original raised AttributeError instead.
    if response is None:
        raise RuntimeError("Listing bulletins failed: no response from server.")
    if response.status_code != 200:
        raise RuntimeError(f"Listing bulletins failed: {response.status_code}: {response.payload}")
    # Payload is a list of bulletin dicts; wrap each one.
    return [BulletinWrapper(b) for b in response.payload]
|
|
||||||
@@ -1,193 +0,0 @@
|
|||||||
import click
|
|
||||||
from packetserver.client.cli.config import get_config, default_app_dir, config_path
|
|
||||||
from packetserver.client.cli.constants import DEFAULT_DB_FILE
|
|
||||||
from packetserver.client import Client
|
|
||||||
from packetserver.common.constants import yes_values
|
|
||||||
from packetserver.common import Request, Response
|
|
||||||
from packetserver.client.cli.util import format_list_dicts, exit_client
|
|
||||||
from packetserver.client.cli.job import job
|
|
||||||
from packetserver.client.cli.object import objects
|
|
||||||
from packetserver.client.cli.message import message
|
|
||||||
import ZODB
|
|
||||||
import ZODB.FileStorage
|
|
||||||
import ax25
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
import json
|
|
||||||
import os.path
|
|
||||||
from pathlib import Path
|
|
||||||
from packetserver.client import Client
|
|
||||||
from packetserver.client import users
|
|
||||||
from packetserver.client.users import get_user_by_username, UserWrapper
|
|
||||||
|
|
||||||
VERSION="0.1.0-alpha"
|
|
||||||
|
|
||||||
@click.group()
@click.option('--conf', default=config_path(), help='path to configfile')
@click.option('--server', '-s', default='', help="server radio callsign to connect to (required)",
              envvar='PSCLIENT_SERVER')
@click.option('--agwpe', '-a', default='', help="AGWPE TNC server address to connect to (config file)",
              envvar='PSCLIENT_AGWPE')
@click.option('--port', '-p', default=0, help="AGWPE TNC server port to connect to (config file)",
              envvar='PSCLIENT_PORT')
@click.option('--callsign', '-c', default='', help="radio callsign[+ssid] of this client station (config file)",
              envvar='PSCLIENT_CALLSIGN')
@click.option('--keep-log', '-k', is_flag=True, default=False, help="Save local copy of request log after session ends?")
@click.version_option(VERSION,"--version", "-v")
@click.pass_context
def cli(ctx, conf, server, agwpe, port, callsign, keep_log):
    """Command line interface for the PacketServer client and server API."""
    ctx.ensure_object(dict)
    cfg = get_config(config_file_path=conf)

    # keep_log: CLI flag wins, else [cli] config key.
    ctx.obj['keep_log'] = False
    if keep_log:
        ctx.obj['keep_log'] = True
    else:
        if cfg['cli'].get('keep_log', fallback='n') in yes_values:
            ctx.obj['keep_log'] = True

    # Station callsign: flag > config > interactive prompt.
    if callsign.strip() != '':
        ctx.obj['callsign'] = callsign.strip().upper()
    else:
        if 'callsign' in cfg['cli']:
            ctx.obj['callsign'] = cfg['cli']['callsign']
        else:
            ctx.obj['callsign'] = click.prompt('Please enter your station callsign (with ssid if needed)', type=str)

    ctx.obj['directory'] = cfg['cli']['directory']

    if not ax25.Address.valid_call(ctx.obj['callsign']):
        click.echo(f"Provided client callsign '{ctx.obj['callsign']}' is invalid.", err=True)
        sys.exit(1)

    # BBS server callsign: flag > config > interactive prompt.
    if server.strip() != '':
        ctx.obj['server'] = server.strip().upper()
    else:
        if 'server' in cfg['cli']:
            ctx.obj['server'] = cfg['cli']['server']
        else:
            ctx.obj['server'] = click.prompt('Please enter the bbs station callsign (with ssid if needed)', type=str)

    if not ax25.Address.valid_call(ctx.obj['server']):
        click.echo(f"Provided remote server callsign '{ctx.obj['server']}' is invalid.", err=True)
        sys.exit(1)

    # AGWPE TNC host: flag > config > localhost.
    if agwpe.strip() != '':
        ctx.obj['agwpe_server'] = agwpe.strip()
    else:
        if 'agwpe_server' in cfg['cli']:
            ctx.obj['agwpe_server'] = cfg['cli']['agwpe_server']
        else:
            ctx.obj['agwpe_server'] = 'localhost'

    # AGWPE TCP port: flag > config > 8000.
    if port != 0:
        ctx.obj['port'] = port
    else:
        if 'port' in cfg['cli']:
            ctx.obj['port'] = int(cfg['cli']['port'])
        else:
            ctx.obj['port'] = 8000

    # Local ZODB store used by subcommands for caching/persistence.
    storage = ZODB.FileStorage.FileStorage(os.path.join(cfg['cli']['directory'], DEFAULT_DB_FILE))
    db = ZODB.DB(storage)
    if 'TEST_SERVER_DIR' in os.environ:
        # Test harness: talk to a directory-backed fake server instead of a TNC.
        from packetserver.client.testing import TestClient
        client = TestClient(os.environ['TEST_SERVER_DIR'], ctx.obj['callsign'])
    else:
        client = Client(ctx.obj['agwpe_server'], ctx.obj['port'], ctx.obj['callsign'], keep_log=ctx.obj['keep_log'])
    try:
        client.start()
    except Exception as e:
        click.echo(f"Error connecting to TNC: {str(e)}", err=True)
        sys.exit(1)

    ctx.obj['client'] = client
    ctx.obj['CONFIG'] = cfg
    # FIX: original stored the raw --server flag here, which is '' whenever
    # the callsign came from the config file or the prompt; subcommands read
    # ctx.obj['bbs'], so use the resolved value.
    ctx.obj['bbs'] = ctx.obj['server']
    ctx.obj['db'] = db
|
|
||||||
|
|
||||||
@click.command()
@click.pass_context
def query_server(ctx):
    """Query the server for basic info."""
    client = ctx.obj['client']
    # Blank GET against the server root returns its info document.
    req = Request.blank()
    req.path = ""
    req.method = Request.Method.GET
    resp = client.send_receive_callsign(req, ctx.obj['bbs'])
    if resp is None:
        # Timeout or disconnect — nothing came back at all.
        click.echo(f"No response from {ctx.obj['bbs']}")
        exit_client(ctx.obj, 1)
    elif resp.status_code != 200:
        exit_client(ctx.obj, 1, message=f"Error contacting server: {resp.payload}")
    else:
        click.echo(json.dumps(resp.payload, indent=2))
        exit_client(ctx.obj, 0)
|
|
||||||
|
|
||||||
|
|
||||||
@click.command()
@click.argument('username', required=False, default='')
@click.option('--list-users', '-l', is_flag=True, default=False, help="If set, downloads list of all users.")
@click.option("--output-format", "-f", default="table", help="Print data as table[default], list, or JSON",
              type=click.Choice(['table', 'json', 'list'], case_sensitive=False))
@click.pass_context
def user(ctx, list_users, output_format, username):
    """Query users on the BBS. Either listing multiple users or looking up information of USERNAME"""
    client = ctx.obj['client']

    # Argument validation: exactly one of USERNAME / --list-users.
    wanted = username.strip()
    if list_users and wanted != "":
        exit_client(ctx.obj,1, "Can't specify a username while listing all users.")
    if (not list_users) and wanted == "":
        exit_client(ctx.obj,1, message="Must provide either a username or --list-users flag.")

    output_objects = []
    try:
        if list_users:
            output_objects = users.get_users(client, ctx.obj['bbs'])
        else:
            output_objects.append(users.get_user_by_username(client, ctx.obj['bbs'], username))
    except Exception as e:
        exit_client(ctx.obj,1, str(e))
    finally:
        # Radio session is no longer needed once the data is in hand.
        client.stop()

    rows = [x.pretty_dict() for x in output_objects]
    click.echo(format_list_dicts(rows, output_format=output_format.lower()))
    exit_client(ctx.obj, 0)
|
|
||||||
|
|
||||||
@click.command()
@click.option('--email', '-e', type=str, default=None, help="Sets your e-mail address in your profile.")
@click.option('--bio', '-b', type=str, default=None, help="Sets your about you text in your profile.")
@click.option('--status', '-S', type=str, default=None,
              help="Sets your status (happy,sad, gone fishing, etc) in your profile.")
@click.option('--location', '-l', type=str, default=None,
              help="Sets your physical location (in whatever form you want) in your profile.")
@click.option('--socials', '-m', type=str, default=None,
              help="Comma (,) separated list of social media or websites you are known by.")
@click.pass_context
def set_user(ctx, email, bio, status, location, socials):
    """Set your user profile settings on the BBS."""
    client = ctx.obj['client']
    # Split the comma-separated socials string into a list; None stays None.
    social_list = socials.split(',') if type(socials) is str else None

    try:
        users.update_self(client, ctx.obj['bbs'], email=email, bio=bio, socials=social_list,
                          location=location, status=status)
        exit_client(ctx.obj, 0)
    except Exception as e:
        click.echo(str(e), err=True)
        exit_client(ctx.obj, 98)
|
|
||||||
|
|
||||||
# Register all subcommands on the root `cli` group. `job`, `objects`, and
# `message` are command groups imported from sibling modules.
cli.add_command(user)
cli.add_command(query_server)
cli.add_command(job, name='job')
cli.add_command(objects, name='object')
cli.add_command(set_user, name='set')
cli.add_command(message)

if __name__ == '__main__':
    cli()
|
|
||||||
@@ -1,24 +0,0 @@
|
|||||||
import os
|
|
||||||
import os.path
|
|
||||||
from configparser import ConfigParser
|
|
||||||
from pathlib import Path
|
|
||||||
from packetserver.client.cli.constants import DEFAULT_APP_DIR, DEFAULT_CONFIG_FILE
|
|
||||||
|
|
||||||
def default_app_dir() -> str:
    """Return the default application directory inside the user's home."""
    return str(Path.home() / DEFAULT_APP_DIR)
|
|
||||||
|
|
||||||
def config_path(app_path=default_app_dir()) -> str:
    """Return the CLI config file path inside *app_path*.

    Note: the default is evaluated once at import time.
    """
    return str(Path(app_path) / DEFAULT_CONFIG_FILE)
|
|
||||||
|
|
||||||
def get_config(config_file_path=config_path()) -> ConfigParser:
    """Load the CLI configuration file, tolerating its absence.

    Guarantees the returned parser has a [cli] section with a 'directory'
    key, so callers can read cfg['cli'][...] without checking.
    """
    config = ConfigParser()
    if os.path.isfile(config_file_path):
        config.read(config_file_path)

    # FIX(idiom): `if not 'cli' in config.sections()` -> has_section();
    # same behavior (DEFAULT is never listed), clearer intent.
    if not config.has_section('cli'):
        config.add_section('cli')

    if 'directory' not in config['cli']:
        config['cli']['directory'] = default_app_dir()

    return config
|
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
import os.path  # NOTE(review): unused in this module — candidate for removal

# Application data directory name, created under the user's home directory.
DEFAULT_APP_DIR = ".packetserver"
# CLI configuration file name (INI format, read by configparser).
DEFAULT_CONFIG_FILE = "cli.ini"
# ZODB FileStorage database file used by the CLI client.
DEFAULT_DB_FILE = "cli-client.zopedb"
|
|
||||||
@@ -1,103 +0,0 @@
|
|||||||
"""CLI client for dealing with jobs."""
|
|
||||||
import os
|
|
||||||
|
|
||||||
import click
|
|
||||||
from persistent.mapping import default
|
|
||||||
from packetserver.client import Client
|
|
||||||
from packetserver.client.jobs import JobSession, get_job_id, get_user_jobs, send_job, send_job_quick, JobWrapper
|
|
||||||
import datetime
|
|
||||||
from packetserver.client.cli.util import exit_client, format_list_dicts
|
|
||||||
|
|
||||||
@click.group()
@click.pass_context
def job(ctx):
    """Runs commands on the BBS server if jobs are enabled on it."""
    # Group container only — subcommands registered below do the work.
    pass
|
|
||||||
|
|
||||||
@click.command()
@click.pass_context
def start(ctx):
    """Start a job on the BBS server."""
    # TODO: not yet implemented (and not registered on the group yet).
    pass
|
|
||||||
|
|
||||||
|
|
||||||
@click.command()
@click.argument('job_id', required=False, type=int)
@click.option("--all-jobs", "-a", is_flag=True, default=False, help="Get all of your jobs.")
# FIX: flag default was True, which made fetch_data always False — the
# "fetch data" path was unreachable whether or not -n was passed.
@click.option("--no-data", '-n', is_flag=True, default=False,
              help="Don't fetch job result data, just metadata.")
@click.pass_context
def get(ctx, job_id, all_jobs, no_data): # TODO decide what to do with output and artifacts in a cli tool force full JSON?
    """Retrieve your jobs. Pass either '-a' or a job_id."""
    fetch_data = not no_data
    # job_id arrives as int (click type=int) or None when omitted.
    # FIX: the original coerced it to "" and called .strip(), which raised
    # AttributeError whenever a real (integer) job id was supplied.
    if all_jobs and (job_id is not None):
        click.echo("Can't use --all and specify a job_id.")
        # FIX: bail out — original fell through and ran the query anyway.
        exit_client(ctx.obj, 1)
    if (not all_jobs) and (job_id is None):
        click.echo("Must provide a job_id or --all-jobs.", err=True)
        exit_client(ctx.obj, 1)

    client = ctx.obj['client']
    try:
        if all_jobs:
            jobs_out = get_user_jobs(client, ctx.obj['bbs'], get_data=fetch_data)
        else:
            # FIX: original never passed the requested id to get_job_id.
            # NOTE(review): assumes get_job_id(client, bbs, job_id, get_data=...)
            # — confirm against packetserver.client.jobs.
            jobs_out = [get_job_id(client, ctx.obj['bbs'], job_id, get_data=fetch_data)]
        dicts_out = []
        for j in jobs_out:
            pass  # TODO: format each JobWrapper for output (see module TODO above)

    except Exception as e:
        click.echo(str(e), err=True)
        exit_client(ctx.obj, 1)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@click.command()
@click.option("--transcript", "-T", default="", help="File to write command transcript to if desired.")
@click.pass_context
def quick_session(ctx, transcript):
    """Start a session to submit multiple commands and receive responses immediately"""
    # Transcript entries are (timestamp, tag, text) where tag is
    # "c" = command, "r" = response, "e" = error.
    session_transcript = []
    client = ctx.obj['client']
    bbs = ctx.obj['bbs']
    js = JobSession(client, bbs, stutter=2)
    # db_enabled: whether the NEXT command is sent with db=True.
    # NOTE(review): starts True, so the very first command runs with db
    # enabled without the user typing /db — confirm this is intended.
    db_enabled = True
    while True:
        cmd = click.prompt("CMD", prompt_suffix=" >")
        cmd = cmd.strip()
        session_transcript.append((datetime.datetime.now(),"c",cmd))
        # Consume the one-shot /db request: remember it for this command,
        # then reset the latch.
        next_db = False
        if db_enabled:
            next_db = True
            db_enabled = False
        if cmd == "":
            continue
        if cmd == "/exit":
            break
        elif cmd == "/db":
            # Arm db mode for the next non-meta command.
            click.echo("DB requested for next command.")
            db_enabled = True
        else:
            try:
                # Commands run remotely via bash -c.
                job_result = js.send_quick(['bash', '-c', cmd], db=next_db)
                output = job_result.output_str + "\n"
                if job_result.errors_str != "":
                    output = output + "Errors: " + job_result.errors_str
                session_transcript.append((datetime.datetime.now(), "r", output))
                click.echo(output)
            except Exception as e:
                session_transcript.append((datetime.datetime.now(), "e", e))
                click.echo(f"ERROR! {str(e)}", err=True)
                continue
    try:
        # Persist the transcript (if requested) before shutting down.
        if transcript.strip() != "":
            with open(transcript.strip(), 'w') as tran_file:
                for l in session_transcript:
                    tran_file.write(f"{l[1]}:{l[0].isoformat()}: {l[2]}{os.linesep}")
    finally:
        exit_client(ctx.obj, 0)
|
|
||||||
|
|
||||||
|
|
||||||
# Register subcommands on the `job` group (note: `start` and `get` are
# defined above but not registered here).
job.add_command(quick_session)
|
|
||||||
@@ -1,213 +0,0 @@
|
|||||||
import os
|
|
||||||
import sys
|
|
||||||
import os.path
|
|
||||||
from email.policy import default
|
|
||||||
|
|
||||||
import click
|
|
||||||
from zodbpickle.pickle_3 import FALSE
|
|
||||||
|
|
||||||
from packetserver.client.cli.util import exit_client, format_list_dicts, unit_seconds
|
|
||||||
from copy import deepcopy
|
|
||||||
from uuid import UUID
|
|
||||||
import datetime
|
|
||||||
import re
|
|
||||||
import json
|
|
||||||
from packetserver.client.messages import *
|
|
||||||
|
|
||||||
# Relative-date spec like "-5d": leading minus, a count, then a unit.
# NOTE(review): only d/h/m/s are accepted, while the --since-date help text
# mentions units "Mdyhms" — confirm which set is intended.
rel_date = '^-(\\d+)([dhms])$'
|
|
||||||
|
|
||||||
@click.group()
@click.pass_context
def message(ctx):
    """Send, search, and filter messages to and from other users on the BBS system."""
    # Group container only; subcommands do the work.
    pass
|
|
||||||
|
|
||||||
@click.command()
@click.argument("recipients", type=str)
@click.argument("body", type=str)
@click.option("--body-filename", '-f', is_flag=True, default=False, help="Treat body argument as a filename to read body text from. '-' to read from stdin.")
@click.option("--attachment", "-A", multiple=True, default=[],
              help="Files to attach to message in form '[<t|b>:]<filename>' use 't' for text (default), 'b' to interpret file as binary data.")
@click.pass_context
def send(ctx, recipients, body, body_filename, attachment):
    """Send a message to one or more recipients.

    <recipients> should be a comma-separated list of recipients to send the message to

    <body> should be either body text, or a filename (or '-' for stdin) to read body text from
    """
    client = ctx.obj['client']
    bbs = ctx.obj['bbs']

    # Split and clean the comma-separated recipient list.
    recips = [x.strip() for x in recipients.split(",") if x.strip() != ""]

    if len(recips) == 0:
        click.echo("You must specify at least one recipient.", err=True)
        exit_client(ctx.obj, 89)

    # Parse '[<t|b>:]<filename>' attachment specs; bare filenames are text.
    attachments = []
    for a in attachment:
        is_text = True
        filename = a
        if len(a) > 1:
            if a[1] == ":":
                filename = a[2:]
                if a[0].lower() == "b":
                    is_text = False
        try:
            attachments.append(attachment_from_file(filename, binary=not is_text))
        except Exception as e:
            click.echo(str(e), err=True)
            exit_client(ctx.obj, 89)

    if len(attachments) == 0:
        attachments = None

    # Resolve the body: literal text, a file, or stdin ('-').
    if body_filename:
        if body == "-":
            body_text = sys.stdin.read()
        else:
            if not os.path.isfile(body):
                click.echo(f"{body} is not a file that can be read for body text.", err=True)
                exit_client(ctx.obj, 92)
                sys.exit(92)
            try:
                # FIX: original did `open(body, "r").read()` and leaked the
                # file handle; use a context manager. Also narrowed the bare
                # `except:` to OSError (read failures) so real bugs propagate.
                with open(body, "r") as body_file:
                    body_text = body_file.read()
            except OSError:
                click.echo(f"{body} is not a file that can be read for body text.", err=True)
                exit_client(ctx.obj, 92)
                sys.exit(92)
    else:
        body_text = body

    try:
        resp = send_message(client, bbs, body_text, recips, attachments=attachments)
        click.echo(f"Message received by server: {resp}")
        exit_client(ctx.obj, 0)
    except Exception as e:
        click.echo(f"Error sending message: {str(e)}", err=True)
        exit_client(ctx.obj, 53)
|
|
||||||
|
|
||||||
|
|
||||||
@click.command()
@click.option("--number", "-n", type=int, default=0, help="Retrieve the first N messages matching filters/sort. 0 for all.")
@click.option('--sent', '-S', is_flag=True, default=False, help="Include sent messages in results.")
@click.option("--not-received", "-R", is_flag=True, default=False, help="Don't include received messages.")
@click.option("--ascending", "-A", is_flag=True, default=False, help="Show older/smaller results first after sorting.")
@click.option("--no-attachments", "-N", is_flag=True, default=False, help="Don't fetch attachment data.")
@click.option("--uuid", "-u", type=str, default=None, help="If specified, ignore other filters and retrieve only messages matching uuid.")
@click.option("--since-date", "-d", type=str, default=None, help="Only include messages since date (iso format), or '-<num><unit Mdyhms>' ex: -5d")
@click.option("--output-format", "-f", default="table", help="Print data as table[default], list, or JSON",
              type=click.Choice(['table', 'json', 'list'], case_sensitive=False))
@click.option("--save-copy", "-C", is_flag=True, default=False, help="Save a full copy of each message to fs.")
@click.option("--search", "-F", type=str, default="", help="Return only messages containing search string.")
@click.option("--no-text", "-T", is_flag=True, default=False, help="Don't return the message text.")
@click.option("--sort-by", "-B", default="date", help="Choose to sort by 'date', 'from', or 'to'",
              type=click.Choice(['date', 'from', 'to'], case_sensitive=False))
@click.pass_context
def get(ctx, number, sent, not_received, ascending, no_attachments, uuid, since_date, output_format, save_copy,
        search, no_text, sort_by):
    """Fetch messages from the BBS, apply filters/sorting, then print (and optionally save) them."""
    client = ctx.obj['client']
    bbs = ctx.obj['bbs']
    messages = []
    get_attach = not no_attachments
    get_text = not no_text
    reverse = not ascending

    # A UUID lookup bypasses every other filter.
    if uuid is not None:
        try:
            uuid = UUID(uuid)
        except ValueError:
            exit_client(ctx.obj, 52, message="Must provide a valid UUID.")

    if type(search) is str and (search.strip() == ""):
        search = None

    # Derive the mailbox source from the two inclusion/exclusion flags.
    if not_received:
        if sent:
            source = 'sent'
        else:
            exit_client(ctx.obj, 23, "Can't exclude both sent and received messages.")
    else:
        if sent:
            source = 'all'
        else:
            source = 'received'

    limit = None if number == 0 else number

    # --since-date is either a relative spec like "-5d" or an ISO-format date.
    if since_date is not None:
        if len(since_date) < 3:
            exit_client(ctx.obj, 41, "Invalid date specification.")
        if since_date[0] == "-":
            m = re.match(rel_date, since_date)
            if m is None:
                exit_client(ctx.obj, 41, "Invalid date specification.")
            else:
                unit = m.group(2).lower()
                multiplier = int(m.group(1))
                if unit not in unit_seconds:
                    exit_client(ctx.obj, 41, "Invalid date specification.")
                total_seconds = int(multiplier * unit_seconds[unit])
                cutoff_date = datetime.datetime.now() - datetime.timedelta(seconds=total_seconds)
        else:
            try:
                cutoff_date = datetime.datetime.fromisoformat(since_date)
            except ValueError:
                exit_client(ctx.obj, 41, "Invalid date specification.")

    if type(uuid) is UUID:
        try:
            messages.append(get_message_uuid(client, bbs, uuid, get_attachments=get_attach))
        except Exception as e:
            exit_client(ctx.obj, 40, message=f"Couldn't get message specified: {str(e)}")
    elif since_date is not None:
        try:
            messages = get_messages_since(client, bbs, cutoff_date, get_text=get_text, limit=limit, sort_by=sort_by,
                                          reverse=reverse, search=search, get_attachments=get_attach, source=source)
        except Exception as e:
            exit_client(ctx.obj, 40, message=f"Couldn't fetch messages: {str(e)}")
    else:
        try:
            messages = get_messages(client, bbs, get_text=get_text, limit=limit, sort_by=sort_by, reverse=reverse,
                                    search=search, get_attachments=get_attach, source=source)
        except Exception as e:
            exit_client(ctx.obj, 40, message=f"Couldn't fetch messages: {str(e)}")

    save_dir = os.path.join(ctx.obj['directory'], 'message_cache')
    if save_copy:
        if not os.path.isdir(save_dir):
            os.mkdir(save_dir)

    message_display = []
    for msg in messages:
        # BUGFIX: filename timestamp used "%Y%m%d%H%M%s" — lowercase %s is a
        # platform-dependent epoch-seconds directive; %S (second of minute) was
        # intended. Also hoisted out of the f-string: nesting the same quote
        # character inside an f-string is a syntax error before Python 3.12.
        timestamp = msg.sent.strftime("%Y%m%d%H%M%S")
        json_filename = f"{timestamp}-{msg.from_user}.json"
        json_path = os.path.join(save_dir, json_filename)
        if save_copy:
            # BUGFIX: use a context manager so the file handle is closed
            # (was json.dump(..., open(json_path, 'w')) with no close).
            with open(json_path, 'w') as json_file:
                json.dump(msg.to_dict(json=True), json_file)
        d = {
            'from': msg.from_user,
            'to': ",".join(msg.to_users),
            'id': str(msg.msg_id),
            'text': msg.text,
            'sent_at': msg.sent.isoformat(),
            'attachments': "",
        }
        if len(msg.attachments) > 0:
            d['attachments'] = ",".join([a.name for a in msg.attachments])
        if save_copy:
            d['saved_path'] = json_path
        message_display.append(d)
    exit_client(ctx.obj, 0, format_list_dicts(message_display, output_format=output_format))
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# Register the subcommands on the parent "message" click group.
message.add_command(get)
message.add_command(send)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@@ -1,127 +0,0 @@
|
|||||||
import os
|
|
||||||
import os.path
|
|
||||||
import click
|
|
||||||
from packetserver.client.objects import (ObjectWrapper, post_object, post_file,
|
|
||||||
get_user_objects, get_object_by_uuid, delete_object_by_uuid)
|
|
||||||
from packetserver.client.cli.util import exit_client, format_list_dicts
|
|
||||||
from copy import deepcopy
|
|
||||||
from uuid import UUID
|
|
||||||
|
|
||||||
@click.group()
@click.pass_context
def objects(ctx):
    """Manages objects stored on the BBS."""
    # Pure click group; subcommands are attached via add_command at module bottom.
    pass
|
|
||||||
|
|
||||||
@click.command()
@click.argument('file_path', required=True, type=str)
@click.option("--public", "-P", is_flag=True, default=False, help="Mark the object public for all users.")
@click.option("--binary", '-b', is_flag=True, default=False, help="Treat the file as binary instead of text.")
@click.option('--name', '-n', type=str, default=None, help="Name of object instead of source filename.")
@click.pass_context
def upload_file(ctx, file_path, public, name, binary):
    """Upload file to object store. Return the assigned UUID."""
    client = ctx.obj['client']

    # Refuse anything that is not a regular file before touching the network.
    if not os.path.isfile(file_path):
        click.echo(f"'{file_path}' is not a file.", err=True)
        exit_client(ctx.obj, 15)

    uuid = post_file(client, ctx.obj['bbs'], file_path, private=not public, name=name, binary=binary)
    click.echo(str(uuid))
    exit_client(ctx.obj, 0)
|
|
||||||
|
|
||||||
@click.command()
@click.argument('uuid', required=True, type=str)
@click.pass_context
def get(ctx, uuid):
    """Get an object's data by its UUID."""
    client = ctx.obj['client']
    u = ""
    # Validate the UUID text before asking the server for anything.
    try:
        u = UUID(uuid)
    except ValueError:
        click.echo(f"'{uuid}' is not a valid UUID.", err=True)
        exit_client(ctx.obj, 13)

    try:
        obj = get_object_by_uuid(client, ctx.obj['bbs'], u, include_data=True)
        # nl=False: emit the raw object data without appending a newline.
        click.echo(obj.data, nl=False)
        exit_client(ctx.obj, 0)
    except Exception as e:
        click.echo(e, err=True)
        exit_client(ctx.obj, 19)
|
|
||||||
|
|
||||||
|
|
||||||
@click.command()
@click.argument('uuid', required=True, type=str)
@click.pass_context
def delete(ctx, uuid):
    """Delete the object identified by its UUID."""
    client = ctx.obj['client']
    u = ""
    # Validate the UUID text before asking the server for anything.
    try:
        u = UUID(uuid)
    except ValueError:
        click.echo(f"'{uuid}' is not a valid UUID.", err=True)
        exit_client(ctx.obj, 13)

    try:
        delete_object_by_uuid(client, ctx.obj['bbs'], u)
        exit_client(ctx.obj, 0)
    except Exception as e:
        click.echo(e, err=True)
        exit_client(ctx.obj, 19)
|
|
||||||
|
|
||||||
|
|
||||||
@click.command()
@click.option('--number', '-n', type=int, default=0, help="Number of objects to list. Default 0 for all.")
@click.option('--search', '-S', type=str, default=None, help="Search string to filter objects with.")
@click.option('--reverse', '-r', is_flag=True, default=False, help="Return results in reverse order.")
@click.option('--sort-by', '-B', default='date', help="Sort objects by size, date(default), or name",
              type=click.Choice(['size', 'name', 'date'], case_sensitive=False))
@click.option("--output-format", "-f", default="table", help="Print data as table[default], list, or JSON",
              type=click.Choice(['table', 'json', 'list'], case_sensitive=False))
@click.pass_context
def list_objects(ctx, number, search, reverse, sort_by, output_format):
    """Get a list of user objects without the data."""
    client = ctx.obj['client']

    # get_user_objects takes one boolean per sort key; derive them from --sort-by
    # ("date" is the fallback for anything that isn't size or name).
    sort_size = sort_by == "size"
    sort_name = sort_by == "name"
    sort_date = not (sort_size or sort_name)

    object_list = get_user_objects(client, ctx.obj['bbs'], limit=number, include_data=False, search=search,
                                   reverse=reverse, sort_date=sort_date, sort_name=sort_name, sort_size=sort_size)

    obj_dicts = []
    for wrapper in object_list:
        d = deepcopy(wrapper.obj_data)
        # Present the UUID as a string and drop wire-format/bulky fields.
        d['uuid'] = ""
        if 'uuid_bytes' in d:
            d['uuid'] = str(UUID(bytes=d['uuid_bytes']))
            del d['uuid_bytes']
        if 'data' in d:
            del d['data']
        if 'includes_data' in d:
            del d['includes_data']
        obj_dicts.append(d)

    click.echo(format_list_dicts(obj_dicts, output_format=output_format.lower()))
    exit_client(ctx.obj, 0)
|
|
||||||
|
|
||||||
# Register the subcommands on the "objects" click group
# ("list_objects" is exposed to users as "list").
objects.add_command(upload_file)
objects.add_command(list_objects, name='list')
objects.add_command(get)
objects.add_command(delete)
|
|
||||||
@@ -1,60 +0,0 @@
|
|||||||
from tabulate import tabulate
|
|
||||||
import json
|
|
||||||
import click
|
|
||||||
from packetserver.client import Client
|
|
||||||
import sys
|
|
||||||
import ZODB
|
|
||||||
from persistent.mapping import PersistentMapping
|
|
||||||
import datetime
|
|
||||||
|
|
||||||
def format_list_dicts(dicts: list[dict], output_format: str = "table") -> str:
    """Render a list of row-dicts as a table, JSON, or a per-record key/value list.

    Raises ValueError for any other output_format.
    """
    if output_format == "table":
        return tabulate(dicts, headers="keys")
    if output_format == "json":
        return json.dumps(dicts, indent=2)
    if output_format == "list":
        # One small two-column table per record, each followed by a newline.
        chunks = []
        for record in dicts:
            rows = [[str(key), str(value)] for key, value in record.items()]
            chunks.append(tabulate(rows) + "\n")
        return "".join(chunks)
    raise ValueError("Unsupported format type.")
|
|
||||||
|
|
||||||
def write_request_log(db: ZODB.DB, client: Client):
    """Append the client's request log to the DB root, keyed by the current timestamp.

    BUGFIX: the original assigned ``db_trans['request_log']`` on the transaction
    context object itself, while the existence check went through ``.root()`` —
    the mapping must be read and written through the root on both paths.
    """
    with db.transaction() as conn:
        root = conn.root()
        if 'request_log' not in root:
            root['request_log'] = PersistentMapping()
        now = datetime.datetime.now()
        root['request_log'][now.isoformat()] = client.request_log
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def exit_client(context: dict, return_code: int, message=""):
    """Stop the client, optionally persist the request log, print message, and exit.

    A non-zero return_code routes the message to stderr. Never returns
    (ends with sys.exit).
    """
    client = context['client']
    db = context['db']
    # BUGFIX: client.stop() was called twice (before the log write and again
    # after db.close()); a single stop before persisting the log suffices.
    client.stop()

    if context['keep_log']:
        write_request_log(db, client)

    db.close()
    is_err = return_code != 0
    if message.strip() != "":
        click.echo(message, err=is_err)
    sys.exit(return_code)
|
|
||||||
|
|
||||||
# Seconds per time unit, used when parsing relative date specs like "-5d".
unit_seconds = {
    's': 1,
    'm': 60,
    'h': 3600,
    'd': 86400,
}
|
|
||||||
@@ -1,179 +0,0 @@
|
|||||||
from packetserver.client import Client
|
|
||||||
from packetserver.common import Request, Response, PacketServerConnection
|
|
||||||
from typing import Union, Optional
|
|
||||||
import datetime
|
|
||||||
import time
|
|
||||||
|
|
||||||
class JobWrapper:
    """Read-only view over a job dictionary returned by the BBS."""

    def __init__(self, data: dict):
        # Minimal shape check that this is really a job payload.
        for i in ['output', 'errors', 'artifacts', 'return_code', 'status']:
            if i not in data:
                raise ValueError("Was not given a job dictionary.")
        self.data = data
        # Artifacts arrive as (name, value) pairs; index them by name.
        self.artifacts = {}
        for i in data['artifacts']:
            self.artifacts[i[0]] = i[1]

    @property
    def return_code(self) -> int:
        return self.data['return_code']

    @property
    def output_raw(self) -> bytes:
        return self.data['output']

    @property
    def output_str(self) -> str:
        return self.data['output'].decode()

    @property
    def errors_raw(self) -> bytes:
        return self.data['errors']

    @property
    def errors_str(self) -> str:
        return self.data['errors'].decode()

    @property
    def status(self) -> str:
        return self.data['status']

    @property
    def owner(self) -> str:
        return self.data['owner']

    @property
    def cmd(self) -> Union[str, list]:
        return self.data['cmd']

    @property
    def created(self) -> datetime.datetime:
        return datetime.datetime.fromisoformat(self.data['created_at'])

    @property
    def started(self) -> Optional[datetime.datetime]:
        """Start time, or None if the job hasn't started.

        BUGFIX: previously read 'created_at' (copy/paste from .created), so it
        reported the creation time instead of the start time. Uses .get since
        'started_at' is not one of the keys validated in __init__.
        """
        if not self.data.get('started_at'):
            return None
        return datetime.datetime.fromisoformat(self.data['started_at'])

    @property
    def finished(self) -> Optional[datetime.datetime]:
        if not self.data['finished_at']:
            return None
        return datetime.datetime.fromisoformat(self.data['finished_at'])

    @property
    def is_finished(self) -> bool:
        # Finished iff the server supplied a finished_at timestamp.
        return self.finished is not None

    @property
    def id(self) -> int:
        return self.data['id']

    def __repr__(self):
        return f"<Job {self.id} - {self.owner} - {self.status}>"
|
|
||||||
|
|
||||||
def send_job(client: Client, bbs_callsign: str, cmd: Union[str, list], db: bool = False, env: dict = None,
             files: dict = None) -> int:
    """Send a job using client to bbs_callsign with args cmd. Return remote job_id.

    Raises RuntimeError unless the server answers 201 Created.
    """
    req = Request.blank()
    req.path = "job"
    req.method = Request.Method.POST

    # Build the payload up front, then attach it.
    payload = {'cmd': cmd}
    if db:
        payload['db'] = ''
    if env is not None:
        payload['env'] = env
    if files is not None:
        payload['files'] = files
    req.payload = payload

    response = client.send_receive_callsign(req, bbs_callsign)
    if response.status_code != 201:
        raise RuntimeError(f"Sending job failed: {response.status_code}: {response.payload}")
    return response.payload['job_id']
|
|
||||||
|
|
||||||
def send_job_quick(client: Client, bbs_callsign: str, cmd: Union[str, list], db: bool = False, env: dict = None,
                   files: dict = None) -> JobWrapper:
    """Send a job using client to bbs_callsign with args cmd. Wait for quick job to return job results.

    Raises RuntimeError on a quick-job timeout (202) or any other non-200 status.
    """
    req = Request.blank()
    req.path = "job"
    req.method = Request.Method.POST
    req.set_var('quick', True)

    # Build the payload up front, then attach it.
    payload = {'cmd': cmd}
    if db:
        payload['db'] = ''
    if env is not None:
        payload['env'] = env
    if files is not None:
        payload['files'] = files
    req.payload = payload

    response = client.send_receive_callsign(req, bbs_callsign)
    if response.status_code == 200:
        return JobWrapper(response.payload)
    if response.status_code == 202:
        # 202: accepted but not finished within the quick window; payload is the job id.
        raise RuntimeError(f"Quick Job timed out. Job ID: {response.payload}")
    raise RuntimeError(f"Waiting for quick job failed: {response.status_code}: {response.payload}")
|
|
||||||
|
|
||||||
|
|
||||||
def get_job_id(client: Client, bbs_callsign: str, job_id: int, get_data=True) -> JobWrapper:
    """Fetch a single job by id; get_data controls whether output blobs are included."""
    req = Request.blank()
    req.method = Request.Method.GET
    req.path = f"job/{job_id}"
    req.set_var('data', get_data)

    response = client.send_receive_callsign(req, bbs_callsign)
    if response.status_code != 200:
        raise RuntimeError(f"GET job {job_id} failed: {response.status_code}: {response.payload}")
    return JobWrapper(response.payload)
|
|
||||||
|
|
||||||
def get_user_jobs(client: Client, bbs_callsign: str, get_data=True) -> list[JobWrapper]:
    """Fetch all of the current user's jobs from the BBS."""
    req = Request.blank()
    req.method = Request.Method.GET
    req.path = "job/user"
    req.set_var('data', get_data)

    response = client.send_receive_callsign(req, bbs_callsign)
    if response.status_code != 200:
        raise RuntimeError(f"GET user jobs failed: {response.status_code}: {response.payload}")
    return [JobWrapper(j) for j in response.payload]
|
|
||||||
|
|
||||||
class JobSession:
    """Convenience wrapper for running jobs against one BBS, keeping a log of results."""

    def __init__(self, client: Client, bbs_callsign: str, default_timeout: int = 300, stutter: int = 2):
        self.client = client
        self.bbs = bbs_callsign
        self.timeout = default_timeout
        # Seconds to sleep between job status polls.
        self.stutter = stutter
        self.job_log = []

    def connect(self) -> PacketServerConnection:
        """Open a fresh connection to this session's BBS."""
        return self.client.new_connection(self.bbs)

    def send(self, cmd: Union[str, list], db: bool = False, env: dict = None, files: dict = None) -> int:
        """Submit a job and return its remote id."""
        return send_job(self.client, self.bbs, cmd, db=db, env=env, files=files)

    def send_quick(self, cmd: Union[str, list], db: bool = False, env: dict = None, files: dict = None) -> JobWrapper:
        """Submit a quick job and wait for its result."""
        return send_job_quick(self.client, self.bbs, cmd, db=db, env=env, files=files)

    def get_id(self, jid: int) -> JobWrapper:
        """Fetch the job with the given id."""
        return get_job_id(self.client, self.bbs, jid)

    def run_job(self, cmd: Union[str, list], db: bool = False, env: dict = None, files: dict = None,
                quick: bool = False) -> JobWrapper:
        """Run a job to completion (quick mode or polling) and record it in job_log."""
        if quick:
            result = self.send_quick(cmd, db=db, env=env, files=files)
        else:
            jid = self.send(cmd, db=db, env=env, files=files)
            time.sleep(self.stutter)
            result = self.get_id(jid)
            # Poll until the server reports a finished timestamp.
            while not result.is_finished:
                time.sleep(self.stutter)
                result = self.get_id(jid)
        self.job_log.append(result)
        return result
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@@ -1,231 +0,0 @@
|
|||||||
import datetime
|
|
||||||
|
|
||||||
from packetserver.client import Client
|
|
||||||
from packetserver.common import Request, Response, PacketServerConnection
|
|
||||||
from packetserver.common.util import to_date_digits
|
|
||||||
from typing import Union, Optional
|
|
||||||
from uuid import UUID, uuid4
|
|
||||||
import os.path
|
|
||||||
import base64
|
|
||||||
|
|
||||||
|
|
||||||
class AttachmentWrapper:
    """Read-only view over an attachment dictionary from a message payload."""

    def __init__(self, data: dict):
        # Minimal shape check that this really is an attachment payload.
        for key in ('name', 'binary', 'data'):
            if key not in data.keys():
                raise ValueError("Data dict was not an attachment dictionary.")
        self._data = data

    def __repr__(self):
        return f"<AttachmentWrapper: {self.name}>"

    @property
    def name(self) -> str:
        return self._data['name']

    @property
    def binary(self) -> bool:
        return self._data['binary']

    @property
    def data(self) -> Union[str, bytes]:
        # Text attachments are carried as bytes on the wire; decode on access.
        raw = self._data['data']
        return raw if self.binary else raw.decode()

    def to_dict(self, json: bool = True) -> dict:
        """Serialize; binary payloads are base64-encoded when json is True."""
        result = {
            "name": self.name,
            "binary": self.binary,
        }
        if not self.binary:
            result['data'] = self.data
        elif json:
            result['data'] = base64.b64encode(self.data).decode()
        else:
            result['data'] = self.data
        return result
|
|
||||||
|
|
||||||
class MessageWrapper:
    """Read-only view over a message dictionary returned by the BBS."""

    def __init__(self, data: dict):
        # Minimal shape check that this really is a message payload.
        for key in ('attachments', 'to', 'from', 'id', 'sent_at', 'text'):
            if key not in data.keys():
                raise ValueError("Data dict was not a message dictionary.")
        self.data = data

    @property
    def text(self) -> str:
        return self.data['text']

    @property
    def sent(self) -> datetime.datetime:
        return datetime.datetime.fromisoformat(self.data['sent_at'])

    @property
    def msg_id(self) -> UUID:
        return UUID(self.data['id'])

    @property
    def from_user(self) -> str:
        return self.data['from']

    @property
    def to_users(self) -> list[str]:
        return self.data['to']

    @property
    def attachments(self) -> list[AttachmentWrapper]:
        # Wrap each raw attachment dict on access.
        return [AttachmentWrapper(a) for a in self.data['attachments']]

    def to_dict(self, json: bool = True) -> dict:
        """Serialize; json=True stringifies the UUID/date and base64s binary attachments."""
        result = {
            'text': self.text,
            'sent': self.sent,
            'id': self.msg_id,
            'to': self.to_users,
            'from': self.from_user,
            'attachments': [a.to_dict(json=json) for a in self.attachments],
        }
        if json:
            result['id'] = str(result['id'])
            result['sent'] = result['sent'].isoformat()
        return result
|
|
||||||
|
|
||||||
class MsgAttachment:
    """An outgoing message attachment; non-bytes input is encoded and marked as text."""

    def __init__(self, name: str, data: Union[bytes, str]):
        self.name = name
        if type(data) in [bytes, bytearray]:
            self.binary = True
            self.data = data
        else:
            # Anything else is stringified and stored as UTF-8 text bytes.
            self.binary = False
            self.data = str(data).encode()

    def __repr__(self) -> str:
        return f"<MsgAttachment {self.name}>"

    def to_dict(self) -> dict:
        """Wire-format dictionary for the message POST payload."""
        return {
            "name": self.name,
            "data": self.data,
            "binary": self.binary,
        }
|
|
||||||
|
|
||||||
def attachment_from_file(filename: str, binary: bool = True) -> MsgAttachment:
    """Build a MsgAttachment from a file on disk.

    The file is always read in binary mode; binary=False only re-marks the
    attachment as text afterwards (matching the original behavior).
    """
    # BUGFIX: close the file handle (was open(...).read() with no close).
    with open(filename, 'rb') as f:
        attachment = MsgAttachment(os.path.basename(filename), f.read())
    if not binary:
        attachment.binary = False
    return attachment
|
|
||||||
|
|
||||||
def send_message(client: Client, bbs_callsign: str, text: str, to: list[str],
                 attachments: list[MsgAttachment] = None) -> dict:
    """POST a message (and optional attachments) to the BBS; return the server payload.

    Raises RuntimeError unless the server answers 201 Created.
    """
    attachment_dicts = []
    if attachments is not None:
        attachment_dicts = [a.to_dict() for a in attachments]

    req = Request.blank()
    req.path = "message"
    req.method = Request.Method.POST
    req.payload = {
        "text": text,
        "to": to,
        "attachments": attachment_dicts,
    }

    response = client.send_receive_callsign(req, bbs_callsign)
    if response.status_code != 201:
        raise RuntimeError(f"POST message failed: {response.status_code}: {response.payload}")
    return response.payload
|
|
||||||
|
|
||||||
def get_message_uuid(client: Client, bbs_callsign: str, msg_id: UUID, get_attachments: bool = True) -> MessageWrapper:
    """GET a single message by its UUID from the BBS."""
    req = Request.blank()
    req.method = Request.Method.GET
    req.path = "message"
    # The UUID travels in its compact 16-byte form.
    req.set_var('id', msg_id.bytes)
    req.set_var('fetch_attachments', get_attachments)

    response = client.send_receive_callsign(req, bbs_callsign)
    if response.status_code != 200:
        raise RuntimeError(f"GET message failed: {response.status_code}: {response.payload}")
    return MessageWrapper(response.payload)
|
|
||||||
|
|
||||||
def get_messages_since(client: Client, bbs_callsign: str, since: datetime.datetime, get_text: bool = True, limit: int = None,
                       sort_by: str = 'date', reverse: bool = False, search: str = None, get_attachments: bool = True,
                       source: str = 'received') -> list[MessageWrapper]:
    """GET all messages newer than ``since``, with the same filter options as get_messages."""
    req = Request.blank()
    req.method = Request.Method.GET
    req.path = "message"

    # Assemble the query variables.
    req.set_var('since', to_date_digits(since))

    source = source.lower().strip()
    if source not in ['sent', 'received', 'all']:
        raise ValueError("Source variable must be ['sent', 'received', 'all']")
    req.set_var('source', source)

    req.set_var('limit', limit)
    req.set_var('fetch_text', get_text)
    req.set_var('reverse', reverse)
    req.set_var('fetch_attachments', get_attachments)

    if sort_by.strip().lower() not in ['date', 'from', 'to']:
        raise ValueError("sort_by must be in ['date', 'from', 'to']")
    req.set_var('sort', sort_by)

    if type(search) is str:
        req.set_var('search', search)

    response = client.send_receive_callsign(req, bbs_callsign)
    if response.status_code != 200:
        raise RuntimeError(f"GET message failed: {response.status_code}: {response.payload}")
    return [MessageWrapper(m) for m in response.payload]
|
|
||||||
|
|
||||||
def get_messages(client: Client, bbs_callsign: str, get_text: bool = True, limit: int = None,
                 sort_by: str = 'date', reverse: bool = True, search: str = None, get_attachments: bool = True,
                 source: str = 'received') -> list[MessageWrapper]:
    """GET messages from the BBS with filtering, sorting, and an optional limit."""
    req = Request.blank()
    req.method = Request.Method.GET
    req.path = "message"

    # Assemble the query variables.
    source = source.lower().strip()
    if source not in ['sent', 'received', 'all']:
        raise ValueError("Source variable must be ['sent', 'received', 'all']")
    req.set_var('source', source)

    req.set_var('limit', limit)
    req.set_var('fetch_text', get_text)
    req.set_var('reverse', reverse)
    req.set_var('fetch_attachments', get_attachments)

    if sort_by.strip().lower() not in ['date', 'from', 'to']:
        raise ValueError("sort_by must be in ['date', 'from', 'to']")
    req.set_var('sort', sort_by)

    if type(search) is str:
        req.set_var('search', search)

    response = client.send_receive_callsign(req, bbs_callsign)
    if response.status_code != 200:
        raise RuntimeError(f"GET message failed: {response.status_code}: {response.payload}")
    return [MessageWrapper(m) for m in response.payload]
|
|
||||||
@@ -1,161 +0,0 @@
|
|||||||
import datetime
|
|
||||||
|
|
||||||
from packetserver.client import Client
|
|
||||||
from packetserver.common import Request, Response, PacketServerConnection
|
|
||||||
from typing import Union, Optional
|
|
||||||
from uuid import UUID, uuid4
|
|
||||||
import os.path
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
class ObjectWrapper:
    """Read-only view over an object-store dictionary returned by the BBS."""

    def __init__(self, data: dict):
        # Minimal shape check that this really is an object payload.
        for key in ('name', 'uuid_bytes', 'binary', 'private', 'created_at', 'modified_at', 'data'):
            if key not in data.keys():
                raise ValueError("Data dict was not an object dictionary.")
        self.obj_data = data

    @property
    def name(self) -> Optional[str]:
        return self.obj_data['name']

    @property
    def size(self) -> int:
        return len(self.obj_data['data'])

    @property
    def created(self) -> datetime.datetime:
        return datetime.datetime.fromisoformat(self.obj_data['created_at'])

    @property
    def modified(self) -> datetime.datetime:
        return datetime.datetime.fromisoformat(self.obj_data['modified_at'])

    @property
    def uuid(self) -> UUID:
        return UUID(bytes=self.obj_data['uuid_bytes'])

    @property
    def private(self) -> bool:
        return self.obj_data['private']

    @property
    def binary(self) -> bool:
        return self.obj_data['binary']

    @property
    def data(self) -> Union[str, bytes]:
        # Normalize to bytes first, then decode for text objects.
        raw = self.obj_data['data']
        if type(raw) is str:
            raw = raw.encode()
        return raw if self.binary else raw.decode()
|
|
||||||
|
|
||||||
|
|
||||||
def post_object(client: Client, bbs_callsign: str, name: str, data: Union[str, bytes, bytearray], private=True) -> UUID:
    """POST a named blob to the object store; returns the new object's UUID.

    bytes/bytearray input is sent as binary; anything else is stringified
    and sent as UTF-8 text.
    """
    if type(data) in [bytes, bytearray]:
        binary = True
        data = bytes(data)
    else:
        binary = False
        data = str(data).encode()

    req = Request.blank()
    req.path = "object"
    req.method = Request.Method.POST
    req.payload = {'name': name, 'data': data, 'binary': binary, 'private': private}

    response = client.send_receive_callsign(req, bbs_callsign)
    if response.status_code != 201:
        raise RuntimeError(f"Posting object failed: {response.status_code}: {response.payload}")
    return UUID(response.payload)
|
|
||||||
|
|
||||||
def post_file(client: Client, bbs_callsign: str, file_path: str, private=True, name: str = None, binary=True) -> UUID:
    """Upload a file's contents as an object; returns the new object's UUID.

    binary=True reads raw bytes; binary=False reads text (post_object then
    marks the object as text because the data arrives as str).
    """
    if name is None:
        obj_name = os.path.basename(file_path)
    else:
        obj_name = os.path.basename(str(name))
    mode = 'rb' if binary else 'r'
    # BUGFIX: close the file handle (was open(...).read() with no close).
    with open(file_path, mode) as f:
        data = f.read()
    return post_object(client, bbs_callsign, obj_name, data, private=private)
|
|
||||||
|
|
||||||
def get_object_by_uuid(client: Client, bbs_callsign: str, uuid: Union[str, bytes, UUID, int],
                       include_data=True) -> ObjectWrapper:
    """GET one object by UUID; accepts str, bytes, UUID, or int forms of the id."""
    # Coerce whichever representation the caller supplied into a UUID.
    if type(uuid) is str:
        uid = UUID(uuid)
    elif type(uuid) is bytes:
        uid = UUID(bytes=uuid)
    elif type(uuid) is UUID:
        uid = uuid
    elif type(uuid) is int:
        uid = UUID(int=uuid)
    else:
        raise ValueError("uuid must represent a UUID object")

    req = Request.blank()
    req.path = "object"
    req.method = Request.Method.GET
    req.set_var('uuid', uid.bytes)
    if include_data:
        # 'fetch' asks the server to include the object body, not just metadata.
        req.set_var('fetch', 1)

    response = client.send_receive_callsign(req, bbs_callsign)
    if response.status_code != 200:
        raise RuntimeError(f"GET object {uid} failed: {response.status_code}: {response.payload}")
    return ObjectWrapper(response.payload)
|
|
||||||
|
|
||||||
def get_user_objects(client: Client, bbs_callsign: str, limit: int = 10, include_data: bool = True, search: str = None,
                     reverse: bool = False, sort_date: bool = False, sort_name: bool = False, sort_size: bool = False)\
        -> list[ObjectWrapper]:
    """List the calling user's objects stored on the BBS.

    Sort flags are applied in the order date, size, name; when more than one
    is set, the later assignment overwrites the earlier one (name wins).
    A limit of 0 means "no limit variable is sent".

    Raises RuntimeError when the server answers with anything but 200.
    """
    req = Request.blank()
    if include_data:
        req.set_var('fetch', 1)
    if sort_date:
        req.set_var('sort', 'date')
    if sort_size:
        req.set_var('sort', 'size')
    if sort_name:
        req.set_var('sort', 'name')
    req.set_var('reverse', reverse)
    if limit != 0:
        req.set_var('limit', limit)
    if search is not None:
        req.set_var('search', str(search))
    req.path = "object"
    req.method = Request.Method.GET

    reply = client.send_receive_callsign(req, bbs_callsign)
    if reply.status_code != 200:
        raise RuntimeError(f"Listing objects failed: {reply.status_code}: {reply.payload}")
    return [ObjectWrapper(record) for record in reply.payload]
||||||
def delete_object_by_uuid(client: Client, bbs_callsign: str, uuid: Union[str, bytes, UUID, int]) -> bool:
    """Delete an object on the BBS identified by its UUID.

    Args:
        client: Active Client used to reach the BBS.
        bbs_callsign: Callsign of the BBS to talk to.
        uuid: The object's id as a UUID, hex string, 16 raw bytes, or int.

    Returns:
        True on success.

    Raises:
        ValueError: If *uuid* cannot be interpreted as a UUID.
        RuntimeError: If the server does not answer with status 200.
    """
    # Same flexible coercion as get_object_by_uuid; bool stays rejected,
    # matching the original type()-based checks.
    if isinstance(uuid, UUID):
        uid = uuid
    elif isinstance(uuid, str):
        uid = UUID(uuid)
    elif isinstance(uuid, bytes):
        uid = UUID(bytes=uuid)
    elif isinstance(uuid, int) and not isinstance(uuid, bool):
        uid = UUID(int=uuid)
    else:
        raise ValueError("uuid must represent a UUID object")

    req = Request.blank()
    req.path = "object"
    req.set_var('uuid', uid.bytes)
    req.method = Request.Method.DELETE
    response = client.send_receive_callsign(req, bbs_callsign)
    if response.status_code != 200:
        raise RuntimeError(f"Deleting object {uid} failed: {response.status_code}: {response.payload}")
    return True
|
|
||||||
|
|
||||||
def update_object_by_uuid():
    # TODO update object by uuid client
    # Placeholder: presumably meant to send an UPDATE request for an object
    # identified by UUID, mirroring get/delete_object_by_uuid — confirm
    # intended signature before implementing.
    pass
|
|
||||||
@@ -1,85 +0,0 @@
|
|||||||
import os.path
|
|
||||||
import time
|
|
||||||
from typing import Union
|
|
||||||
|
|
||||||
from packetserver.common import Request, PacketServerConnection
|
|
||||||
from packetserver.common.testing import SimpleDirectoryConnection
|
|
||||||
from packetserver.client import Client
|
|
||||||
import ax25
|
|
||||||
from threading import Lock
|
|
||||||
import logging
|
|
||||||
import os.path
|
|
||||||
import datetime
|
|
||||||
from shutil import rmtree
|
|
||||||
|
|
||||||
class TestClient(Client):
    """Client variant for tests that exchanges messages through a shared
    directory instead of a real packet-engine connection.

    Connections are SimpleDirectoryConnection objects keyed as
    "<DEST>:<SELF>" in self._connections.
    """

    def __init__(self, conn_dir: str, callsign: str, keep_log: bool = True):
        # Base Client is constructed with a dummy PE server/port since no
        # real network is used.
        super().__init__('', 0, callsign, keep_log=keep_log)
        self._connections = {}
        if not os.path.isdir(conn_dir):
            raise NotADirectoryError(f"Conn dir {conn_dir} does not exist.")
        # Root directory under which per-connection subdirectories are made.
        self._connection_directory = os.path.abspath(conn_dir)

    @property
    def connections(self) -> dict:
        """Map of "<DEST>:<SELF>" keys to directory connections."""
        return self._connections

    def connection_exists(self, callsign: str):
        """Return True if an open connection to *callsign* is tracked."""
        if not ax25.Address.valid_call(callsign):
            raise ValueError("Must supply a valid callsign.")
        callsign = callsign.upper().strip()
        for key in self.connections.keys():
            # Keys look like "DEST:SELF"; compare against the DEST part's peer.
            if key.split(":")[1] == callsign:
                return True
        return False

    def new_connection(self, dest: str) -> SimpleDirectoryConnection:
        """Return an existing connection to *dest* or create a new one.

        A per-destination Lock serializes creation so concurrent callers
        never build two connections to the same peer.
        """
        if not self.started:
            raise RuntimeError("Must start client before creating connections.")
        if not ax25.Address.valid_call(dest):
            raise ValueError(f"Provided destination callsign '{dest}' is invalid.")
        # lock_locker guards the lock registry itself.
        with self.lock_locker:
            if dest.upper() not in self._connection_locks:
                self._connection_locks[dest.upper()] = Lock()
        with self._connection_locks[dest.upper()]:
            conn = self.connection_callsign(dest.upper())
            if conn is not None:
                return conn

            # Directory name encodes both ends: "<SELF>--<DEST>".
            conn_dir = os.path.join(self._connection_directory, f"{self.callsign.upper()}--{dest.upper()}")
            if not os.path.isdir(conn_dir):
                os.mkdir(conn_dir)
            conn = SimpleDirectoryConnection.create_directory_connection(self.callsign, conn_dir)
            self.connections[f"{dest.upper()}:{self.callsign.upper()}"] = conn
            logging.debug(f"Connection to {dest} ready.")
            return conn

    def receive(self, req: Request, conn: Union[PacketServerConnection, SimpleDirectoryConnection], timeout: int = 300):
        """Poll a directory connection for incoming data, then delegate to
        the base Client's receive.

        For directory connections this busy-waits (with an initial 1s grace
        period) until data appears or *timeout* seconds elapse.
        """
        if type(conn) is SimpleDirectoryConnection:
            time.sleep(1)
            cutoff_date = datetime.datetime.now() + datetime.timedelta(seconds=timeout)
            while datetime.datetime.now() < cutoff_date:
                logging.debug(f"Client {self.callsign} checking for connection conn {conn}")
                if conn.check_for_data():
                    break
        return super().receive(req, conn, timeout=timeout)

    def clear_connections(self):
        """Close every tracked connection and remove its backing directory."""
        closing = [x for x in self.connections]
        for key in closing:
            conn = self.connections[key]
            conn.closing = True
            conn.check_closed()
            # Retry removal: the peer may still briefly hold files open.
            while os.path.exists(conn.directory):
                try:
                    rmtree(conn.directory)
                except:
                    time.sleep(.5)
                    pass

    def start(self):
        # No engine to spin up in tests; just flip the flag.
        self.started = True

    def stop(self):
        # Tear down all simulated links before marking the client stopped.
        self.clear_connections()
        self.started = False
|
|
||||||
@@ -1,122 +0,0 @@
|
|||||||
import datetime
|
|
||||||
|
|
||||||
from packetserver.client import Client
|
|
||||||
from packetserver.common import Request, Response, PacketServerConnection
|
|
||||||
from typing import Union, Optional
|
|
||||||
from packetserver.common.util import email_valid, random_string
|
|
||||||
from uuid import UUID, uuid4
|
|
||||||
import os.path
|
|
||||||
|
|
||||||
class UserWrapper:
    """Read-only convenience view over a user record returned by a BBS."""

    def __init__(self, data: dict):
        """Validate that *data* carries every expected user key and store it.

        Raises:
            ValueError: If any required key is missing from *data*.
        """
        required = ('username', 'status', 'bio', 'socials', 'created_at',
                    'last_seen', 'email', 'location')
        if any(key not in data.keys() for key in required):
            raise ValueError("Data dict was not an object dictionary.")
        self.data = data

    def pretty_dict(self) -> dict:
        """Return display-ready string values for each user attribute."""
        display = {}
        for attr in ('username', 'status', 'bio', 'socials', 'created',
                     'last_seen', 'email', 'location'):
            if attr == 'socials':
                display['socials'] = "\n".join(self.socials)
            else:
                display[attr] = str(getattr(self, attr))
        return display

    def __repr__(self):
        return f"<UserWrapper: {self.username}>"

    @property
    def socials(self) -> list[str]:
        """List of the user's social handles/links."""
        return self.data['socials']

    @property
    def created(self) -> datetime.datetime:
        """Account creation time parsed from its ISO-8601 string."""
        return datetime.datetime.fromisoformat(self.data['created_at'])

    @property
    def last_seen(self) -> Optional[datetime.datetime]:
        """Most recent activity time, or None when never recorded."""
        raw = self.data['last_seen']
        return None if raw is None else datetime.datetime.fromisoformat(raw)

    @property
    def username(self) -> str:
        return self.data['username']

    @property
    def status(self) -> str:
        return self.data['status']

    @property
    def bio(self) -> str:
        return self.data['bio']

    @property
    def email(self) -> str:
        return self.data['email']

    @property
    def location(self) -> str:
        return self.data['location']
|
|
||||||
|
|
||||||
|
|
||||||
def get_user_by_username(client: Client, bbs_callsign: str, username: str) -> UserWrapper:
    """Fetch one user record from the BBS by username.

    Raises RuntimeError when the server answers with anything but 200.
    """
    req = Request.blank()
    req.method = Request.Method.GET
    req.path = "user"
    # Usernames are normalized to uppercase before the lookup.
    req.set_var('username', username.strip().upper())
    reply = client.send_receive_callsign(req, bbs_callsign)
    if reply.status_code != 200:
        raise RuntimeError(f"GET user {username} failed: {reply.status_code}: {reply.payload}")
    return UserWrapper(reply.payload)
|
|
||||||
|
|
||||||
def get_users(client: Client, bbs_callsign: str, limit=None):
    """Fetch the BBS user list, optionally capped at *limit* entries.

    Raises RuntimeError when the server answers with anything but 200.
    """
    req = Request.blank()
    req.method = Request.Method.GET
    req.path = "user"
    if limit is not None:
        req.set_var('limit', limit)
    reply = client.send_receive_callsign(req, bbs_callsign)
    if reply.status_code != 200:
        raise RuntimeError(f"GET userlist failed: {reply.status_code}: {reply.payload}")
    return [UserWrapper(record) for record in reply.payload]
|
|
||||||
|
|
||||||
def update_self(client: Client, bbs_callsign: str, email: str = None, bio: str = None,
                socials: Union[list[str], str] = None, location: str = None, status: str = None) -> bool:
    """Update the calling user's own profile on the BBS.

    Only fields that are not None are included in the UPDATE payload.
    Returns True on success; raises RuntimeError on any non-200 reply and
    ValueError for an invalid e-mail address.
    """
    changes = {}

    if email is not None:
        if not email_valid(email):
            raise ValueError(f"{email} is not a valid e-mail address")
        changes['email'] = email

    if socials is not None:
        # NOTE(review): the payload key is 'social' while the user record
        # exposes 'socials' — confirm against the server that this
        # asymmetry is intentional.
        changes['social'] = socials

    if status is not None:
        changes['status'] = str(status)

    if location is not None:
        changes['location'] = str(location)

    if bio is not None:
        changes['bio'] = str(bio)

    req = Request.blank()
    req.path = "user"
    req.method = Request.Method.UPDATE
    req.payload = changes
    reply = client.send_receive_callsign(req, bbs_callsign)
    if reply.status_code != 200:
        raise RuntimeError(f"Updating profile failed: {reply.status_code}: {reply.payload}")
    return True
|
|
||||||
@@ -1,343 +0,0 @@
|
|||||||
from pe.connect import Connection, ConnectionState
|
|
||||||
from threading import Lock
|
|
||||||
from msgpack import Unpacker
|
|
||||||
from msgpack import packb, unpackb
|
|
||||||
from enum import Enum
|
|
||||||
import bz2
|
|
||||||
from typing import Union, Self
|
|
||||||
import datetime
|
|
||||||
import logging
|
|
||||||
import ax25
|
|
||||||
|
|
||||||
|
|
||||||
class PacketServerConnection(Connection):
    """AX.25 connection whose traffic is framed as msgpack objects.

    Incoming bytes are fed into a streaming msgpack Unpacker; outgoing
    frames larger than max_send_size are split into chunks before they are
    handed to the underlying transport.
    """

    # Callables invoked with the connection when a new connection comes up.
    connection_subscribers = []
    # Callables invoked with the connection whenever data arrives.
    receive_subscribers = []
    # Largest single frame (bytes) handed to the underlying transport.
    max_send_size = 2000

    def __init__(self, port, call_from, call_to, incoming=False):
        super().__init__(port, call_from, call_to, incoming=incoming)
        # Now perform any initialization of your own that you might need
        self.data = Unpacker()      # streaming reassembly buffer for received msgpack data
        self.data_lock = Lock()     # guards feeds/reads of self.data
        self.connection_created = datetime.datetime.now(datetime.UTC)
        self.connection_last_activity = datetime.datetime.now(datetime.UTC)
        self.closing = False        # set True to mark the connection for teardown

    @property
    def local_callsign(self):
        """This station's callsign, accounting for connection direction."""
        if self.incoming:
            return self.call_to
        else:
            return self.call_from

    @property
    def remote_callsign(self):
        """The peer station's callsign, accounting for connection direction."""
        if self.incoming:
            return self.call_from
        else:
            return self.call_to

    def connected(self):
        """Transport callback: announce the new link to all subscribers."""
        logging.debug("connected")
        logging.debug(f"new connection from {self.call_from} to {self.call_to}")
        for fn in PacketServerConnection.connection_subscribers:
            fn(self)

    def disconnected(self):
        """Transport callback fired when the link drops."""
        logging.debug(f"connection disconnected: {self.call_from} -> {self.call_to}")

    def data_received(self, pid, data):
        """Feed received bytes into the unpacker, then notify subscribers."""
        self.connection_last_activity = datetime.datetime.now(datetime.UTC)
        logging.debug(f"received data: {data}")
        with self.data_lock:
            logging.debug(f"fed received data to unpacker {data}")
            self.data.feed(data)
        # Notification happens outside the lock so handlers may read
        # self.data under data_lock themselves without deadlocking.
        for fn in PacketServerConnection.receive_subscribers:
            logging.debug("found function to notify about received data")
            fn(self)
            logging.debug("notified function about received data")

    def send_data(self, data: Union[bytes, bytearray]):
        """Send *data*, splitting frames larger than max_send_size."""
        logging.debug(f"sending data: {data}")
        self.connection_last_activity = datetime.datetime.now(datetime.UTC)
        if len(data) > self.max_send_size:
            logging.debug(f"Large frame detected {len(data)} breaking it up into chunks")
            index = 0
            counter = 0
            while index <= len(data):
                logging.debug(f"Sending chunk {counter}")
                if (len(data) - index) < self.max_send_size:
                    # Final, short chunk — send the remainder and stop.
                    super().send_data(data[index:])
                    break
                super().send_data(data[index:index + self.max_send_size])
                index = index + self.max_send_size
                counter = counter + 1
        else:
            super().send_data(data)

    @classmethod
    def query_accept(cls, port, call_from, call_to):
        """Accept every inbound connection request."""
        return True
|
|
||||||
|
|
||||||
|
|
||||||
class Message:
    """Base class for communication encapsulated in msgpack objects.

    Wire format (a packed dict): 't' = MessageType value, 'c' =
    CompressionType value, 'd' = the (possibly compressed) msgpack-encoded
    body.  Inside the body dict: 'v' holds header variables, 'd' the
    payload, and subclasses add their own keys ('p', 'm', 'c').
    """

    class CompressionType(Enum):
        # Wire values for the outer 'c' field.  Only NONE and BZIP2 are
        # implemented; GZIP/DEFLATE are reserved.
        NONE = 0
        BZIP2 = 1
        GZIP = 2
        DEFLATE = 3

    class MessageType(Enum):
        # Wire values for the outer 't' field.
        REQUEST = 0
        RESPONSE = 1

    def __init__(self, msg_type: MessageType, compression: CompressionType, payload: dict):
        """Store type, compression and the body dict (coercing enum values)."""
        self.type = Message.MessageType(msg_type)
        self.compression = Message.CompressionType(compression)
        self.data = payload

    @property
    def vars(self) -> dict:
        """Header variables stored under 'v'; {} when absent or not a dict."""
        if 'v' in self.data:
            if type(self.data['v']) is dict:
                return self.data['v']
        return {}

    def get_var(self, key: str):
        """Return header variable *key*; raise KeyError when missing."""
        if 'v' not in self.data:
            raise KeyError(f"Variable '{key}' not found.")
        if str(key) not in self.data['v']:
            raise KeyError(f"Variable '{key}' not found.")
        return self.data['v'][str(key)]

    def set_var(self, key: str, value):
        """Set header variable *key* (keys are always stringified)."""
        if 'v' not in self.data:
            self.data['v'] = {}
        self.data['v'][str(key)] = value

    @property
    def data_bytes(self):
        """msgpack encoding of the body dict (uncompressed)."""
        return packb(self.data)

    def pack(self) -> bytes:
        """Serialize to wire bytes, compressing the body when worthwhile.

        Bodies under 30 bytes, or messages with CompressionType.NONE, are
        sent uncompressed.  If compression does not actually shrink the
        body, the uncompressed form is sent and 'c' is rewritten to NONE.
        """
        output = {'t': self.type.value, 'c': self.compression.value}
        data_bytes = self.data_bytes
        logging.debug("Packing Message")
        if (self.compression is self.CompressionType.NONE) or (len(data_bytes) < 30):
            output['d'] = data_bytes
            output['c'] = self.CompressionType.NONE.value
            return packb(output)

        if self.compression is self.CompressionType.BZIP2:
            compressed = bz2.compress(packb(self.data))
        else:
            raise NotImplementedError(f"Compression type {self.compression.name} is not implemented yet.")

        if len(compressed) < len(data_bytes):
            output['d'] = compressed
        else:
            # Compression didn't help; fall back to the raw body.
            output['d'] = data_bytes
            output['c'] = self.CompressionType.NONE.value
        return packb(output)

    @property
    def payload(self):
        """Body payload under 'd'; non-container values are stringified,
        and "" is returned when no payload was set."""
        if 'd' in self.data:
            pl = self.data['d']
            if type(pl) in (dict, str, bytes, list):
                return pl
            else:
                return str(pl)
        else:
            return ""

    @payload.setter
    def payload(self, payload: Union[str, bytes, dict, list]):
        """Set the body payload, stringifying unsupported types."""
        logging.debug(f"Setting a message payload: {type(payload)}: {payload}")
        if type(payload) in (str, bytes, dict, list):
            logging.debug(f"Payload type is {type(payload)}, conversion to string unnecessary")
            self.data['d'] = payload
        else:
            logging.debug("payload type is not in (str, bytes, dict, list); converting to string")
            self.data['d'] = str(payload)
        logging.debug(f"Final payload is: {type(payload)}: {payload}")

    @classmethod
    def partial_unpack(cls, msg: dict) -> Self:
        """Build a Message from an already-unpacked outer wire dict,
        decompressing the body as indicated by its 'c' field."""
        unpacked = msg
        comp = Message.CompressionType(unpacked['c'])
        msg_type = Message.MessageType(unpacked['t'])
        raw_data = unpacked['d']

        if comp is Message.CompressionType.NONE:
            data = unpackb(raw_data)
        elif comp is Message.CompressionType.BZIP2:
            data = unpackb(bz2.decompress(raw_data))
        else:
            raise NotImplementedError(f"Compression type {comp.name} is not implemented yet.")

        return Message(msg_type, comp, data)

    @classmethod
    def unpack(cls, msg_bytes: bytes) -> Self:
        """Parse wire bytes into a Message; raise ValueError on anything
        that is not a well-formed packetserver message dict."""
        try:
            unpacked = unpackb(msg_bytes)
        except Exception as e:
            raise ValueError("ERROR: msg_bytes didn't contain a valid msgpack object.\n" + str(e))
        if type(unpacked) is not dict:
            raise ValueError("ERROR: unpacked message was not a packetserver message.")
        for i in ('t', 'c', 'd'):
            if i not in unpacked:
                raise ValueError("ERROR: unpacked message was not a packetserver message.")
        return Message.partial_unpack(unpacked)
|
|
||||||
|
|
||||||
class Request(Message):
    """A REQUEST message with a path ('p') and a method ('m').

    The method is stored on the wire as a single byte; the path is
    normalized to a stripped, lowercase string.
    """

    class Method(Enum):
        # HTTP-like verbs, stored as one wire byte each.
        GET = 0
        POST = 1
        UPDATE = 2
        DELETE = 3

    def __init__(self, msg: Message):
        """Wrap a generic Message as a Request, validating 'p' and 'm'.

        Raises:
            ValueError: If *msg* is not a REQUEST, the path is not a
                string, the method is not bytes, or the method byte does
                not map to a known Method.
        """
        if msg.type is not Message.MessageType.REQUEST:
            raise ValueError(f"Can't create a Request Object from a {msg.type} Message object.")

        super().__init__(msg.type, msg.compression, msg.data)

        if ('p' in msg.data) and (type(msg.data['p']) is not str):
            raise ValueError("Path of Request must be a string.")

        if 'p' in self.data:
            # Normalize the path in place so comparisons are consistent.
            self.data['p'] = str(self.data['p']).strip().lower()

        if 'm' in msg.data:
            if type(msg.data['m']) is not bytes:
                raise ValueError("Method of Request must be bytes.")
            # Validation only: raises ValueError for an unknown method byte.
            self.Method(int(self.data['m'][0]))

    @property
    def path(self):
        """Normalized request path; "" when unset."""
        if 'p' in self.data:
            return str(self.data['p']).lower().strip()
        else:
            return ""

    @path.setter
    def path(self, path: str):
        self.data['p'] = str(path).strip().lower()

    @property
    def method(self) -> Method:
        """Request method; defaults to GET when no 'm' field is present."""
        if 'm' in self.data:
            return self.Method(int(self.data['m'][0]))
        else:
            return self.Method.GET

    @method.setter
    def method(self, meth: Method):
        meth = self.Method(meth)
        # Stored as a single byte on the wire.
        self.data['m'] = meth.value.to_bytes(1)

    @classmethod
    def unpack(cls, msg_bytes: bytes) -> Self:
        """Parse wire bytes and wrap the result as a Request."""
        msg = super().unpack(msg_bytes)
        return Request(msg)

    @classmethod
    def blank(cls) -> Self:
        """Return an empty, uncompressed Request ready to be filled in."""
        msg = Message(Message.MessageType.REQUEST, Message.CompressionType.NONE, {})
        return Request(msg)

    def __repr__(self):
        return f"<Request: {self.method.name} '{self.path}'>"
|
|
||||||
|
|
||||||
class Response(Message):
    """A RESPONSE message carrying an HTTP-like status code.

    Inside the body dict the status code lives under 'c' as big-endian
    bytes (distinct from the outer wire dict's 'c' compression field).
    """

    def __init__(self, msg: Message):
        """Wrap a generic Message as a Response, validating any status code.

        Raises:
            ValueError: If *msg* is not a RESPONSE, the stored status is
                not bytes, or it decodes to a value >= 600.
        """
        if msg.type is not Message.MessageType.RESPONSE:
            raise ValueError(f"Can't create a Response Object from a {msg.type} Message object.")

        super().__init__(msg.type, msg.compression, msg.data)
        if 'c' in msg.data:
            status_bytes = self.data['c']
            if type(status_bytes) is not bytes:
                raise ValueError("Invalid Response data")
            status_code = int.from_bytes(status_bytes)
            if status_code >= 600:
                raise ValueError("Invalid status code.")

    @classmethod
    def unpack(cls, msg_bytes: bytes) -> Self:
        """Parse wire bytes and wrap the result as a Response."""
        msg = super().unpack(msg_bytes)
        return Response(msg)

    @classmethod
    def blank(cls) -> Self:
        """Return an empty, uncompressed Response ready to be filled in."""
        msg = Message(Message.MessageType.RESPONSE, Message.CompressionType.NONE, {})
        return Response(msg)

    @property
    def status_code(self) -> int:
        """Status code decoded from the body's 'c' bytes; 200 when unset."""
        if 'c' in self.data:
            status_bytes = self.data['c']
            if type(status_bytes) is not bytes:
                raise ValueError("Invalid Response data")
            status_code = int.from_bytes(status_bytes)
            if status_code >= 600:
                raise ValueError("Invalid status code.")
            return status_code
        else:
            return 200

    @status_code.setter
    def status_code(self, code: int):
        # Accepted range is actually 1..599 (600 itself is rejected),
        # despite what the error message says.
        if (code <= 0) or (code >= 600):
            raise ValueError("Status must be a positive integer <= 600")
        self.data['c'] = code.to_bytes(2)

    def __repr__(self):
        return f"<Response: {self.status_code}>"
|
|
||||||
|
|
||||||
def send_response(conn: PacketServerConnection, response: Response, original_request: Request,
                  compression: Message.CompressionType = Message.CompressionType.BZIP2):
    """Send *response* over *conn*, honoring the requester's compression choice.

    If the original request carried a 'C' header variable, it is matched
    against CompressionType by name (case-insensitive) or numeric value and
    overrides the *compression* default.  Nothing is sent unless the
    connection is CONNECTED and not closing.
    """
    if conn.state.name == "CONNECTED" and not conn.closing:

        # figure out compression setting based on request
        logging.debug("Determining compression of response")
        comp = compression
        logging.debug(f"Default comp: {comp}")
        logging.debug(f"Original vars: {original_request.vars}")
        if 'C' in original_request.vars:
            logging.debug(f"Detected compression header in original request: {original_request.vars['C']}")
            val = original_request.vars['C']
            for i in Message.CompressionType:
                logging.debug(f"Checking type: {i}")
                if str(val).strip().upper() == i.name:
                    comp = i
                    logging.debug(f"matched compression with var to {comp}")
                    break
                # Also accept a numeric value; non-numeric vals just skip.
                try:
                    if int(val) == i.value:
                        comp = i
                        logging.debug(f"matched compression with var to {comp}")
                except ValueError:
                    pass
        response.compression = comp
        logging.debug(f"Final compression: {response.compression}")

        logging.debug(f"sending response: {response}, {response.compression}, {response.payload}")
        conn.send_data(response.pack())
        logging.debug("response sent successfully")
    else:
        logging.warning(f"Attempted to send data, but connection state is {conn.state.name}")
|
|
||||||
|
|
||||||
def send_blank_response(conn: PacketServerConnection, original_request: Request, status_code: int = 200,
                        payload: Union[bytes, bytearray, str, dict, list] = ""):
    """Build a fresh Response with the given status and payload and send it
    back over *conn* in reply to *original_request*."""
    reply = Response.blank()
    reply.payload = payload
    reply.status_code = status_code
    send_response(conn, reply, original_request)
|
|
||||||
@@ -1,2 +0,0 @@
|
|||||||
# Accepted falsy spellings for boolean-ish values.
no_values = [0, '0', 'n', 'N', 'f', 'F', 'no', 'NO', False]
# Accepted truthy spellings for boolean-ish values.
yes_values = [1, '1', 'y', 'Y', 't', 'T', 'yes', 'YES', True]
|
|
||||||
@@ -1,268 +0,0 @@
|
|||||||
import msgpack
|
|
||||||
|
|
||||||
from . import PacketServerConnection
|
|
||||||
from pe.connect import ConnectionState
|
|
||||||
from msgpack import Unpacker
|
|
||||||
from typing import Union, Self, Optional
|
|
||||||
import os.path
|
|
||||||
import logging
|
|
||||||
import ax25
|
|
||||||
|
|
||||||
class DummyPacketServerConnection(PacketServerConnection):
    """In-memory test double: captures sent bytes instead of transmitting.

    Whatever send_data receives is fed into the sent_data Unpacker so
    tests can unpack and inspect the outgoing messages.
    """

    def __init__(self, call_from: str, call_to: str, incoming=False):
        # Port 0: no real packet-engine port is involved.
        super().__init__(0, call_from, call_to, incoming=incoming)
        self.sent_data = Unpacker()  # collects everything "sent" for inspection
        self._state = ConnectionState.CONNECTED

    @property
    def state(self):
        # Always reports the fixed state set in __init__.
        return self._state

    def send_data(self, data: Union[bytes, bytearray]):
        """Capture outgoing bytes in sent_data instead of transmitting."""
        self.sent_data.feed(data)
        logging.debug(f"Sender added {data} to self.sent_data.feed")
|
|
||||||
|
|
||||||
class DirectoryTestServerConnection(PacketServerConnection):
    """Monitors a directory for messages in msgpack format.

    Two peers share a directory named "<SRC>--<DST>"; each side writes
    complete messages to "<own callsign>.msg" and polls for the peer's
    file.  Used to simulate a packet link in tests without any radio.
    """

    def __init__(self, call_from: str, call_to: str, directory: str, incoming=False):
        super().__init__(0, call_from, call_to, incoming=incoming)
        self._state = ConnectionState.CONNECTED
        if not os.path.isdir(directory):
            raise FileNotFoundError(f"No such directory as {directory}")
        self._directory = directory
        self._sent_data = Unpacker()  # reassembles outgoing chunks into whole messages
        self._pid = 1                 # monotonically increasing pseudo protocol id
        self.closing = False

    @classmethod
    def create_directory_connection(cls, self_callsign: str, directory: str) -> Self:
        """Build a connection from a directory named "<SRC>--<DST>".

        Direction is inferred: the connection is incoming when
        *self_callsign* matches the DST half of the directory name.

        Raises:
            ValueError: For an invalid callsign or malformed directory name.
            NotADirectoryError: When *directory* does not exist.
        """
        if not ax25.Address.valid_call(self_callsign):
            raise ValueError("self_callsign must be a valid callsign.")

        if not os.path.isdir(directory):
            raise NotADirectoryError(f"{directory} is not a directory or doesn't exist.")

        spl = os.path.basename(directory).split('--')
        if len(spl) != 2:
            raise ValueError(f"Directory {directory} has the wrong name to be a connection dir.")

        src = spl[0]
        dst = spl[1]

        if not ax25.Address.valid_call(src):
            raise ValueError(f"Directory {directory} has the wrong name to be a connection dir.")

        if not ax25.Address.valid_call(dst):
            raise ValueError(f"Directory {directory} has the wrong name to be a connection dir.")

        if dst.upper() == self_callsign.upper():
            incoming = True
        else:
            incoming = False

        return DirectoryTestServerConnection(src, dst, directory, incoming=incoming)

    @property
    def pid(self) -> int:
        """Return the current pseudo pid and advance the counter."""
        old = self._pid
        self._pid = self._pid + 1
        return old

    @property
    def directory(self) -> str:
        # Directory backing this simulated link.
        return self._directory

    @property
    def state(self):
        return self._state

    @property
    def file_path(self) -> str:
        """Path this side writes outgoing messages to (<local>.msg)."""
        file_name = f"{self.local_callsign}.msg"
        file_path = os.path.join(self._directory, file_name)
        return file_path

    @property
    def remote_file_path(self) -> str:
        """Path the peer writes its messages to (<remote>.msg)."""
        file_name = f"{self.remote_callsign}.msg"
        file_path = os.path.join(self._directory, file_name)
        return file_path

    def check_closed(self):
        """Refresh connection state; return True when no longer usable.

        A set closing flag, or a vanished backing directory, moves the
        state to DISCONNECTED.
        """
        if self.closing:
            self._state = ConnectionState.DISCONNECTED
        if self._state is not ConnectionState.CONNECTED:
            return True
        if not os.path.isdir(self._directory):
            # Backing directory vanished: treat as a dropped link.
            self._state = ConnectionState.DISCONNECTED
            self.disconnected()
            return True
        return False

    def write_out(self, data: bytes):
        """Atomically publish one complete message file for the peer.

        Raises RuntimeError if the connection is closed or the previous
        outgoing file has not been consumed yet.
        """
        if self.check_closed():
            raise RuntimeError("Connection is closed. Cannot send.")

        if os.path.exists(self.file_path):
            raise RuntimeError("The outgoing message file already exists. State is wrong for sending.")

        if os.path.exists(self.file_path+".tmp"):
            os.remove(self.file_path+".tmp")

        # Write to a temp file, then rename so the reader never sees a
        # partially written message.
        open(self.file_path+".tmp", 'wb').write(data)
        os.rename(self.file_path+".tmp", self.file_path)

    def send_data(self, data: Union[bytes, bytearray]):
        """Buffer outgoing chunks; publish a file once a whole msgpack
        object has been accumulated."""
        if self.check_closed():
            raise RuntimeError("Connection is closed. Cannot send.")
        self._sent_data.feed(data)
        logging.debug(f"Sender added {data} to self.sent_data.feed")
        try:
            obj = self._sent_data.unpack()
            self.write_out(msgpack.packb(obj))
            logging.debug(f"Wrote complete binary message to {self.file_path}")
        except msgpack.OutOfData as e:
            # Not enough bytes for a complete message yet; wait for more.
            pass

    def check_for_data(self):
        """Monitors connection directory for data."""
        if self.closing:
            self._state = ConnectionState.DISCONNECTED
        if self.check_closed():
            return

        if os.path.isfile(self.remote_file_path):
            logging.debug(f"{self.local_callsign} Found that the remote file path '{self.remote_file_path}' exists now.")
            data = open(self.remote_file_path, 'rb').read()
            # Deliver through the normal receive path, then consume the file.
            self.data_received(self.pid, bytearray(data))
            os.remove(self.remote_file_path)
            logging.debug(f"{self.local_callsign} detected data from {self.remote_callsign}: {msgpack.unpackb(data)}")
|
|
||||||
|
|
||||||
|
|
||||||
class SimpleDirectoryConnection:
|
|
||||||
def __init__(self, call_from: str, call_to: str, directory: str, incoming=False):
|
|
||||||
self._state = ConnectionState.CONNECTED
|
|
||||||
if not os.path.isdir(directory):
|
|
||||||
raise FileNotFoundError(f"No such directory as {directory}")
|
|
||||||
self._directory = directory
|
|
||||||
self._sent_data = Unpacker()
|
|
||||||
self.data = Unpacker()
|
|
||||||
self._pid = 1
|
|
||||||
self.call_to = call_to
|
|
||||||
self.call_from = call_from
|
|
||||||
self.incoming = incoming
|
|
||||||
self._incoming = incoming
|
|
||||||
self.closing = False
|
|
||||||
if incoming:
|
|
||||||
self.local_callsign = call_to
|
|
||||||
self.remote_callsign = call_from
|
|
||||||
else:
|
|
||||||
self.local_callsign = call_from
|
|
||||||
self.remote_callsign = call_to
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def create_directory_connection(cls, self_callsign: str, directory: str) -> Self:
|
|
||||||
|
|
||||||
if not ax25.Address.valid_call(self_callsign):
|
|
||||||
raise ValueError("self_callsign must be a valid callsign.")
|
|
||||||
|
|
||||||
if not os.path.isdir(directory):
|
|
||||||
raise NotADirectoryError(f"{directory} is not a directory or doesn't exist.")
|
|
||||||
|
|
||||||
spl = os.path.basename(directory).split('--')
|
|
||||||
if len(spl) != 2:
|
|
||||||
raise ValueError(f"Directory {directory} has the wrong name to be a connection dir.")
|
|
||||||
|
|
||||||
src = spl[0]
|
|
||||||
dst = spl[1]
|
|
||||||
|
|
||||||
if not ax25.Address.valid_call(src):
|
|
||||||
raise ValueError(f"Directory {directory} has the wrong name to be a connection dir.")
|
|
||||||
|
|
||||||
if not ax25.Address.valid_call(dst):
|
|
||||||
raise ValueError(f"Directory {directory} has the wrong name to be a connection dir.")
|
|
||||||
|
|
||||||
if dst.upper() == self_callsign.upper():
|
|
||||||
incoming = True
|
|
||||||
else:
|
|
||||||
incoming = False
|
|
||||||
|
|
||||||
return SimpleDirectoryConnection(src, dst, directory, incoming=incoming)
|
|
||||||
|
|
||||||
@property
def pid(self) -> int:
    """Return the current packet id and advance the counter (post-increment)."""
    current, self._pid = self._pid, self._pid + 1
    return current
|
|
||||||
@property
def directory(self) -> str:
    """Backing exchange directory for this connection (validated in __init__)."""
    return self._directory
|
|
||||||
@property
def state(self):
    """Current ConnectionState of this connection (updated by check_closed)."""
    return self._state
|
|
||||||
@property
def file_path(self) -> str:
    """Path of the outgoing message file this side writes: <local_callsign>.msg
    inside the connection directory."""
    return os.path.join(self._directory, f"{self.local_callsign}.msg")
|
|
||||||
@property
def remote_file_path(self) -> str:
    """Path of the message file written by the remote side: <remote_callsign>.msg
    inside the connection directory."""
    return os.path.join(self._directory, f"{self.remote_callsign}.msg")
|
|
||||||
def check_closed(self):
    """Refresh and report whether this connection is closed.

    Side effect: transitions state to DISCONNECTED when a close was requested
    or the backing directory disappeared. Returns True when not CONNECTED.
    """
    if self.closing:
        self._state = ConnectionState.DISCONNECTED
    if self._state is not ConnectionState.CONNECTED:
        return True
    if os.path.isdir(self._directory):
        return False
    self._state = ConnectionState.DISCONNECTED
    return True
|
|
||||||
def write_out(self, data: bytes):
    """Atomically publish *data* as this side's outgoing message file.

    Writes to '<file>.tmp' first and renames into place so the reader never
    observes a partially written file. Raises RuntimeError when the connection
    is closed or the previous outgoing message has not been consumed yet.
    """
    if self.check_closed():
        raise RuntimeError("[SIMPLE] Connection is closed. Cannot send.")
    if os.path.exists(self.file_path):
        raise RuntimeError("[SIMPLE] The outgoing message file already exists. State is wrong for sending.")
    tmp_path = self.file_path + ".tmp"
    if os.path.exists(tmp_path):
        os.remove(tmp_path)
    # Bug fix: use a context manager so the handle is flushed and closed before
    # the rename — the old `open(...).write(...)` leaked the file object.
    with open(tmp_path, 'wb') as out_file:
        out_file.write(data)
    os.rename(tmp_path, self.file_path)
|
|
||||||
def send_data(self, data: Union[bytes, bytearray]):
    """Queue raw bytes for sending; write a message file once a whole msgpack
    object has accumulated.

    Raises RuntimeError when the connection is closed.
    """
    if self.check_closed():
        raise RuntimeError("[SIMPLE] Connection is closed. Cannot send.")
    self._sent_data.feed(data)
    logging.debug(f"[SIMPLE] Sender added {data} to self.sent_data.feed")
    try:
        # unpack() succeeds only when a complete msgpack object has been fed;
        # re-serialize it and write it out as one message file.
        obj = self._sent_data.unpack()
        self.write_out(msgpack.packb(obj))
        logging.debug(f"[SIMPLE] Wrote complete binary message to {self.file_path}")
    except msgpack.OutOfData as e:
        # Not enough bytes yet for a full object — wait for more feed() calls.
        pass
|
|
||||||
def check_for_data(self) -> bool:
    """Monitors connection directory for data.

    Returns True when a message file from the remote side was found, consumed
    (deleted) and fed into self.data; False otherwise (including when closed).
    """
    if self.closing:
        self._state = ConnectionState.DISCONNECTED
    if self.check_closed():
        return False
    if not os.path.isfile(self.remote_file_path):
        return False
    # Bug fix: read via a context manager so the handle is closed promptly —
    # the old `open(...).read()` leaked the file object.
    with open(self.remote_file_path, 'rb') as in_file:
        data = in_file.read()
    os.remove(self.remote_file_path)
    logging.debug(f"[SIMPLE] {self.local_callsign} detected data from {self.remote_callsign}: {data}")
    self.data.feed(data)
    return True
@@ -1,151 +0,0 @@
|
|||||||
import re
|
|
||||||
import datetime
|
|
||||||
import tempfile
|
|
||||||
import tarfile
|
|
||||||
from typing import Union, Iterable, Tuple, Optional, IO
|
|
||||||
import os.path
|
|
||||||
from io import BytesIO, BufferedReader
|
|
||||||
import random
|
|
||||||
import string
|
|
||||||
|
|
||||||
def email_valid(email: str) -> bool:
    """Return True when *email* matches a simple address heuristic.

    Based on https://www.geeksforgeeks.org/check-if-email-address-valid-or-not-in-python/
    Bug fix: the TLD character class was [A-Z|a-z], which wrongly accepted a
    literal '|' inside the top-level domain; corrected to [A-Za-z].
    """
    regex = r'\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,7}\b'
    if re.fullmatch(regex, email):
        return True
    else:
        return False
|
|
||||||
def to_date_digits(index: datetime.datetime) -> str:
    """Serialize *index* as a zero-padded 14-digit YYYYMMDDHHMMSS string."""
    components = (
        (index.year, 4), (index.month, 2), (index.day, 2),
        (index.hour, 2), (index.minute, 2), (index.second, 2),
    )
    return "".join(str(value).zfill(width) for value, width in components)
|
|
||||||
def from_date_digits(index: str, tz: datetime.timezone = datetime.UTC) -> datetime:
|
|
||||||
ind = str(index)
|
|
||||||
if not ind.isdigit():
|
|
||||||
raise ValueError("Received invalid date digit string, containing non-digit chars.")
|
|
||||||
if len(ind) < 4:
|
|
||||||
raise ValueError("Received invalid date digit string, needs to at least by four digits for a year")
|
|
||||||
year = int(ind[:4])
|
|
||||||
month = 1
|
|
||||||
day = 1
|
|
||||||
hour = 0
|
|
||||||
minute = 0
|
|
||||||
second = 0
|
|
||||||
if len(ind) >= 6:
|
|
||||||
month = int(ind[4:6])
|
|
||||||
|
|
||||||
if len(ind) >= 8:
|
|
||||||
day = int(ind[6:8])
|
|
||||||
|
|
||||||
if len(ind) >= 10:
|
|
||||||
hour = int(ind[8:10])
|
|
||||||
|
|
||||||
if len(ind) >= 12:
|
|
||||||
minute = int(ind[10:12])
|
|
||||||
|
|
||||||
if len(ind) >= 14:
|
|
||||||
second = int(ind[12:14])
|
|
||||||
|
|
||||||
return datetime.datetime(year, month, day ,hour, minute, second, tzinfo=tz)
|
|
||||||
|
|
||||||
def tar_bytes(file: Union[str, Iterable]) -> bytes:
    """Creates a tar archive in a temporary file with the specified files at root level.
    Returns the bytes of the archive."""
    if type(file) is str:
        paths = [file]
    else:
        paths = [str(entry) for entry in file]
    with tempfile.TemporaryFile() as temp:
        archive = tarfile.TarFile(fileobj=temp, mode="w")
        for path in paths:
            # Flatten each file to the archive root, dropping its directory part.
            archive.add(path, arcname=os.path.basename(path))
        archive.close()
        temp.seek(0)
        return temp.read()
|
|
||||||
def bytes_to_tar_bytes(name: str, data: bytes) -> bytes:
    """Creates a tar archive with a single file of name <name> with <data> bytes as the contents"""
    buffer = BytesIO()
    archive = tarfile.TarFile(fileobj=buffer, mode="w")
    info = tarfile.TarInfo(name=name)
    info.size = len(data)
    archive.addfile(info, BytesIO(data))
    archive.close()
    return buffer.getvalue()
|
|
||||||
def bytes_tar_has_files(data: Union[bytes, IO]):
    """Return True when the tar archive in *data* (raw bytes or a file-like
    object) contains at least one regular file member."""
    source = BytesIO(data) if type(data) is bytes else data
    archive = tarfile.TarFile(fileobj=source, mode="r")
    return any(member.isfile() for member in archive.getmembers())
|
|
||||||
def multi_bytes_to_tar_bytes(objects: dict) -> bytes:
    """Create a tar archive containing one member per entry of *objects*.

    objects: mapping of member name -> bytes-like contents (passed through
    bytes()). Returns the raw bytes of the archive.

    (Doc fix: the previous docstring was copied from the single-file variant
    and described the wrong contract.)
    """
    with tempfile.TemporaryFile() as temp:
        tar_obj = tarfile.TarFile(fileobj=temp, mode="w")
        for name in objects:
            payload = bytes(objects[name])
            tar_info = tarfile.TarInfo(name=name)
            tar_info.size = len(payload)
            tar_obj.addfile(tar_info, BytesIO(payload))
        tar_obj.close()
        temp.seek(0)
        return temp.read()
|
|
||||||
def extract_tar_bytes(tarfile_bytes: bytes) -> Tuple[str, bytes]:
    """Takes the bytes of a tarfile, and returns the name and bytes of the first file in the archive."""
    archive = tarfile.TarFile(fileobj=BytesIO(tarfile_bytes), mode="r")
    for member in archive.getmembers():
        if member.isfile():
            return member.name, archive.extractfile(member).read()
    raise FileNotFoundError("No files found to extract from archive")
|
|
||||||
def random_string(length=8) -> str:
    """Return a random string of *length* ASCII letters and digits."""
    alphabet = string.ascii_letters + string.digits
    return "".join(random.choices(alphabet, k=length))
|
|
||||||
|
|
||||||
class TarFileExtractor(object):
    """Iterator over (basename, fileobj) pairs for the regular files in tar data.

    fileobj: file-like object positioned at tar data. Invalid or unreadable
    tar input makes the iterator empty instead of raising.
    """
    def __init__(self, fileobj: IO):
        self.fileobj = fileobj
        try:
            self.tar_file = tarfile.TarFile(fileobj=self.fileobj)
            self._raw_members = [m for m in self.tar_file.getmembers() if m.isfile()]
        except Exception:
            # Bug fix: was a bare `except:` which also swallowed SystemExit and
            # KeyboardInterrupt. Invalid tar data simply yields no members.
            self._raw_members = []
        self._count = 0

    def __iter__(self):
        return self

    # Python 3 compatibility
    def __next__(self):
        return self.next()

    def next(self) -> Tuple[str, IO]:
        """Return (basename, extracted file object) of the next regular member.

        Raises StopIteration when all file members have been consumed.
        """
        if self._count >= len(self._raw_members):
            raise StopIteration()
        member = self._raw_members[self._count]
        name = member.name
        if type(name) is bytes:
            name = name.decode()
        name = str(name)
        self._count = self._count + 1
        return os.path.basename(name), self.tar_file.extractfile(member)
@@ -1,191 +0,0 @@
|
|||||||
"""Package runs arbitrary commands/jobs via different mechanisms."""
|
|
||||||
from typing import Union,Optional,Iterable,Self
|
|
||||||
from enum import Enum
|
|
||||||
import datetime
|
|
||||||
from uuid import UUID, uuid4
|
|
||||||
from threading import Lock
|
|
||||||
import os.path
|
|
||||||
from packetserver.runner.constants import job_setup_script, job_end_script, container_setup_script, container_run_script
|
|
||||||
from packetserver.common.util import multi_bytes_to_tar_bytes, bytes_to_tar_bytes, TarFileExtractor
|
|
||||||
|
|
||||||
|
|
||||||
def scripts_tar() -> bytes:
    """Bundle the four container lifecycle shell scripts into a tar archive
    (as bytes) ready to upload into a container's /root/scripts directory."""
    scripts = {
        'job_setup_script.sh': job_setup_script.encode(),
        'job_end_script.sh': job_end_script.encode(),
        'container_run_script.sh': container_run_script.encode(),
        'container_setup_script.sh': container_setup_script.encode(),
    }
    return multi_bytes_to_tar_bytes(scripts)
|
|
||||||
class RunnerFile:
    """A file to be delivered into a runner's environment.

    Content comes either from raw bytes (*data*) or lazily from a local file
    (*source_path*); source_path wins when both are given. destination_path is
    where the file should land inside the runner; root_owned controls whether
    ownership is left as root after placement.
    """
    def __init__(self, destination_path: str, source_path: Optional[str] = None, data: bytes = b'',
                 root_owned: bool = False):
        """Raises ValueError when destination_path is empty/blank or when
        source_path is given but does not point to an existing file."""
        self._data = data
        self._source_path = ""
        if source_path is not None:
            if source_path.strip() != "":
                if not os.path.isfile(source_path.strip()):
                    raise ValueError("Source Path must point to a file.")
                self._source_path = source_path.strip()
        self.destination_path = destination_path.strip()
        if self.destination_path == "":
            raise ValueError("Destination path cannot be empty.")
        self.root_owned = root_owned

    def __repr__(self):
        return f"<RunnerFile: {self.basename}>"

    @property
    def basename(self) -> str:
        """Final path component of the destination."""
        return os.path.basename(self.destination_path)

    @property
    def dirname(self) -> str:
        """Directory portion of the destination."""
        return os.path.dirname(self.destination_path)

    @property
    def isabs(self) -> bool:
        """True when the destination is an absolute path."""
        return os.path.isabs(self.destination_path)

    @property
    def data(self) -> bytes:
        """File contents: the stored bytes, or the source file read on demand."""
        if self._source_path == "":
            return self._data
        # Bug fix: read via a context manager so the handle is closed promptly —
        # the old `open(...).read()` leaked the file object on every access.
        with open(self._source_path, "rb") as src:
            return src.read()

    def tar_data(self) -> bytes:
        """Contents wrapped in a single-member tar archive named `basename`."""
        return bytes_to_tar_bytes(self.basename, self.data)
|
|
||||||
class RunnerStatus(Enum):
    """Lifecycle states of a Runner.

    Progression: CREATED -> QUEUED -> STARTING -> RUNNING -> STOPPING,
    terminating in SUCCESSFUL, FAILED or TIMED_OUT.
    """
    CREATED = 1      # constructed but not yet queued
    QUEUED = 2       # waiting for capacity
    STARTING = 3     # environment being prepared
    RUNNING = 4      # job command executing
    STOPPING = 5     # job done, cleanup/artifact collection in progress
    SUCCESSFUL = 6   # terminal: exit code 0
    FAILED = 7       # terminal: non-zero exit or setup failure
    TIMED_OUT = 8    # terminal: exceeded its timeout
|
|
||||||
class Runner:
    """Abstract class to take arguments and run a job and track the status and results."""
    def __init__(self, username: str, args: Union[str, list[str]], job_id: int, environment: Optional[dict] = None,
                 timeout_secs: int = 300, labels: Optional[list] = None,
                 files: Optional[list[RunnerFile]] = None):
        """username: job owner (normalized to lowercase). args: shell string or
        argv list to execute. job_id: numeric job identifier. environment:
        extra environment variables. timeout_secs: allowed runtime in seconds.
        labels: arbitrary tags. files: RunnerFile objects to place before running.
        """
        # Copy caller-supplied collections so later mutation by the caller
        # cannot affect this runner.
        self.files = []
        if files is not None:
            for f in files:
                self.files.append(f)
        self.status = RunnerStatus.CREATED
        self.username = username.strip().lower()
        self.args = args
        self.job_id = int(job_id)
        self.env = {}
        # NOTE(review): started_at is naive local time here while created_at
        # below is timezone-aware UTC — confirm whether the mix is intentional.
        self.started_at = datetime.datetime.now()
        self.finished_at = None
        # (exit_code, (stdout_bytes, stderr_bytes)) as produced by an exec call.
        self._result = (0,(b'', b''))
        self._artifact_archive = b''
        if environment:
            for key in environment:
                self.env[key] = environment[key]
        self.labels = []
        if type(labels) is list:
            for l in labels:
                self.labels.append(l)
        self.timeout_seconds = timeout_secs
        self.created_at = datetime.datetime.now(datetime.UTC)

    def __repr__(self):
        return f"<{type(self).__name__}: {self.username}[{self.job_id}] - {self.status.name}>"

    def is_finished(self) -> bool:
        """True when the runner reached a terminal state."""
        if self.status in [RunnerStatus.TIMED_OUT, RunnerStatus.SUCCESSFUL, RunnerStatus.FAILED]:
            return True
        return False

    def is_in_process(self) -> bool:
        """True while the runner is anywhere between queued and stopping."""
        if self.status in [RunnerStatus.QUEUED, RunnerStatus.RUNNING, RunnerStatus.STARTING, RunnerStatus.STOPPING]:
            return True
        return False

    def start(self):
        """Record the start time. Subclasses extend this to launch real work.

        NOTE(review): this sets `started`, not the `started_at` attribute from
        __init__ — confirm which one downstream code reads.
        """
        self.started = datetime.datetime.now()

    def stop(self):
        """Abstract: stop the running job."""
        raise RuntimeError("Attempting to stop an abstract class.")

    @property
    def output(self) -> bytes:
        """Abstract: captured stdout of the job."""
        raise RuntimeError("Attempting to interact with an abstract class.")

    def output_str(self) -> str:
        """Abstract: captured stdout decoded to text."""
        raise RuntimeError("Attempting to interact with an abstract class.")

    @property
    def errors(self) -> bytes:
        """Abstract: captured stderr of the job."""
        raise RuntimeError("Attempting to interact with an abstract class.")

    @property
    def errors_str(self) -> str:
        """Abstract: captured stderr decoded to text."""
        raise RuntimeError("Attempting to interact with an abstract class.")

    @property
    def return_code(self) -> Optional[int]:
        """Abstract: job exit code, or None while unavailable."""
        raise RuntimeError("Attempting to interact with an abstract class.")

    @property
    def artifacts(self) -> TarFileExtractor:
        """Abstract: iterator over artifacts produced by the job."""
        raise RuntimeError("Attempting to interact with an abstract class.")

    @property
    def has_artifacts(self) -> bool:
        """Abstract: whether the job produced any artifacts."""
        raise RuntimeError("Abstract method called.")
|
|
||||||
class Orchestrator:
    """Abstract class holds configuration and also tracks runners through their lifecycle. Prepares environments to
    run jobs in runners."""
    def __init__(self):
        self.runners = []          # all Runner objects currently tracked
        self.runner_lock = Lock()  # guards self.runners across threads

    def get_finished_runners(self) -> list[Runner]:
        """Return all tracked runners that reached a terminal state."""
        return [r for r in self.runners if r.is_finished()]

    def remove_runner(self, job_id: int):
        """Stop tracking the runner with *job_id*; no-op when unknown."""
        runner_object = None
        for r in self.runners:
            if r.job_id == job_id:
                runner_object = r
                break
        if runner_object is not None:
            self.runners.remove(runner_object)

    def get_runner_by_id(self, job_id: int) -> Optional[Runner]:
        """Return the tracked runner with *job_id* (None implicitly when absent)."""
        for r in self.runners:
            if r.job_id == job_id:
                return r

    def runners_available(self) -> bool:
        """Abstract. True if a runner can be started. False, if queue is full or orchestrator not ready."""
        pass

    def new_runner(self, username: str, args: Iterable[str], job_id: int, environment: Optional[dict] = None,
                   timeout_secs: int = 300, refresh_db: bool = True, labels: Optional[list] = None,
                   files: Optional[list[RunnerFile]] = None) -> Runner:
        """Abstract. Create (and typically queue) a runner for the given job."""
        pass

    def manage_lifecycle(self):
        """When called, updates runner statuses and performs any housekeeping."""
        pass

    def start(self):
        """Do any setup and then be ready to operate"""
        pass

    def stop(self):
        """Do any cleanup needed."""
        pass
@@ -1,48 +0,0 @@
|
|||||||
from packetserver.common.util import multi_bytes_to_tar_bytes
|
|
||||||
|
|
||||||
# Runs once (as root) right after a user container is created.
container_setup_script = """#!/bin/bash
set -e
echo "Place holder for now."
"""

# Container entrypoint body: creates the per-user account and directories,
# then blocks until /root/ENDNOW appears, keeping the container alive.
container_run_script = """#!/bin/bash
set -e
echo "Creating user ${PACKETSERVER_USER}"
useradd -m -s /bin/bash "${PACKETSERVER_USER}" -u 1000
echo "Creating directories."
mkdir -pv "/home/${PACKETSERVER_USER}/.packetserver"
mkdir -pv /artifact_output
chown -Rv ${PACKETSERVER_USER} "/home/${PACKETSERVER_USER}"
echo
echo "Looping. Waiting for /root/ENDNOW to exist before stopping."
while ! [ -f "/root/ENDNOW" ]; do
sleep 1
done
echo "Ending now.."
"""

# Runs (as root) before each job: creates the per-job working and artifact
# directories under the user's home and hands ownership to the user.
job_setup_script = """#!/bin/bash
set -e
PACKETSERVER_JOB_DIR="/home/${PACKETSERVER_USER}/.packetserver/${PACKETSERVER_JOBID}"
mkdir -pv "${PACKETSERVER_JOB_DIR}/artifacts"
chown ${PACKETSERVER_USER} "/home/${PACKETSERVER_USER}"
chown -R ${PACKETSERVER_USER} "${PACKETSERVER_JOB_DIR}"
"""

# Runs (as root) after each job: tars the artifacts directory into
# /artifact_output/<jobid>.tar.gz and deletes the job directory.
job_end_script = """#!/bin/bash
set -e
PACKETSERVER_JOB_DIR="/home/$PACKETSERVER_USER/.packetserver/${PACKETSERVER_JOBID}"
PACKETSERVER_ARTIFACT_DIR="${PACKETSERVER_JOB_DIR}/artifacts"
PACKETSERVER_ARTIFACT_TAR="/artifact_output/${PACKETSERVER_JOBID}.tar.gz"
tar -czvf "${PACKETSERVER_ARTIFACT_TAR}" -C "${PACKETSERVER_ARTIFACT_DIR}" .
rm -rfv "${PACKETSERVER_JOB_DIR}"
"""

# Bootstrap run as the container command: waits for the run script to be
# uploaded (see scripts_tar / put_archive) and then executes it.
podman_bash_start = """ echo 'waiting for /root/scripts/container_run_script.sh to exist'
while ! [ -f '/root/scripts/container_run_script.sh' ]; do
sleep .1
done
echo 'entering /root/scripts/container_run_script.sh ...'
bash /root/scripts/container_run_script.sh
"""

# argv passed to `podman create` as the container command.
podman_run_command = ["bash", "-c", podman_bash_start]
||||||
@@ -1,463 +0,0 @@
|
|||||||
"""Uses podman to run jobs in containers."""
|
|
||||||
import time
|
|
||||||
|
|
||||||
from ZEO import client
|
|
||||||
|
|
||||||
from . import Runner, Orchestrator, RunnerStatus, RunnerFile, scripts_tar
|
|
||||||
from packetserver.runner.constants import podman_run_command
|
|
||||||
from urllib.parse import urlparse
|
|
||||||
from collections import namedtuple
|
|
||||||
from typing import Optional, Iterable, Union
|
|
||||||
from traceback import format_exc
|
|
||||||
import podman
|
|
||||||
import gzip
|
|
||||||
from podman.domain.containers import Container
|
|
||||||
import podman.errors
|
|
||||||
import os
|
|
||||||
import os.path
|
|
||||||
import logging
|
|
||||||
import datetime
|
|
||||||
from os.path import basename, dirname
|
|
||||||
from packetserver.common.util import bytes_to_tar_bytes, random_string, extract_tar_bytes, bytes_tar_has_files, \
|
|
||||||
TarFileExtractor
|
|
||||||
from packetserver import VERSION as packetserver_version
|
|
||||||
import re
|
|
||||||
from threading import Thread
|
|
||||||
from io import BytesIO
|
|
||||||
|
|
||||||
# Regex splitting one "KEY=value" entry from a container's Config.Env list
# into (key, value) groups; only alphanumeric keys/values are captured.
env_splitter_rex = '''([a-zA-Z0-9]+)=([a-zA-Z0-9]*)'''

# Tunables for the podman orchestrator: job timeouts (seconds), image to run,
# concurrency cap, idle container keepalive (seconds), container name prefix.
PodmanOptions = namedtuple("PodmanOptions", ["default_timeout", "max_timeout", "image_name",
                                             "max_active_jobs", "container_keepalive", "name_prefix"])
|
|
||||||
class PodmanRunner(Runner):
    """Runner that executes a job via `exec` inside an already-running podman
    container belonging to the job's user."""
    def __init__(self, username: str, args: Union[str, list[str]], job_id: int, container: Container,
                 environment: Optional[dict] = None, timeout_secs: int = 300, labels: Optional[list] = None,
                 files: list[RunnerFile] = None):
        """container must already be in state Running; raises ValueError otherwise."""
        super().__init__(username, args, job_id, environment=environment, timeout_secs=timeout_secs,
                         labels=labels, files=files)
        # gzipped tar of the job's artifact directory; empty until collected.
        self._artifact_archive = b''
        if not container.inspect()['State']['Running']:
            raise ValueError(f"Container {container} is not in state Running.")
        self.container = container
        self._thread = None  # worker Thread created by start()
        self.env['PACKETSERVER_JOBID'] = str(job_id)
        # In-container paths: per-job working dir and the artifact tarball the
        # job_end_script writes (see runner.constants).
        self.job_path = os.path.join("/home", self.username, ".packetserver", str(job_id))
        self.archive_path = os.path.join("/artifact_output", f"{str(job_id)}.tar.gz")

    def thread_runner(self):
        """Worker-thread body: exec the job command, run the cleanup script,
        collect artifacts, and set a terminal status."""
        self.status = RunnerStatus.RUNNING
        logging.debug(f"Thread for runner {self.job_id} started. Command for {(type(self.args))}:\n{self.args}")
        # run the exec call (demux=True splits stdout/stderr in the result)
        if type(self.args) is str:
            logging.debug(f"Running string: {self.args}")
            res = self.container.exec_run(cmd=self.args, environment=self.env, user=self.username, demux=True,
                                          workdir=self.job_path)
        else:
            logging.debug(f"Running iterable: {list(self.args)}")
            res = self.container.exec_run(cmd=list(self.args), environment=self.env, user=self.username, demux=True,
                                          workdir=self.job_path)
        logging.debug(str(res))
        # cleanup housekeeping
        self.status = RunnerStatus.STOPPING
        self._result = res
        # run cleanup script (tars artifacts and removes the job dir)
        logging.debug(f"Running cleanup script for {self.job_id}")
        end_res = self.container.exec_run("bash /root/scripts/job_end_script.sh",
                                          environment=self.env, user="root", tty=True)
        logging.debug(f"End result: {end_res}")
        if end_res[0] != 0:
            logging.error(f"End Job script failed:\n{end_res[1].decode()}")
        # collect any artifacts; a missing archive lands in the except branch
        try:
            retrieved_tar_bytes = b''.join(self.container.get_archive(self.archive_path)[0])
            # get_archive wraps the file in a tar; unwrap to the inner .tar.gz bytes
            art_tar_bytes = extract_tar_bytes(retrieved_tar_bytes)[1]
            logging.debug(f"bytes retrieved: {retrieved_tar_bytes}")
            if bytes_tar_has_files(gzip.GzipFile(fileobj=BytesIO(art_tar_bytes))):
                logging.debug("found artifacts; attaching to runner object")
                self._artifact_archive = art_tar_bytes
            else:
                logging.debug(f"no artifacts returned for job {self.job_id}")
        except:
            # Best-effort: failure to retrieve artifacts never fails the job.
            logging.warning(f"Error retrieving artifacts for {self.job_id}:\n{format_exc()}")
            self._artifact_archive = b''
        self.finished_at = datetime.datetime.now()
        # set final status to FAILED or SUCCEEDED
        if self.return_code == 0:
            self.status = RunnerStatus.SUCCESSFUL
        else:
            self.status = RunnerStatus.FAILED

    @property
    def has_artifacts(self) -> bool:
        # True once thread_runner stored a non-empty artifact archive.
        if self._artifact_archive == b'':
            return False
        else:
            return True

    @property
    def artifacts(self) -> TarFileExtractor:
        # Iterator over (name, fileobj) pairs; empty iterator when no artifacts.
        if self._artifact_archive == b'':
            return TarFileExtractor(BytesIO(b''))
        else:
            return TarFileExtractor(gzip.GzipFile(fileobj=BytesIO(self._artifact_archive)))

    @property
    def output(self) -> bytes:
        # stdout bytes from the demuxed exec result.
        return self._result[1][0]

    @property
    def output_str(self) -> str:
        # Decoded stdout; falls back to str() on undecodable bytes.
        # NOTE(review): this is a property here but a plain method on the Runner
        # base class — confirm which shape callers expect.
        try:
            output = self.output.decode()
        except:
            output = str(self.output)
        return output

    @property
    def errors(self) -> bytes:
        # stderr bytes from the demuxed exec result
        # (annotation corrected from `str` to match the base class).
        return self._result[1][1]

    @property
    def errors_str(self) -> str:
        # Decoded stderr; raises on undecodable bytes (unlike output_str).
        return self._result[1][1].decode()

    @property
    def return_code(self) -> int:
        # Exit code of the job exec call.
        return self._result[0]

    def start(self):
        """Run the job setup script, upload input files into the container,
        then launch the worker thread. Raises RuntimeError when setup fails."""
        logging.debug(f"Starting runner {self.job_id} for {self.username} with command:\n({type(self.args)}){self.args}")
        self.status = RunnerStatus.STARTING
        # Run job setup script
        logging.debug(f"Running job setup script for {self.job_id} runner")
        setup_res = self.container.exec_run("bash /root/scripts/job_setup_script.sh",
                                            environment=self.env, user="root", tty=True)
        logging.debug(f"Job {self.job_id} setup script:\n{str(setup_res[1])}")
        if setup_res[0] != 0:
            self.status = RunnerStatus.FAILED
            raise RuntimeError(f"Couldn't run setup scripts for {self.job_id}:\n{setup_res[1]}")
        # put files where they need to be
        for f in self.files:
            logging.debug(f"Adding file {f} for job {self.job_id}")
            if not f.isabs:
                # Relative destinations land inside the per-job directory.
                dest = os.path.join(self.job_path, f.destination_path)
                dirn = os.path.dirname(dest)
            else:
                dest = f.destination_path
                dirn = f.dirname
            if self.container.put_archive(dirn, f.tar_data()):
                logging.debug(f"Placed file {dest} for job {self.job_id}")
            else:
                logging.warning(f"Failed to place file {dest} for job {self.job_id}!!")
            if not f.root_owned:
                self.container.exec_run(f"chown -R {self.username} {dest}")
        # start thread
        logging.debug(f"Starting runner thread for {self.job_id}")
        self._thread = Thread(target=self.thread_runner)
        super().start()
        self._thread.start()
|
||||||
class PodmanOrchestrator(Orchestrator):
|
|
||||||
def __init__(self, uri: Optional[str] = None, options: Optional[PodmanOptions] = None):
    """uri: podman API endpoint; defaults to the rootless unix socket for the
    current uid. options: PodmanOptions tuning; built-in defaults otherwise.

    Probes the endpoint once at construction. Raises FileNotFoundError when a
    unix-socket path does not exist; the probe itself may raise on a bad endpoint.
    """
    super().__init__()
    self.started = False
    self.user_containers = {}  # container name -> last-used datetime (keepalive tracking)
    self.manager_thread = None
    self._client = None
    self._five_min_ticker = 600
    if uri:
        self.uri = uri
    else:
        self.uri = f"unix:///run/user/{os.getuid()}/podman/podman.sock"
    uri_parsed = urlparse(self.uri)
    if uri_parsed.scheme == "unix":
        if not os.path.exists(uri_parsed.path):
            raise FileNotFoundError(f"Podman socket not found: {self.uri}")
    # Probe the endpoint so a bad socket fails fast, then drop the client.
    test_client = self.new_client()
    logging.debug(f"Testing podman socket. Version: {test_client.info()}")
    self._client = None
    # NOTE(review): username_containers appears unused alongside user_containers
    # above — confirm whether one of the two is vestigial.
    self.username_containers = {}
    if options:
        self.opts = options
    else:
        self.opts = PodmanOptions(default_timeout=300, max_timeout=3600, image_name="debian", max_active_jobs=5,
                                  container_keepalive=300, name_prefix="packetserver_")
|
||||||
@property
def client(self) -> Optional[podman.PodmanClient]:
    """Most recently created PodmanClient (None until new_client() is called)."""
    active_client = self._client
    return active_client
|
||||||
def new_client(self) -> podman.PodmanClient:
    """Create a fresh PodmanClient for self.uri, cache it as the active
    client, and return it."""
    client = podman.PodmanClient(base_url=self.uri)
    self._client = client
    return client
|
||||||
def add_file_to_user_container(self, username: str, data: bytes, path: str, root_owned=False):
    """Place *data* at absolute *path* inside the user's container, creating
    the parent directory first.

    Raises RuntimeError when the mkdir inside the container fails.
    NOTE(review): root_owned is currently unused here — confirm whether a
    chown step was intended (compare PodmanRunner.start).
    """
    cli = self.client
    file_dir = dirname(path)
    tar_data_bytes = bytes_to_tar_bytes(basename(path), data)
    con = cli.containers.get(self.get_container_name(username))
    res = con.exec_run(cmd=["mkdir", "-p", file_dir], user="root")
    # Bug fix: exec_run returns (exit_code, output) and success is exit code 0.
    # The old check `res[0] != 1` treated success (0) as failure and vice versa.
    if res[0] != 0:
        raise RuntimeError("Couldn't create directory")
    con.put_archive(file_dir, tar_data_bytes)
|
||||||
def get_file_from_user_container(self, username: str, path: str) -> bytes:
    """Fetch *path* from the user's container and return the file's bytes.

    podman's get_archive wraps the file in a tar stream, so the result is
    unwrapped with extract_tar_bytes before returning.
    """
    container = self.client.containers.get(self.get_container_name(username))
    archive_bytes = b"".join(container.get_archive(path)[0])
    return extract_tar_bytes(archive_bytes)[1]
|
||||||
def podman_container_env(self, container_name: str) -> dict:
    """Parse Config.Env of *container_name* into a dict.

    Returns {} when the container does not exist; entries that don't match
    the KEY=value pattern are skipped.
    """
    splitter = re.compile(env_splitter_rex)
    try:
        container = self.client.containers.get(container_name)
        env = {}
        for entry in container.inspect()['Config']['Env']:
            matched = splitter.match(entry)
            if matched:
                key, value = matched.groups()
                env[key] = value
        return env
    except podman.errors.exceptions.NotFound as e:
        return {}
|
|
||||||
def podman_user_container_env(self, username: str) -> dict:
    """Environment dict of the given user's container ({} when absent)."""
    return self.podman_container_env(self.get_container_name(username))
|
||||||
|
|
||||||
def podman_start_user_container(self, username: str) -> Container:
    """Create and start a per-user container, upload the lifecycle scripts,
    and run the container setup script.

    Raises RuntimeError when the container never reaches 'running' or when
    script upload/setup fails; partial containers are cleaned up first.
    """
    container_env = {
        "PACKETSERVER_VERSION": packetserver_version,
        "PACKETSERVER_USER": username.strip().lower()
    }
    logging.debug(f"Starting user container for {username} with command {podman_run_command}")
    con = self.client.containers.create(self.opts.image_name, name=self.get_container_name(username),
                                        command=podman_run_command,
                                        environment=container_env, user="root")
    con.start()
    logging.debug(f"Container started for {username} from image {self.opts.image_name}")
    started_at = datetime.datetime.now()
    logging.debug(f"Container state: \n{con.inspect()['State']}")
    # Poll until the container settles into 'exited' or 'running'.
    while con.inspect()['State']['Status'] not in ['exited', 'running']:
        logging.debug("Container state not in ['exited', 'running']")
        now = datetime.datetime.now()
        if (now - started_at).total_seconds() > 300:
            # NOTE(review): after this 5-minute stop/remove the loop keeps
            # polling the removed container instead of raising/breaking —
            # confirm the intended control flow.
            con.stop()
            con.remove()
        time.sleep(.1)
    time.sleep(.5)
    if con.inspect()['State']['Status'] != 'running':
        logging.debug(f"Container for {username} isn't running. Cleaning it up.")
        # Best-effort cleanup: ignore failures stopping/renaming/removing.
        try:
            con.stop()
        except:
            pass
        try:
            con.rename(f"{self.get_container_name(username)}_old")
            con.remove()
        except:
            pass
        raise RuntimeError(f"Couldn't start container for user {username}")
    # Upload the lifecycle scripts the runner will exec later.
    if not con.put_archive('/root/scripts', scripts_tar()):
        con.stop()
        con.remove()
        raise RuntimeError("Failed to upload job scripts to container.")
    res = con.exec_run(cmd=["bash", "/root/scripts/container_setup_script.sh"], tty=True, user="root")
    logging.debug(f"Container setup script run:\n{res[1].decode()}\nExit Code: {res[0]}")
    if res[0] != 0:
        logging.error(f"Container setup script failed:\n{res[1].decode()}\nExit Code: {res[0]}")
        con.stop()
        con.remove()
        raise RuntimeError(f"Container setup script failed:\n{res[1].decode()}\nExit Code: {res[0]}")
    # Record keepalive timestamp for this user's container.
    self.touch_user_container(username)
    return con
|
|
||||||
def podman_remove_container_name(self, container_name: str):
    """Best-effort teardown of a container by name.

    Signals the run script to exit (via /root/ENDNOW), renames the container
    out of the way so its name frees up immediately, stops it if needed, and
    removes it. Individual step failures are logged, never raised; a missing
    container is a no-op.
    """
    cli = self.client
    logging.debug(f"Attempting to remove container named {container_name}")
    try:
        con = cli.containers.get(container_name)
        if con.inspect()['State']['Status'] == 'running':
            # The container run script watches for /root/ENDNOW and exits itself.
            con.exec_run(cmd="touch /root/ENDNOW", user="root")
            time.sleep(1)
    except podman.errors.exceptions.NotFound as e:
        logging.warning(f"Didn't find container named {container_name}")
        return
    try:
        # Rename first so a replacement container can claim the name right away.
        con.rename(f"{container_name}_{random_string()}")
    except:
        logging.error(f"Couldn't rename container:\n{format_exc()}")
    if con.inspect()['State']['Status'] != 'exited':
        try:
            con.stop(timeout=10)
        except:
            logging.error(f"Couldn't stop container:\n{format_exc()}")
    try:
        con.remove()
    except:
        logging.error(f"Couldn't remove container:\n{format_exc()}")
    return
|
|
||||||
def podman_stop_user_container(self, username: str):
    """Tear down the user's container and drop its keepalive entry."""
    name = self.get_container_name(username)
    self.podman_remove_container_name(name)
    if name in self.user_containers:
        del self.user_containers[name]
|
|
||||||
def podman_user_container_exists(self, username: str) -> bool:
    """Return True if a podman container for this user currently exists."""
    try:
        self.client.containers.get(self.get_container_name(username))
    except podman.errors.exceptions.NotFound:
        return False
    return True
|
|
||||||
def podman_run_command_simple(self, username: str, command: Iterable[str], as_root: bool = True) -> int:
    """Runs command defined by arguments iterable in container. As root by default. Returns exit code."""
    con = self.client.containers.get(self.get_container_name(username))
    run_as = 'root' if as_root else username.lower().strip()
    return con.exec_run(list(command), user=run_as)[0]
|
|
||||||
def clean_orphaned_containers(self):
    """Remove prefix-matching containers that we are not currently tracking."""
    cli = self.client
    for candidate in cli.containers.list(all=True):
        name = str(candidate.name)
        if self.opts.name_prefix in name and name not in self.user_containers:
            self.podman_remove_container_name(name)
|
|
||||||
def get_container_name(self, username: str) -> str:
    """Canonical container name: configured prefix + normalized callsign."""
    return f"{self.opts.name_prefix}{username.lower().strip()}"
||||||
|
|
||||||
def get_username_from_container_name(self, container_name: str) -> str:
    """Inverse of get_container_name: strip the configured prefix.

    Raises ValueError when the name is not a user container. Fix: the
    original tested ``prefix in name`` and used ``str.replace``, which
    strips EVERY occurrence of the prefix (wrong if a username happens to
    contain it); we now require a leading prefix and strip only that.
    """
    if not container_name.startswith(self.opts.name_prefix):
        raise ValueError(f"{container_name} is not a user container")
    return container_name.removeprefix(self.opts.name_prefix)
|
|
||||||
|
|
||||||
def touch_user_container(self, username: str):
    """Record activity on the user's container (resets its keepalive timer)."""
    name = self.get_container_name(username)
    self.user_containers[name] = datetime.datetime.now()
|
|
||||||
def start_user_container(self, username: str) -> Container:
    """Return the user's container, creating and starting it when absent."""
    if self.podman_user_container_exists(username):
        return self.client.containers.get(self.get_container_name(username))
    return self.podman_start_user_container(username)
|
|
||||||
def clean_containers(self):
    """Checks running containers and stops them if they have been running too long."""
    now = datetime.datetime.now()
    stale = [name for name, last_seen in self.user_containers.items()
             if (now - last_seen).total_seconds() > self.opts.container_keepalive]
    for name in stale:
        logging.debug(f"Container {name} no activity for {self.opts.container_keepalive} seconds. Clearing.")
        self.podman_remove_container_name(name)
        del self.user_containers[name]
|
|
||||||
|
|
||||||
def user_runners_in_process(self, username: str) -> int:
    """Count this user's runners that are still in process."""
    un = username.strip().lower()
    return sum(1 for r in self.runners if r.is_in_process and r.username == un)
|
|
||||||
def user_running(self, username: str) -> bool:
    """True when the user has at least one runner still in process."""
    return self.user_runners_in_process(username) > 0
|
|
||||||
def runners_in_process(self) -> int:
    """Count runners (for all users) that have not finished yet."""
    return sum(1 for r in self.runners if not r.is_finished())
|
|
||||||
def runners_available(self) -> bool:
    """True when the orchestrator is started and a job slot is free."""
    if not self.started:
        return False
    return self.runners_in_process() < self.opts.max_active_jobs
|
|
||||||
def new_runner(self, username: str, args: Union[str, list[str]], job_id: int, environment: Optional[dict] = None,
               timeout_secs: int = 300, refresh_db: bool = True, labels: Optional[list] = None,
               files: list[RunnerFile] = None) -> Optional[PodmanRunner]:
    """Create, register and start a PodmanRunner for a queued job.

    Returns None (with a warning logged) when the orchestrator is not
    started or no runner slot is free. Fix: ``timeout_secs`` was annotated
    ``str`` although its default is the integer 300 and it is forwarded as
    a timeout in seconds — annotation corrected to ``int``.
    ``refresh_db`` is accepted but not used here.
    """
    if not self.started:
        logging.warning("Attempted to queue a runner when not started")
        return None
    with self.runner_lock:
        if not self.runners_available():
            logging.warning("Attempted to queue a runner when no runner slots available.")
            return None
        con = self.start_user_container(username)
        logging.debug(f"Started a container for {username} successfully.")
        self.touch_user_container(username)
        logging.debug(f"Queuing a runner on container {con}, with command '{args}' of type '{type(args)}'")
        runner = PodmanRunner(username, args, job_id, con, environment=environment, timeout_secs=timeout_secs,
                              labels=labels, files=files)
        self.runners.append(runner)
        runner.start()
        return runner
|
|
||||||
def manage_lifecycle(self):
    """One maintenance pass: keep busy users' containers alive, expire idle
    ones, and periodically sweep orphans."""
    if not self.started:
        return
    with self.runner_lock:
        for active in self.runners:
            if not active.is_finished():
                self.touch_user_container(active.username)
        self.clean_containers()
    # NOTE(review): _five_min_ticker is not advanced in this method — it is
    # assumed to be incremented elsewhere; confirm against the full class.
    if self._five_min_ticker >= 600:
        self.clean_orphaned_containers()
        self._five_min_ticker = 0
|
|
||||||
def manager(self):
    """Background loop: run manage_lifecycle twice per second while started."""
    logging.debug("Starting podman orchestrator thread.")
    while self.started:
        self.manage_lifecycle()
        time.sleep(.5)
    logging.debug("Stopping podman orchestrator thread.")
|
|
||||||
def start(self):
    """Connect to podman, sweep leftover containers, and launch the manager thread."""
    if self.started:
        return
    self.new_client()
    self.clean_orphaned_containers()
    self._five_min_ticker = 0
    self.started = True
    self.manager_thread = Thread(target=self.manager)
    self.manager_thread.start()
|
|
||||||
def __del__(self):
    """Best-effort shutdown when the orchestrator is garbage-collected."""
    if self.started:
        self.stop()
|
|
||||||
def stop(self):
    """Stop the manager thread and remove every managed container."""
    logging.debug("Stopping podman orchestrator.")
    self.started = False
    # Touch the client property so cleanup below has a live connection.
    cli = self.client
    self.user_containers = {}
    self.clean_orphaned_containers()
    if self.manager_thread is not None:
        logging.debug("Joining orchestrator manager thread.")
        self.manager_thread.join(timeout=15)
        logging.debug("Orchestrator manager thread stopped")
    self.manager_thread = None
    self._client = None
@@ -1,329 +0,0 @@
|
|||||||
import datetime
|
|
||||||
import tempfile
|
|
||||||
|
|
||||||
import pe.app
|
|
||||||
from packetserver.common import Response, Message, Request, PacketServerConnection, send_response, send_blank_response
|
|
||||||
from packetserver.server.constants import default_server_config
|
|
||||||
from packetserver.server.users import User
|
|
||||||
from copy import deepcopy
|
|
||||||
import ax25
|
|
||||||
from pathlib import Path
|
|
||||||
import ZODB, ZODB.FileStorage
|
|
||||||
from BTrees.OOBTree import OOBTree
|
|
||||||
from persistent.mapping import PersistentMapping
|
|
||||||
from persistent.list import PersistentList
|
|
||||||
from packetserver.server.requests import standard_handlers
|
|
||||||
import logging
|
|
||||||
import signal
|
|
||||||
import time
|
|
||||||
from msgpack.exceptions import OutOfData
|
|
||||||
from typing import Callable, Self, Union
|
|
||||||
from traceback import format_exc
|
|
||||||
from os import linesep
|
|
||||||
from shutil import rmtree
|
|
||||||
from threading import Thread
|
|
||||||
from packetserver.server.jobs import get_orchestrator_from_config, Job, JobStatus
|
|
||||||
from packetserver.runner import RunnerStatus, RunnerFile, Orchestrator, Runner
|
|
||||||
|
|
||||||
VERSION="0.4.1"
|
|
||||||
|
|
||||||
def init_bulletins(root: PersistentMapping):
    """Ensure the bulletin list and its id counter exist in the DB root."""
    if 'bulletins' not in root:
        root['bulletins'] = PersistentList()
    root.setdefault('bulletin_counter', 0)
|
|
||||||
class Server:
|
|
||||||
def __init__(self, pe_server: str, port: int, server_callsign: str, data_dir: str = None, zeo: bool = True):
    """Create a packet BBS server.

    Validates the callsign, prepares the data directory, opens the ZODB
    file storage once to seed all required root data structures (config,
    users, messages, objects, jobs, bulletins), optionally builds a job
    orchestrator from the stored config, wires packet-engine callbacks and
    signal handlers, then closes the DB again (start_db reopens it).
    Fix: "jobss bucket missing" log typo corrected.
    """
    if not ax25.Address.valid_call(server_callsign):
        raise ValueError(f"Provided callsign '{server_callsign}' is invalid.")
    self.callsign = server_callsign
    self.pe_server = pe_server
    self.pe_port = port
    self.handlers = deepcopy(standard_handlers)
    self.zeo_addr = None
    self.zeo_stop = None
    self.zeo = zeo
    self.started = False
    self.orchestrator = None
    self.worker_thread = None
    self.check_job_queue = True
    self.last_check_job_queue = datetime.datetime.now()
    self.job_check_interval = 60
    self.quick_job = False
    if data_dir:
        data_path = Path(data_dir)
    else:
        data_path = Path.home().joinpath(".packetserver")
    if data_path.is_dir():
        if data_path.joinpath("data.zopedb").exists():
            if not data_path.joinpath("data.zopedb").is_file():
                raise FileExistsError("data.zopedb exists as non-file in specified path")
        self.home_dir = data_path
    else:
        if data_path.exists():
            raise FileExistsError(f"Non-Directory path '{data_dir}' already exists.")
        else:
            data_path.mkdir()
            self.home_dir = data_path
    # Open the DB once just to make sure all root buckets exist.
    self.storage = ZODB.FileStorage.FileStorage(self.data_file)
    self.db = ZODB.DB(self.storage)
    with self.db.transaction() as conn:
        logging.debug(f"checking for datastructures: conn.root.keys(): {list(conn.root().keys())}")
        if 'config' not in conn.root():
            logging.debug("no config, writing blank default config")
            conn.root.config = PersistentMapping(deepcopy(default_server_config))
            conn.root.config['blacklist'] = PersistentList()
        if 'SYSTEM' not in conn.root.config['blacklist']:
            logging.debug("Adding 'SYSTEM' to blacklist in case someone feels like violating FCC rules.")
            conn.root.config['blacklist'].append('SYSTEM')
        if 'users' not in conn.root():
            logging.debug("users missing, creating bucket")
            conn.root.users = PersistentMapping()
        if 'messages' not in conn.root():
            logging.debug("messages container missing, creating bucket")
            conn.root.messages = PersistentMapping()
        if 'SYSTEM' not in conn.root.users:
            logging.debug("Creating system user for first time.")
            User('SYSTEM', hidden=True, enabled=False).write_new(conn.root())
        if 'objects' not in conn.root():
            logging.debug("objects bucket missing, creating")
            conn.root.objects = OOBTree()
        if 'jobs' not in conn.root():
            logging.debug("jobs bucket missing, creating")  # typo fixed: was "jobss"
            conn.root.jobs = OOBTree()
        if 'job_queue' not in conn.root():
            conn.root.job_queue = PersistentList()
        if 'user_jobs' not in conn.root():
            conn.root.user_jobs = PersistentMapping()
        init_bulletins(conn.root())
        if ('jobs_enabled' in conn.root.config) and conn.root.config['jobs_enabled']:
            logging.debug(conn.root.config['jobs_enabled'])
            logging.debug(conn.root.config['jobs_config'])
            if 'runner' in conn.root.config['jobs_config']:
                val = str(conn.root.config['jobs_config']['runner']).lower().strip()
                if val in ['podman']:
                    logging.debug(f"Enabling {val} orchestrator")
                    self.orchestrator = get_orchestrator_from_config(conn.root.config['jobs_config'])
    self.app = pe.app.Application()
    PacketServerConnection.receive_subscribers.append(lambda x: self.server_receiver(x))
    PacketServerConnection.connection_subscribers.append(lambda x: self.server_connection_bouncer(x))
    signal.signal(signal.SIGINT, self.exit_gracefully)
    signal.signal(signal.SIGTERM, self.exit_gracefully)
    # Close again; start_db() reopens (optionally behind ZEO).
    self.db.close()
    self.storage.close()
|
|
||||||
|
|
||||||
@property
def data_file(self) -> str:
    """Absolute path of the ZODB FileStorage database file."""
    return str(Path(self.home_dir) / 'data.zopedb')
|
|
||||||
|
|
||||||
def ping_job_queue(self):
    """Flag the job queue for a check and reset the poll interval.

    A pending "quick job" gets one short 5-second final timer before the
    interval returns to the normal 60 seconds.
    """
    self.check_job_queue = True
    self.last_check_job_queue = datetime.datetime.now()
    if not self.quick_job:
        self.job_check_interval = 60
    else:
        logging.debug("Setting the final quick job timer.")
        self.job_check_interval = 5
        self.quick_job = False
|
|
||||||
|
|
||||||
def server_connection_bouncer(self, conn: PacketServerConnection):
    """On a new connection: enforce the blacklist and ensure a User record exists."""
    logging.debug("new connection bouncer checking user status")
    blacklisted = False
    base = ax25.Address(conn.remote_callsign).call
    with self.db.transaction() as storage:
        # Blacklist check.
        if 'blacklist' in storage.root.config:
            bl = storage.root.config['blacklist']
            logging.debug(f"A blacklist exists: {bl}")
            logging.debug(f"Checking callsign {base.upper()}")
            if base.upper() in bl:
                logging.debug(f"Connection from blacklisted callsign {base}")
                conn.closing = True
                blacklisted = True
        # User object check — create on first contact.
        logging.debug(f"checking user existence for {base}")
        logging.debug(f"users in db right now: {list(storage.root.users.keys())}")
        if base in storage.root.users:
            logging.debug(f"User {base} exists in db.")
            storage.root.users[base].seen()
        else:
            logging.debug(f"User {base} doesn't exist in db")
            logging.info(f"Creating new user {base}")
            User(base.upper().strip()).write_new(storage.root())
    if blacklisted:
        # Give the link up to ~5s to come up so close() is honored.
        for _ in range(10):
            time.sleep(.5)
            if conn.state.name == "CONNECTED":
                break
        conn.close()
|
|
||||||
|
|
||||||
def handle_request(self, req: Request, conn: PacketServerConnection):
    """Handles a proper request by handing off to the appropriate function depending on method and Path."""
    logging.debug(f"asked to handle request: {req}")
    if conn.closing:
        logging.debug("Connection marked as closing. Ignoring it.")
        return
    req_root_path = req.path.split("/")[0]
    # Job-related or explicitly "quick" requests tighten the queue poll timer.
    if ('quick' in req.vars) or (req_root_path == "job"):
        logging.debug("Setting quick job timer for a quick job.")
        self.job_check_interval = 8
        self.quick_job = True
    handler = self.handlers.get(req_root_path)
    if handler is not None:
        logging.debug(f"found handler for req {req}")
        handler(req, conn, self.db)
        return
    logging.warning(f"unhandled request found: {req}")
    send_blank_response(conn, req, status_code=404)
|
|
||||||
|
|
||||||
def process_incoming_data(self, connection: PacketServerConnection):
    """Handles incoming data.

    Drains complete msgpack messages from the connection buffer, turns
    each into a Request, and dispatches it. Stops when only a partial
    message remains. Fix: both ValueError handlers previously fell
    through without ``continue``, so a failed parse went on to use an
    undefined or stale ``msg``/``request`` (NameError / duplicate
    handling); they now skip to the next message.
    """
    logging.debug("Running process_incoming_data on connection")
    with connection.data_lock:
        logging.debug("Data lock acquired")
        while True:
            try:
                msg = Message.partial_unpack(connection.data.unpack())
                logging.debug(f"parsed a Message from data received")
            except OutOfData:
                logging.debug("no complete message yet, done until more data arrives")
                break
            except ValueError:
                connection.send_data(b"BAD REQUEST. COULD NOT PARSE INCOMING DATA AS PACKETSERVER MESSAGE")
                continue  # BUG FIX: don't fall through with an undefined/stale msg
            try:
                request = Request(msg)
                logging.debug(f"parsed Message into request {request}")
            except ValueError:
                connection.send_data(b"BAD REQUEST. DID NOT RECEIVE A REQUEST MESSAGE.")
                continue  # BUG FIX: don't fall through with an undefined/stale request
            logging.debug(f"attempting to handle request {request}")
            self.handle_request(request, connection)
            self.ping_job_queue()
            logging.debug("request handled")
|
|
||||||
|
|
||||||
def server_receiver(self, conn: PacketServerConnection):
    """Receive callback: process incoming data, never letting errors escape."""
    logging.debug("running server receiver")
    try:
        self.process_incoming_data(conn)
    except Exception:
        logging.debug(f"Unhandled exception while processing incoming data:\n{format_exc()}")
|
|
||||||
|
|
||||||
def register_path_handler(self, path_root: str, fn: Callable):
    """Register fn to handle requests whose path root matches (normalized)."""
    key = path_root.strip().lower()
    self.handlers[key] = fn
|
|
||||||
|
|
||||||
def server_worker(self):
    """When called, do things. Should get called every so often.

    One scheduler pass: refresh the queue-check flag on its interval, then
    (when an orchestrator is running) dispatch queued jobs into free runner
    slots and reap finished runners back into their Job records.
    Fixes: bare ``except:`` narrowed to ``except Exception:``; the dispatch
    loop could previously spin forever when runner slots were free but the
    queue was empty (no break on empty queue); logic split into helpers.
    """
    if not self.started:
        return
    now = datetime.datetime.now()
    if (now - self.last_check_job_queue).total_seconds() > self.job_check_interval:
        self.ping_job_queue()
    if (self.orchestrator is not None) and self.orchestrator.started:
        if self.check_job_queue:
            self._dispatch_queued_jobs()
        # Reap regardless of the queue flag so finished runners free their
        # slots promptly.
        self._reap_finished_runners()

def _dispatch_queued_jobs(self):
    """Start queued jobs while runner slots are free (server_worker helper)."""
    with self.db.transaction() as storage:
        while self.orchestrator.runners_available():
            if len(storage.root.job_queue) == 0:
                break  # BUG FIX: empty queue + free slots must not spin forever
            jid = storage.root.job_queue[0]
            try:
                logging.info(f"Starting job {jid}")
                job = Job.get_job_by_id(jid, storage.root())
            except Exception:
                logging.error(f"Error retrieving job {jid}")
                break
            runner = self.orchestrator.new_runner(job.owner, job.cmd, jid, environment=job.env, files=job.files)
            if runner is None:
                break
            storage.root.job_queue.remove(jid)
            job.status = JobStatus.RUNNING
            job.started_at = datetime.datetime.now()
            logging.info(f"Started job {job}")
        if len(storage.root.job_queue) == 0:
            self.check_job_queue = False
        else:
            self.ping_job_queue()

def _reap_finished_runners(self):
    """Sync finished runners to their jobs and remove them (server_worker helper)."""
    finished_runners = []
    for runner in self.orchestrator.runners:
        if not runner.is_finished():
            continue
        logging.debug(f"Finishing runner {runner}")
        with self.db.transaction() as storage:
            try:
                if Job.update_job_from_runner(runner, storage.root()):
                    finished_runners.append(runner)
                    logging.info(f"Runner {runner} successfully synced with jobs.")
                else:
                    logging.error(f"update_job_from_runner returned False.")
                    logging.error(f"Error while finishing runner and updating job status {runner}")
            except Exception:
                logging.error(f"Error while finishing runner and updating job status {runner}\n:{format_exc()}")
    for runner in finished_runners:
        logging.info(f"Removing completed runner {runner}")
        with self.orchestrator.runner_lock:
            self.orchestrator.runners.remove(runner)
|
|
||||||
|
|
||||||
def run_worker(self):
    """Intended to be running as a thread."""
    logging.info("Starting worker thread.")
    while self.started:
        self.server_worker()
        time.sleep(.5)
|
|
||||||
|
|
||||||
def __del__(self):
    """Ensure shutdown when the server object is garbage-collected."""
    self.stop()
|
|
||||||
|
|
||||||
def start_db(self):
    """Open the database: plain FileStorage, or spawn a ZEO server.

    In ZEO mode the server address is also written to zeo-address.txt in
    the data directory. Fixes: the address file was opened with
    ``open().write`` and never closed (now a ``with`` block); the bare
    ``except:`` is narrowed to OSError; the filename is computed before
    the try so the warning f-string can never hit an unbound name.
    """
    if not self.zeo:
        self.storage = ZODB.FileStorage.FileStorage(self.data_file)
        self.db = ZODB.DB(self.storage)
        return
    import ZEO
    address, stop = ZEO.server(path=self.data_file)
    self.zeo_addr = address
    self.zeo_stop = stop
    self.db = ZEO.DB(self.zeo_addr)
    logging.info(f"Starting ZEO server with address {self.zeo_addr}")
    zeo_address_file = str(self.home_dir.joinpath("zeo-address.txt"))
    try:
        with open(zeo_address_file, 'w') as f:
            f.write(f"{self.zeo_addr[0]}:{self.zeo_addr[1]}{linesep}")
        logging.info(f"Wrote ZEO server info to '{zeo_address_file}'")
    except OSError:
        logging.warning(f"Couldn't write ZEO server info to '{zeo_address_file}'\n{format_exc()}")
|
|
||||||
|
|
||||||
def start(self):
    """Bring the server online: DB, packet-engine session, orchestrator, worker thread."""
    self.start_db()
    self.app.start(self.pe_server, self.pe_port)
    self.app.register_callsigns(self.callsign)
    self.started = True
    if self.orchestrator is not None:
        logging.info(f"Starting orchestrator {self.orchestrator}")
        self.orchestrator.start()
    self.worker_thread = Thread(target=self.run_worker)
    self.worker_thread.start()
|
|
||||||
|
|
||||||
def exit_gracefully(self, signum, frame):
    """SIGINT/SIGTERM handler: shut everything down cleanly."""
    self.stop()
|
|
||||||
|
|
||||||
def stop_db(self):
    """Close storage and DB; in ZEO mode also stop the spawned ZEO server."""
    # NOTE(review): in ZEO mode self.storage is the FileStorage that
    # __init__ opened and already closed — confirm closing it again here
    # (rather than a ZEO-side storage) is intended.
    self.storage.close()
    self.db.close()
    if self.zeo:
        logging.info("Stopping ZEO.")
        self.zeo_stop()
|
|
||||||
|
|
||||||
def stop(self):
    """Close all client connections, the orchestrator, the packet engine, and the DB."""
    self.started = False
    cm = self.app._engine._active_handler._handlers[1]._connection_map
    for connection in cm._connections.values():
        connection.close()
    if self.orchestrator is not None:
        self.orchestrator.stop()
    self.app.stop()
    self.stop_db()
|
|
||||||
|
|
||||||
|
|
||||||
@@ -1,159 +0,0 @@
|
|||||||
import ax25
|
|
||||||
import persistent
|
|
||||||
import persistent.list
|
|
||||||
from persistent.mapping import PersistentMapping
|
|
||||||
import datetime
|
|
||||||
from typing import Self,Union,Optional
|
|
||||||
from packetserver.common import PacketServerConnection, Request, Response, Message, send_response, send_blank_response
|
|
||||||
import ZODB
|
|
||||||
import logging
|
|
||||||
from packetserver.server.users import user_authorized
|
|
||||||
|
|
||||||
def get_new_bulletin_id(root: PersistentMapping) -> int:
    """Return the next sequential bulletin id, creating/advancing the counter."""
    current = root.get('bulletin_counter', 0)
    root['bulletin_counter'] = current + 1
    return current
|
|
||||||
|
|
||||||
class Bulletin(persistent.Persistent):
    """A bulletin-board post with author, subject, body, and UTC timestamps."""

    @classmethod
    def get_bulletin_by_id(cls, bid: int, db_root: PersistentMapping) -> Optional[Self]:
        """Return the bulletin with the given id, or None if absent."""
        return next((b for b in db_root['bulletins'] if b.id == bid), None)

    @classmethod
    def get_recent_bulletins(cls, db_root: PersistentMapping, limit: int = None) -> list:
        """Bulletins sorted newest-first by updated_at; optionally capped at limit."""
        ordered = sorted(db_root['bulletins'], key=lambda b: b.updated_at, reverse=True)
        if limit:
            return ordered[:limit]
        return ordered

    def __init__(self, author: str, subject: str, text: str):
        self.author = author
        self.subject = subject
        self.body = text
        self.created_at = datetime.datetime.now(datetime.UTC)
        self.updated_at = datetime.datetime.now(datetime.UTC)
        self.id = None  # assigned on first write_new()

    @classmethod
    def from_dict(cls, bulletin_dict: dict) -> Self:
        """Alternate constructor from a dict with author/subject/body keys."""
        return cls(bulletin_dict['author'], bulletin_dict['subject'], bulletin_dict['body'])

    def write_new(self, db_root: PersistentMapping) -> int:
        """Assign an id, stamp times, append to the bulletin list; return the id."""
        if self.id is None:
            # NOTE(review): timestamping/append assumed to apply only on the
            # first write (id not yet assigned) — confirm against callers.
            self.id = get_new_bulletin_id(db_root)
            self.created_at = datetime.datetime.now(datetime.UTC)
            self.updated_at = datetime.datetime.now(datetime.UTC)
            db_root['bulletins'].append(self)
        return self.id

    def update_subject(self, new_text: str):
        """Replace the subject and bump updated_at."""
        self.subject = new_text
        self.updated_at = datetime.datetime.now(datetime.UTC)

    def update_body(self, new_text: str):
        """Replace the body and bump updated_at."""
        self.body = new_text
        self.updated_at = datetime.datetime.now(datetime.UTC)

    def to_dict(self):
        """Plain-dict form with ISO-format timestamps (for responses/export)."""
        return {
            "id": self.id,
            "author": self.author,
            "subject": self.subject,
            "body": self.body,
            "created_at": self.created_at.isoformat(),
            "updated_at": self.updated_at.isoformat()
        }
|
|
||||||
|
|
||||||
|
|
||||||
def handle_bulletin_get(req: Request, conn: PacketServerConnection, db: ZODB.DB):
    """GET /bulletin[/<id>] — reply with one bulletin or a recent list.

    An id may arrive either as the 'id' request var or as the second path
    segment (the path segment wins); 'limit' caps the list response.
    """
    response = Response.blank()
    parts = req.path.split("/")
    logging.debug(f"bulletin get path: {parts}")
    bid = None
    limit = None
    if 'limit' in req.vars:
        try:
            limit = int(req.vars['limit'])
        except ValueError:
            pass
    if 'id' in req.vars:
        try:
            bid = int(req.vars['id'])
        except ValueError:
            pass
    if len(parts) > 1:
        logging.debug("checking path for bulletin id")
        try:
            logging.debug(f"{parts[1]}")
            bid = int(parts[1].strip())
        except ValueError:
            pass
    logging.debug(f"bid is {bid}")
    with db.transaction() as txn:
        if bid is not None:
            logging.debug(f"retrieving bulletin: {bid}")
            bull = Bulletin.get_bulletin_by_id(bid, txn.root())
            if bull:
                response.payload = bull.to_dict()
                response.status_code = 200
            else:
                response.status_code = 404
        else:
            logging.debug("retrieving all bulletins")
            recent = Bulletin.get_recent_bulletins(txn.root(), limit=limit)
            response.payload = [b.to_dict() for b in recent]
            response.status_code = 200
    send_response(conn, response, req)
|
|
||||||
|
|
||||||
def handle_bulletin_post(req: Request, conn: PacketServerConnection, db: ZODB.DB):
    """POST /bulletin — create a bulletin from a {'subject', 'body'} payload.

    Author is taken from the connection's callsign. Fix: the original sent
    a 400 response for each failed validation but never returned, so a bad
    payload produced multiple responses and then raised (e.g. indexing a
    non-dict); validation now short-circuits with a single 400. The unused
    ``Response.blank()`` local was removed.
    """
    author = ax25.Address(conn.remote_callsign).call
    if (type(req.payload) is not dict
            or 'subject' not in req.payload
            or 'body' not in req.payload):
        send_blank_response(conn, req, 400, payload="Include dict in payload with subject and body")
        return
    b = Bulletin(author, str(req.payload['subject']), str(req.payload['body']))
    with db.transaction() as txn:
        bid = b.write_new(txn.root())
    send_blank_response(conn, req, status_code=201, payload={'bulletin_id': bid})
|
|
||||||
|
|
||||||
def handle_bulletin_update(req: Request, conn: PacketServerConnection, db: ZODB.DB):  # TODO
    """Placeholder: bulletin editing not implemented yet; replies with a blank response."""
    response = Response.blank()
    with db.transaction():
        pass
    send_response(conn, response, req)
|
|
||||||
|
|
||||||
def handle_bulletin_delete(req: Request, conn: PacketServerConnection, db: ZODB.DB):  # TODO
    """Placeholder: bulletin deletion not implemented yet; replies with a blank response."""
    response = Response.blank()
    with db.transaction():
        pass
    send_response(conn, response, req)
|
|
||||||
|
|
||||||
def bulletin_root_handler(req: Request, conn: PacketServerConnection, db: ZODB.DB):
    """Entry point for /bulletin: authorize the caller, then dispatch by method."""
    logging.debug(f"{req} being processed by bulletin_root_handler")
    if not user_authorized(conn, db):
        logging.debug(f"user {conn.remote_callsign} not authorized")
        send_blank_response(conn, req, status_code=401)
        return
    logging.debug("user is authorized")
    dispatch = {
        Request.Method.GET: handle_bulletin_get,
        Request.Method.POST: handle_bulletin_post,
    }
    target = dispatch.get(req.method)
    if target is None:
        send_blank_response(conn, req, status_code=404)
    else:
        target(req, conn, db)
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
|
|
||||||
# Baseline configuration seeded into a fresh database root ('config');
# values can be changed in the stored config afterwards.
default_server_config = {
    "motd": "Welcome to this PacketServer BBS!",  # message of the day
    "operator": "placeholder",  # server operator identifier — replace in production
    "max_message_length": 2000
}
|
|
||||||
@@ -1,60 +0,0 @@
|
|||||||
import ZODB
|
|
||||||
import json
|
|
||||||
import gzip
|
|
||||||
import base64
|
|
||||||
from io import BytesIO
|
|
||||||
from uuid import UUID
|
|
||||||
|
|
||||||
def get_user_db(username: str, db: ZODB.DB) -> dict:
    """Export one user's data (profile, objects, messages, bulletins, jobs)
    as a JSON-serializable dict.

    Binary object/attachment data is base64-encoded. Fix: the messages
    lookup tested ``user in db_conn.root.messages`` with the User OBJECT,
    but the mapping is keyed by username (the very next line indexes
    ``messages[username]``), so user messages were effectively never
    exported — the membership test now uses ``username``.
    Raises KeyError if the user does not exist.
    """
    udb = {
        "objects": [],
        "messages": [],
        "user": {},
        "bulletins": [],
        "jobs": []
    }
    username = username.strip().upper()
    with db.transaction() as db_conn:
        user = db_conn.root.users[username]
        udb['user'] = user.to_safe_dict()
        for o in user.object_uuids:
            tmp = db_conn.root.objects[o].to_dict()
            obj = {
                'name': tmp['name'],
                'private': tmp['private'],
                'uuid': str(UUID(bytes=tmp['uuid_bytes'])),
                'created_at': tmp['created_at'],
                'modified_at': tmp['modified_at'],
            }
            if type(tmp['data']) is bytes:
                obj['data'] = base64.b64encode(tmp['data']).decode()
            else:
                obj['data'] = str(tmp['data'])
            udb['objects'].append(obj)
        # BUG FIX: was `if user in ...` (a User object against username keys).
        if username in db_conn.root.messages:
            for m in db_conn.root.messages[username]:
                for a in m.attachments:
                    if type(a.data) is bytes:
                        a.data = base64.b64encode(a.data).decode()
                    else:
                        a.data = base64.b64encode(a.data.encode()).decode()
                udb['messages'].append(m.to_dict())
        for b in db_conn.root.bulletins:
            udb['bulletins'].append(b.to_dict())
        if username in db_conn.root.user_jobs:
            for jid in db_conn.root.user_jobs[username]:
                udb['jobs'].append(db_conn.root.jobs[jid].to_dict(binary_safe=True))
    return udb
|
|
||||||
|
|
||||||
def get_user_db_json(username: str, db: ZODB.DB, gzip_output=True) -> bytes:
    """Serialize get_user_db() output to UTF-8 JSON, gzipped by default."""
    payload = json.dumps(get_user_db(username, db)).encode()
    return gzip.compress(payload) if gzip_output else payload
|
|
||||||
@@ -1,382 +0,0 @@
|
|||||||
import re
|
|
||||||
|
|
||||||
import ax25
|
|
||||||
import persistent
|
|
||||||
import persistent.list
|
|
||||||
from persistent.mapping import PersistentMapping
|
|
||||||
import datetime
|
|
||||||
from typing import Self,Union,Optional,Tuple
|
|
||||||
from traceback import format_exc
|
|
||||||
from packetserver.common import PacketServerConnection, Request, Response, Message, send_response, send_blank_response
|
|
||||||
from packetserver.common.constants import no_values, yes_values
|
|
||||||
from packetserver.server.db import get_user_db_json
|
|
||||||
import ZODB
|
|
||||||
from persistent.list import PersistentList
|
|
||||||
import logging
|
|
||||||
from packetserver.server.users import user_authorized
|
|
||||||
import gzip
|
|
||||||
import tarfile
|
|
||||||
import time
|
|
||||||
import json
|
|
||||||
from packetserver.common.util import TarFileExtractor
|
|
||||||
from packetserver.runner import Orchestrator, Runner, RunnerStatus, RunnerFile
|
|
||||||
from enum import Enum
|
|
||||||
from io import BytesIO
|
|
||||||
import base64
|
|
||||||
|
|
||||||
class JobStatus(Enum):
    """Lifecycle states of a background job, from creation to a terminal state."""
    CREATED = 1
    QUEUED = 2
    STARTING = 3
    RUNNING = 4
    STOPPING = 5
    # Terminal states:
    SUCCESSFUL = 6
    FAILED = 7
    TIMED_OUT = 8
|
|
||||||
|
|
||||||
def get_orchestrator_from_config(cfg: dict) -> Orchestrator:
|
|
||||||
if 'runner' in cfg:
|
|
||||||
val = cfg['runner'].lower().strip()
|
|
||||||
if val == "podman":
|
|
||||||
from packetserver.runner.podman import PodmanOrchestrator, PodmanOptions
|
|
||||||
image = cfg.get('image', 'debian')
|
|
||||||
opts = PodmanOptions(default_timeout=300, max_timeout=3600, image_name=image, max_active_jobs=5,
|
|
||||||
container_keepalive=300, name_prefix="packetserver_")
|
|
||||||
orch = PodmanOrchestrator(options=opts)
|
|
||||||
return orch
|
|
||||||
else:
|
|
||||||
raise RuntimeError("Other orchestrators not implemented yet.")
|
|
||||||
else:
|
|
||||||
raise RuntimeError("Runners not configured in root.config.jobs_config")
|
|
||||||
|
|
||||||
def get_new_job_id(root: PersistentMapping) -> int:
|
|
||||||
if 'job_counter' not in root:
|
|
||||||
root['job_counter'] = 1
|
|
||||||
return 0
|
|
||||||
else:
|
|
||||||
current = root['job_counter']
|
|
||||||
root['job_counter'] = current + 1
|
|
||||||
return current
|
|
||||||
|
|
||||||
class Job(persistent.Persistent):
|
|
||||||
@classmethod
|
|
||||||
def update_job_from_runner(cls, runner: Runner, db_root: PersistentMapping) -> True:
|
|
||||||
job = Job.get_job_by_id(runner.job_id, db_root)
|
|
||||||
if job is None:
|
|
||||||
logging.warning(f"Couldn't match runner {runner} with a job by id.")
|
|
||||||
return False
|
|
||||||
if not runner.is_finished():
|
|
||||||
return False
|
|
||||||
job.finished_at = datetime.datetime.now()
|
|
||||||
job.output = runner.output
|
|
||||||
job.errors = runner.errors
|
|
||||||
job.return_code = runner.return_code
|
|
||||||
job._artifact_archive = runner._artifact_archive
|
|
||||||
if runner.status == RunnerStatus.SUCCESSFUL:
|
|
||||||
job.status = JobStatus.SUCCESSFUL
|
|
||||||
else:
|
|
||||||
job.status = JobStatus.FAILED
|
|
||||||
return True
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def get_job_by_id(cls, jid: int, db_root: PersistentMapping) -> Optional[Self]:
|
|
||||||
if jid in db_root['jobs']:
|
|
||||||
return db_root['jobs'][jid]
|
|
||||||
return None
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def get_jobs_by_username(cls, username:str, db_root: PersistentMapping) -> list[Self]:
|
|
||||||
un = username.strip().upper()
|
|
||||||
if un in db_root['user_jobs']:
|
|
||||||
l = []
|
|
||||||
for j in db_root['user_jobs'][un]:
|
|
||||||
l.append(Job.get_job_by_id(j, db_root))
|
|
||||||
return l
|
|
||||||
else:
|
|
||||||
return []
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def num_jobs_queued(cls, db_root: PersistentMapping) -> int:
|
|
||||||
return len(db_root['job_queue'])
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def jobs_in_queue(cls, db_root: PersistentMapping) -> bool:
|
|
||||||
if Job.num_jobs_queued(db_root) > 0:
|
|
||||||
return True
|
|
||||||
else:
|
|
||||||
return False
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def get_next_queued_job(cls, db_root: PersistentMapping) -> Self:
|
|
||||||
return db_root['job_queue'][0]
|
|
||||||
|
|
||||||
def __init__(self, cmd: Union[list[str], str], owner: Optional[str] = None, timeout: int = 300,
|
|
||||||
env: dict = None, files: list[RunnerFile] = None):
|
|
||||||
self.owner = None
|
|
||||||
if owner is not None:
|
|
||||||
self.owner = str(owner).upper().strip()
|
|
||||||
self.cmd = cmd
|
|
||||||
self.env = {}
|
|
||||||
if env is not None:
|
|
||||||
for key in env:
|
|
||||||
self.env[key] = env[key]
|
|
||||||
self.files = []
|
|
||||||
if files is not None:
|
|
||||||
self.files = files
|
|
||||||
self.created_at = datetime.datetime.now(datetime.UTC)
|
|
||||||
self.started_at = None
|
|
||||||
self.finished_at = None
|
|
||||||
self._artifact_archive = b''
|
|
||||||
self.output = b''
|
|
||||||
self.errors = b''
|
|
||||||
self.return_code = 0
|
|
||||||
self.id = None
|
|
||||||
self.status = JobStatus.CREATED
|
|
||||||
|
|
||||||
@property
|
|
||||||
def is_finished(self) -> bool:
|
|
||||||
if self.finished_at is None:
|
|
||||||
return False
|
|
||||||
else:
|
|
||||||
return True
|
|
||||||
|
|
||||||
@property
|
|
||||||
def output_str(self) -> str:
|
|
||||||
return self.output.decode()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def errors_str(self) -> str:
|
|
||||||
return self.errors.decode()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def artifacts(self) -> TarFileExtractor:
|
|
||||||
if self._artifact_archive == b'':
|
|
||||||
return TarFileExtractor(BytesIO(b''))
|
|
||||||
else:
|
|
||||||
return TarFileExtractor(gzip.GzipFile(fileobj=BytesIO(self._artifact_archive)))
|
|
||||||
|
|
||||||
@property
|
|
||||||
def num_artifacts(self) -> int:
|
|
||||||
return len(list(self.artifacts))
|
|
||||||
|
|
||||||
def __repr__(self) -> str:
|
|
||||||
return f"<Job[{self.id}] - {self.owner} - {self.status.name}>"
|
|
||||||
|
|
||||||
def artifact(self, index: int) -> Tuple[str, bytes]:
|
|
||||||
artifacts = list(self.artifacts)
|
|
||||||
if (index + 1) > len(artifacts):
|
|
||||||
raise IndexError(f"Index {index} out of bounds.")
|
|
||||||
else:
|
|
||||||
return artifacts[index][0], artifacts[index][1].read()
|
|
||||||
|
|
||||||
def queue(self, db_root: PersistentMapping) -> int:
|
|
||||||
logging.debug(f"Attempting to queue job {self}")
|
|
||||||
if self.owner is None or (str(self.owner).strip() == ""):
|
|
||||||
raise ValueError("Job must have an owner to be queued.")
|
|
||||||
|
|
||||||
if self.id is None:
|
|
||||||
self.id = get_new_job_id(db_root)
|
|
||||||
owner = self.owner.upper().strip()
|
|
||||||
if owner not in db_root['user_jobs']:
|
|
||||||
db_root['user_jobs'][owner] = PersistentList()
|
|
||||||
db_root['user_jobs'][owner].append(self.id)
|
|
||||||
db_root['jobs'][self.id] = self
|
|
||||||
db_root['job_queue'].append(self.id)
|
|
||||||
return self.id
|
|
||||||
|
|
||||||
def to_dict(self, include_data: bool = True, binary_safe: bool = False):
|
|
||||||
started_at = None
|
|
||||||
finished_at = None
|
|
||||||
if self.started_at is not None:
|
|
||||||
started_at = self.started_at.isoformat()
|
|
||||||
if self.finished_at is not None:
|
|
||||||
finished_at = self.finished_at.isoformat()
|
|
||||||
output = {
|
|
||||||
"cmd": self.cmd,
|
|
||||||
"owner": self.owner,
|
|
||||||
"created_at": self.created_at.isoformat(),
|
|
||||||
"started_at": started_at,
|
|
||||||
"finished_at": finished_at,
|
|
||||||
"output": b'',
|
|
||||||
"errors": b'',
|
|
||||||
"return_code": self.return_code,
|
|
||||||
"artifacts": [],
|
|
||||||
"status": self.status.name,
|
|
||||||
"id": self.id
|
|
||||||
}
|
|
||||||
if include_data:
|
|
||||||
if binary_safe:
|
|
||||||
output['output'] = base64.b64encode(self.output).decode()
|
|
||||||
output['errors'] = base64.b64encode(self.errors).decode()
|
|
||||||
else:
|
|
||||||
output['output'] = self.output
|
|
||||||
output['errors'] = self.errors
|
|
||||||
|
|
||||||
for a in self.artifacts:
|
|
||||||
if binary_safe:
|
|
||||||
output['artifacts'].append((a[0], base64.b64encode(a[1].read()).decode()))
|
|
||||||
else:
|
|
||||||
output['artifacts'].append((a[0], a[1].read()))
|
|
||||||
return output
|
|
||||||
|
|
||||||
def json(self, include_data: bool = True) -> str:
|
|
||||||
return json.dumps(self.to_dict(include_data=include_data, binary_safe=True))
|
|
||||||
|
|
||||||
def handle_job_get_id(req: Request, conn: PacketServerConnection, db: ZODB.DB, jid: int):
|
|
||||||
username = ax25.Address(conn.remote_callsign).call.upper().strip()
|
|
||||||
value = "y"
|
|
||||||
include_data = True
|
|
||||||
for key in req.vars:
|
|
||||||
if key.lower().strip() == "data":
|
|
||||||
value = req.vars[key].lower().strip()
|
|
||||||
if value in no_values:
|
|
||||||
include_data = False
|
|
||||||
|
|
||||||
with db.transaction() as storage:
|
|
||||||
try:
|
|
||||||
job = Job.get_job_by_id(jid, storage.root())
|
|
||||||
if job is None:
|
|
||||||
send_blank_response(conn, req, 404)
|
|
||||||
return
|
|
||||||
if job.owner != username:
|
|
||||||
send_blank_response(conn, req, 401)
|
|
||||||
return
|
|
||||||
send_blank_response(conn, req, 200, job.to_dict(include_data=include_data))
|
|
||||||
return
|
|
||||||
except:
|
|
||||||
logging.error(f"Error looking up job {jid}:\n{format_exc()}")
|
|
||||||
send_blank_response(conn, req, 500, payload="unknown server error")
|
|
||||||
|
|
||||||
def handle_job_get_user(req: Request, conn: PacketServerConnection, db: ZODB.DB):
|
|
||||||
username = ax25.Address(conn.remote_callsign).call.upper().strip()
|
|
||||||
jobs = []
|
|
||||||
value = "y"
|
|
||||||
include_data = True
|
|
||||||
for key in req.vars:
|
|
||||||
if key.lower().strip() == "data":
|
|
||||||
value = req.vars[key].lower().strip()
|
|
||||||
if value in no_values:
|
|
||||||
include_data = False
|
|
||||||
id_only = False
|
|
||||||
if 'id_only' in req.vars:
|
|
||||||
if req.vars['id_only'] in yes_values:
|
|
||||||
id_only = True
|
|
||||||
with db.transaction() as storage:
|
|
||||||
for jid in storage.root()['user_jobs'][username]:
|
|
||||||
jobs.append(Job.get_job_by_id(jid, storage.root()).to_dict(include_data=include_data))
|
|
||||||
|
|
||||||
if id_only:
|
|
||||||
send_blank_response(conn, req, status_code=200, payload=[x['id'] for x in jobs])
|
|
||||||
else:
|
|
||||||
send_blank_response(conn, req, status_code=200, payload=jobs)
|
|
||||||
|
|
||||||
def handle_job_get(req: Request, conn: PacketServerConnection, db: ZODB.DB):
|
|
||||||
spl = [x for x in req.path.split("/") if x.strip() != ""]
|
|
||||||
if (len(spl) == 2) and (spl[1].isdigit()):
|
|
||||||
handle_job_get_id(req, conn, db, int(spl[1]))
|
|
||||||
elif (len(spl) == 2) and (spl[1].lower() == "user"):
|
|
||||||
handle_job_get_user(req, conn, db)
|
|
||||||
else:
|
|
||||||
send_blank_response(conn, req, status_code=404)
|
|
||||||
|
|
||||||
def handle_new_job_post(req: Request, conn: PacketServerConnection, db: ZODB.DB):
|
|
||||||
username = ax25.Address(conn.remote_callsign).call.upper().strip()
|
|
||||||
quick = False
|
|
||||||
if 'quick' in req.vars:
|
|
||||||
quick_val = req.vars['quick']
|
|
||||||
if type(quick_val) is str:
|
|
||||||
quick_val = quick_val.lower()
|
|
||||||
if quick_val in yes_values:
|
|
||||||
quick = True
|
|
||||||
if 'cmd' not in req.payload:
|
|
||||||
logging.info(f"request {req} did not contain job command (cmd) key")
|
|
||||||
send_blank_response(conn, req, 401, "job post must contain cmd key containing str or list[str]")
|
|
||||||
return
|
|
||||||
if type(req.payload['cmd']) not in [str, list]:
|
|
||||||
send_blank_response(conn, req, 401, "job post must contain cmd key containing str or list[str]")
|
|
||||||
return
|
|
||||||
files = []
|
|
||||||
if 'db' in req.payload:
|
|
||||||
logging.debug(f"Fetching a user db as requested.")
|
|
||||||
try:
|
|
||||||
dbf = RunnerFile('user-db.json.gz', data=get_user_db_json(username.lower(), db))
|
|
||||||
except:
|
|
||||||
logging.error(format_exc())
|
|
||||||
send_blank_response(conn, req, 500)
|
|
||||||
return
|
|
||||||
files.append(dbf)
|
|
||||||
if 'files' in req.payload:
|
|
||||||
if type(files) is dict:
|
|
||||||
for key in req.payload['files']:
|
|
||||||
val = req.payload['files'][key]
|
|
||||||
if type(val) is bytes:
|
|
||||||
files.append(RunnerFile(key, data=val))
|
|
||||||
env = {}
|
|
||||||
if 'env' in req.payload:
|
|
||||||
if type(req.payload['env']) is dict:
|
|
||||||
for key in req.payload['env']:
|
|
||||||
env[key] = req.payload['env'][key]
|
|
||||||
job = Job(req.payload['cmd'], owner=username, env=env, files=files)
|
|
||||||
with db.transaction() as storage:
|
|
||||||
try:
|
|
||||||
new_jid = job.queue(storage.root())
|
|
||||||
logging.info(f"New job created with id {new_jid}")
|
|
||||||
except:
|
|
||||||
logging.error(f"Failed to queue new job {job}:\n{format_exc()}")
|
|
||||||
send_blank_response(conn, req, 500, "unknown server error while queuing job")
|
|
||||||
return
|
|
||||||
if quick:
|
|
||||||
start_time = datetime.datetime.now()
|
|
||||||
now = datetime.datetime.now()
|
|
||||||
job_done = False
|
|
||||||
quick_job = None
|
|
||||||
logging.debug(f"{start_time}: Waiting for a quick job for 30 seconds")
|
|
||||||
while (now - start_time).total_seconds() < 30:
|
|
||||||
with db.transaction() as storage:
|
|
||||||
try:
|
|
||||||
j = Job.get_job_by_id(new_jid, storage.root())
|
|
||||||
if j.is_finished:
|
|
||||||
job_done = True
|
|
||||||
quick_job = j
|
|
||||||
break
|
|
||||||
except:
|
|
||||||
pass
|
|
||||||
time.sleep(1)
|
|
||||||
now = datetime.datetime.now()
|
|
||||||
if job_done and (type(quick_job) is Job):
|
|
||||||
send_blank_response(conn, req, 200, job.to_dict(include_data=True))
|
|
||||||
else:
|
|
||||||
logging.warning(f"Quick job {new_jid} timed out.")
|
|
||||||
send_blank_response(conn, req, status_code=202, payload={'job_id': new_jid, 'msg': 'queued'})
|
|
||||||
else:
|
|
||||||
send_blank_response(conn, req, 201, {'job_id': new_jid})
|
|
||||||
|
|
||||||
def handle_job_post(req: Request, conn: PacketServerConnection, db: ZODB.DB):
|
|
||||||
spl = [x for x in req.path.split("/") if x.strip() != ""]
|
|
||||||
|
|
||||||
if len(spl) == 1:
|
|
||||||
handle_new_job_post(req, conn, db)
|
|
||||||
else:
|
|
||||||
send_blank_response(conn, req, status_code=404)
|
|
||||||
|
|
||||||
def job_root_handler(req: Request, conn: PacketServerConnection, db: ZODB.DB):
|
|
||||||
logging.debug(f"{req} being processed by job_root_handler")
|
|
||||||
if not user_authorized(conn, db):
|
|
||||||
logging.debug(f"user {conn.remote_callsign} not authorized")
|
|
||||||
send_blank_response(conn, req, status_code=401)
|
|
||||||
return
|
|
||||||
logging.debug("user is authorized")
|
|
||||||
with db.transaction() as storage:
|
|
||||||
if 'jobs_enabled' in storage.root.config:
|
|
||||||
jobs_enabled = storage.root.config['jobs_enabled']
|
|
||||||
else:
|
|
||||||
jobs_enabled = False
|
|
||||||
if not jobs_enabled:
|
|
||||||
send_blank_response(conn, req, 400, payload="jobs not enabled on this server")
|
|
||||||
return
|
|
||||||
if req.method is Request.Method.GET:
|
|
||||||
handle_job_get(req, conn, db)
|
|
||||||
elif req.method is Request.Method.POST:
|
|
||||||
handle_job_post(req, conn, db)
|
|
||||||
else:
|
|
||||||
send_blank_response(conn, req, status_code=404)
|
|
||||||
@@ -1,478 +0,0 @@
|
|||||||
"""BBS private message system"""
|
|
||||||
import ax25
|
|
||||||
import persistent
|
|
||||||
import persistent.list
|
|
||||||
from persistent.mapping import PersistentMapping
|
|
||||||
import datetime
|
|
||||||
from typing import Self,Union,Optional,Iterable,Sequence
|
|
||||||
from packetserver.common import PacketServerConnection, Request, Response, send_response, send_blank_response
|
|
||||||
from packetserver.common import Message as PacketMessage
|
|
||||||
from packetserver.common.constants import yes_values, no_values
|
|
||||||
from packetserver.common.util import from_date_digits, to_date_digits
|
|
||||||
import ZODB
|
|
||||||
import logging
|
|
||||||
import uuid
|
|
||||||
from uuid import UUID
|
|
||||||
from packetserver.common.util import email_valid
|
|
||||||
from packetserver.server.objects import Object
|
|
||||||
from packetserver.server.users import User
|
|
||||||
from BTrees.OOBTree import TreeSet
|
|
||||||
from packetserver.server.users import User, user_authorized
|
|
||||||
from traceback import format_exc
|
|
||||||
from collections import namedtuple
|
|
||||||
import re
|
|
||||||
|
|
||||||
since_regex = """^message\\/since\\/(\\d+)$"""
|
|
||||||
|
|
||||||
def mailbox_create(username: str, db_root: PersistentMapping):
|
|
||||||
un = username.upper().strip()
|
|
||||||
u = User.get_user_by_username(un, db_root)
|
|
||||||
if u is None:
|
|
||||||
raise KeyError(f"Username {username} does not exist.")
|
|
||||||
if not u.enabled:
|
|
||||||
raise KeyError(f"Username {username} does not exist.")
|
|
||||||
if un not in db_root['messages']:
|
|
||||||
db_root['messages'][un] = persistent.list.PersistentList()
|
|
||||||
|
|
||||||
|
|
||||||
def global_unique_message_uuid(db_root: PersistentMapping) -> UUID:
|
|
||||||
if "message_uuids" not in db_root:
|
|
||||||
db_root['message_uuids'] = TreeSet()
|
|
||||||
logging.debug("Created message_uuid set for global message ids.")
|
|
||||||
uid = uuid.uuid4()
|
|
||||||
while uid in db_root['message_uuids']:
|
|
||||||
uid = uuid.uuid4()
|
|
||||||
return uid
|
|
||||||
|
|
||||||
class Attachment:
|
|
||||||
"""Name and data that is sent with a message."""
|
|
||||||
def __init__(self, name: str, data: Union[bytes,bytearray,str]):
|
|
||||||
self._name = ""
|
|
||||||
self._data = b""
|
|
||||||
self._binary = True
|
|
||||||
self.data = data
|
|
||||||
self.name = name
|
|
||||||
|
|
||||||
@property
|
|
||||||
def name(self) -> str:
|
|
||||||
return self._name
|
|
||||||
|
|
||||||
@name.setter
|
|
||||||
def name(self, name: str):
|
|
||||||
if name.strip() != self._name:
|
|
||||||
if len(name.strip()) > 300:
|
|
||||||
raise ValueError("Object names must be no more than 300 characters.")
|
|
||||||
self._name = name.strip()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def binary(self):
|
|
||||||
return self._binary
|
|
||||||
|
|
||||||
@property
|
|
||||||
def data(self) -> Union[str,bytes]:
|
|
||||||
if self.binary:
|
|
||||||
return self._data
|
|
||||||
else:
|
|
||||||
return self._data.decode()
|
|
||||||
|
|
||||||
@data.setter
|
|
||||||
def data(self, data: Union[bytes,bytearray,str]):
|
|
||||||
if type(data) in (bytes,bytearray):
|
|
||||||
if bytes(data) != self._data:
|
|
||||||
self._data = bytes(data)
|
|
||||||
self._binary = True
|
|
||||||
else:
|
|
||||||
if str(data).encode() != self._data:
|
|
||||||
self._data = str(data).encode()
|
|
||||||
self._binary = False
|
|
||||||
|
|
||||||
@property
|
|
||||||
def size(self) -> int:
|
|
||||||
return len(self.data)
|
|
||||||
|
|
||||||
def copy(self):
|
|
||||||
return Attachment(self.name, self.data)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def from_dict(cls, attachment: dict):
|
|
||||||
name = attachment.get("name")
|
|
||||||
data = attachment.get("data")
|
|
||||||
return Attachment(name, data)
|
|
||||||
|
|
||||||
def to_dict(self, include_data: bool = True):
|
|
||||||
d = {
|
|
||||||
"name": self.name,
|
|
||||||
"binary": self.binary,
|
|
||||||
"size_bytes": self.size,
|
|
||||||
"data": b''
|
|
||||||
}
|
|
||||||
if include_data:
|
|
||||||
d['data'] = self.data
|
|
||||||
return d
|
|
||||||
|
|
||||||
class ObjectAttachment(Attachment):
|
|
||||||
def __init__(self, name: str, obj: Object):
|
|
||||||
self.object = obj
|
|
||||||
super().__init__(name, "")
|
|
||||||
|
|
||||||
@property
|
|
||||||
def size(self) -> int:
|
|
||||||
return self.object.size
|
|
||||||
|
|
||||||
@property
|
|
||||||
def data(self) -> Union[str,bytes]:
|
|
||||||
return self.object.data
|
|
||||||
|
|
||||||
@property
|
|
||||||
def binary(self) -> bool:
|
|
||||||
return self.object.binary
|
|
||||||
|
|
||||||
|
|
||||||
class MessageTextTooLongError(Exception):
|
|
||||||
"""Raised when the message text exceeds the length allowed in the server config."""
|
|
||||||
pass
|
|
||||||
|
|
||||||
class MessageAlreadySentError(Exception):
|
|
||||||
"""Raised when the message text exceeds the length allowed in the server config."""
|
|
||||||
pass
|
|
||||||
|
|
||||||
class Message(persistent.Persistent):
|
|
||||||
def __init__(self, text: str, msg_to: Optional[Iterable[str]]= None, msg_from: Optional[str] = None,
|
|
||||||
attachments: Optional[Iterable[Attachment]] = None):
|
|
||||||
self.retrieved = False
|
|
||||||
self.sent_at = datetime.datetime.now(datetime.UTC)
|
|
||||||
self.text = text
|
|
||||||
self.attachments = ()
|
|
||||||
self.msg_to = (None,)
|
|
||||||
self.msg_from = None
|
|
||||||
self.msg_id = uuid.uuid4()
|
|
||||||
self.msg_delivered = False
|
|
||||||
if msg_to:
|
|
||||||
if type(msg_to) is str:
|
|
||||||
msg_to = msg_to.upper().strip()
|
|
||||||
self.msg_to = (msg_to,)
|
|
||||||
else:
|
|
||||||
msg_to_tmp = []
|
|
||||||
for i in msg_to:
|
|
||||||
i = str(i).strip().upper()
|
|
||||||
if i == "ALL":
|
|
||||||
msg_to_tmp = ["ALL"]
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
msg_to_tmp.append(i)
|
|
||||||
self.msg_to = tuple(msg_to_tmp)
|
|
||||||
if msg_from:
|
|
||||||
self.msg_from = str(msg_from).upper().strip()
|
|
||||||
|
|
||||||
if attachments:
|
|
||||||
attch = []
|
|
||||||
for i in attachments:
|
|
||||||
if type(i) is Attachment:
|
|
||||||
attch.append(i)
|
|
||||||
elif type(i) is dict:
|
|
||||||
attch.append(Attachment.from_dict(i))
|
|
||||||
elif not isinstance(i,Attachment):
|
|
||||||
attch.append(Attachment("",str(i)))
|
|
||||||
else:
|
|
||||||
attch.append(i)
|
|
||||||
self.attachments = tuple(attch)
|
|
||||||
def __repr__(self):
|
|
||||||
return f"<Message: ID: {self.msg_id}, Sent: {self.msg_delivered}>"
|
|
||||||
|
|
||||||
def to_dict(self, get_text: bool = True, get_attachments: bool = True) -> dict:
|
|
||||||
attachments = []
|
|
||||||
for attachment in self.attachments:
|
|
||||||
attachments.append(attachment.to_dict(include_data=get_attachments))
|
|
||||||
d = {
|
|
||||||
"attachments": attachments,
|
|
||||||
"to": self.msg_to,
|
|
||||||
"from": self.msg_from,
|
|
||||||
"id": str(self.msg_id),
|
|
||||||
"sent_at": self.sent_at.isoformat(),
|
|
||||||
"text": ""
|
|
||||||
}
|
|
||||||
if get_text:
|
|
||||||
d['text'] = self.text
|
|
||||||
|
|
||||||
return d
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def from_dict(cls, data: dict) -> Self:
|
|
||||||
return Message(data['text'],msg_to=data.get('to'), attachments=data.get("attachments"))
|
|
||||||
|
|
||||||
def send(self, db: ZODB.DB) -> tuple:
|
|
||||||
if self.msg_delivered:
|
|
||||||
raise MessageAlreadySentError("Cannot send a private message that has already been sent.")
|
|
||||||
if self.msg_from is None:
|
|
||||||
raise ValueError("Message sender (message_from) cannot be None.")
|
|
||||||
new_attachments = []
|
|
||||||
for i in self.attachments:
|
|
||||||
if isinstance(i,ObjectAttachment):
|
|
||||||
logging.debug("Skpping object attachments for now. Resolve db queries for them at send time.")
|
|
||||||
# new_attachments.append(Attachment(i.name, i.data)) TODO send object attachments
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
new_attachments.append(i)
|
|
||||||
send_counter = 0
|
|
||||||
recipients = []
|
|
||||||
failed = []
|
|
||||||
to_all = False
|
|
||||||
with db.transaction() as db:
|
|
||||||
mailbox_create(self.msg_from, db.root())
|
|
||||||
self.msg_id = global_unique_message_uuid(db.root())
|
|
||||||
for recipient in self.msg_to:
|
|
||||||
recipient = recipient.upper().strip()
|
|
||||||
if recipient is None:
|
|
||||||
continue
|
|
||||||
if recipient == "ALL":
|
|
||||||
recipients = [x for x in db.root.users if db.root.users[x].enabled]
|
|
||||||
to_all = True
|
|
||||||
break
|
|
||||||
recipients.append(recipient)
|
|
||||||
if self.msg_from.upper().strip() in recipients:
|
|
||||||
recipients.remove(self.msg_from.upper().strip())
|
|
||||||
send_counter = send_counter + 1
|
|
||||||
for recipient in recipients:
|
|
||||||
msg = Message(self.text, recipient, self.msg_from, attachments=[x.copy() for x in new_attachments])
|
|
||||||
msg.msg_id = self.msg_id
|
|
||||||
try:
|
|
||||||
mailbox_create(recipient, db.root())
|
|
||||||
msg.msg_delivered = True
|
|
||||||
msg.sent_at = datetime.datetime.now(datetime.UTC)
|
|
||||||
if to_all:
|
|
||||||
msg.msg_to = 'ALL'
|
|
||||||
db.root.messages[recipient].append(msg)
|
|
||||||
send_counter = send_counter + 1
|
|
||||||
except:
|
|
||||||
logging.error(f"Error sending message to {recipient}:\n{format_exc()}")
|
|
||||||
failed.append(recipient)
|
|
||||||
self.msg_delivered = True
|
|
||||||
msg = Message(self.text, recipient, self.msg_from, attachments=[x.copy() for x in new_attachments])
|
|
||||||
msg.msg_id = self.msg_id
|
|
||||||
msg.msg_to = self.msg_to
|
|
||||||
db.root.messages[self.msg_from.upper().strip()].append(msg)
|
|
||||||
return send_counter, failed, self.msg_id
|
|
||||||
|
|
||||||
DisplayOptions = namedtuple('DisplayOptions', ['get_text', 'limit', 'sort_by', 'reverse', 'search',
|
|
||||||
'get_attachments', 'sent_received_all'])
|
|
||||||
|
|
||||||
def parse_display_options(req: Request) -> DisplayOptions:
|
|
||||||
logging.debug(f"Parsing request vars for message get: {req.vars}")
|
|
||||||
sent_received_all = "received"
|
|
||||||
d = req.vars.get("source")
|
|
||||||
if type(d) is str:
|
|
||||||
d.lower().strip()
|
|
||||||
if d == "sent":
|
|
||||||
sent_received_all = "sent"
|
|
||||||
elif d == "all":
|
|
||||||
sent_received_all = "all"
|
|
||||||
|
|
||||||
limit = req.vars.get('limit')
|
|
||||||
try:
|
|
||||||
limit = int(limit)
|
|
||||||
except:
|
|
||||||
limit = None
|
|
||||||
|
|
||||||
d = req.vars.get('fetch_text')
|
|
||||||
if type(d) is str:
|
|
||||||
d.lower().strip()
|
|
||||||
if d in no_values:
|
|
||||||
get_text = False
|
|
||||||
else:
|
|
||||||
get_text = True
|
|
||||||
|
|
||||||
d = req.vars.get('fetch_attachments')
|
|
||||||
logging.debug(f"Parsing fetch_attachment var: {d}")
|
|
||||||
if type(d) is str:
|
|
||||||
d.lower().strip()
|
|
||||||
if d in yes_values:
|
|
||||||
logging.debug("fetch_attachment is yes")
|
|
||||||
get_attachments = True
|
|
||||||
else:
|
|
||||||
get_attachments = False
|
|
||||||
logging.debug("fetch_attachment is no")
|
|
||||||
|
|
||||||
r = req.vars.get('reverse')
|
|
||||||
if type(r) is str:
|
|
||||||
r.lower().strip()
|
|
||||||
if r in yes_values:
|
|
||||||
reverse = True
|
|
||||||
else:
|
|
||||||
reverse = False
|
|
||||||
|
|
||||||
sort = req.vars.get('sort')
|
|
||||||
sort_by = "date"
|
|
||||||
if type(sort) is str:
|
|
||||||
sort = sort.lower().strip()
|
|
||||||
if sort == "from":
|
|
||||||
sort_by = "from"
|
|
||||||
elif sort == "to":
|
|
||||||
sort_by = "to"
|
|
||||||
|
|
||||||
s = req.vars.get('search')
|
|
||||||
search = None
|
|
||||||
if type(s) is str:
|
|
||||||
s = s.lower()
|
|
||||||
if s:
|
|
||||||
search = str(s).lower()
|
|
||||||
|
|
||||||
return DisplayOptions(get_text, limit, sort_by, reverse, search, get_attachments, sent_received_all)
|
|
||||||
|
|
||||||
def handle_messages_since(req: Request, conn: PacketServerConnection, db: ZODB.DB):
|
|
||||||
if req.method is not Request.Method.GET:
|
|
||||||
send_blank_response(conn, req, 400, "method not implemented")
|
|
||||||
logging.warning(f"Received req with wrong message for path {req.path}.")
|
|
||||||
return
|
|
||||||
try:
|
|
||||||
since_date = from_date_digits(req.vars['since'])
|
|
||||||
except ValueError as v:
|
|
||||||
send_blank_response(conn, req, 400, "invalid date string")
|
|
||||||
return
|
|
||||||
except:
|
|
||||||
send_blank_response(conn, req, 500, "unknown error")
|
|
||||||
logging.error(f"Unhandled exception: {format_exc()}")
|
|
||||||
return
|
|
||||||
opts = parse_display_options(req)
|
|
||||||
username = ax25.Address(conn.remote_callsign).call.upper().strip()
|
|
||||||
msg_return = []
|
|
||||||
with db.transaction() as db:
|
|
||||||
mailbox_create(username, db.root())
|
|
||||||
mb = db.root.messages[username]
|
|
||||||
logging.debug(f"Only grabbing messages since {since_date}")
|
|
||||||
new_mb = [msg for msg in mb if msg.sent_at >= since_date]
|
|
||||||
if len(new_mb) > 0:
|
|
||||||
logging.debug(f"First message in new list: {new_mb[0].sent_at}")
|
|
||||||
logging.debug(f"Last message in new list: {new_mb[-1].sent_at}")
|
|
||||||
if opts.search:
|
|
||||||
messages = [msg for msg in new_mb if (opts.search in msg.text.lower()) or (opts.search in msg.msg_to[0].lower())
|
|
||||||
or (opts.search in msg.msg_from.lower())]
|
|
||||||
else:
|
|
||||||
messages = [msg for msg in new_mb]
|
|
||||||
|
|
||||||
if opts.sort_by == "from":
|
|
||||||
messages.sort(key=lambda x: x.msg_from, reverse=opts.reverse)
|
|
||||||
elif opts.sort_by == "to":
|
|
||||||
messages.sort(key=lambda x: x.msg_to, reverse=opts.reverse)
|
|
||||||
else:
|
|
||||||
messages.sort(key=lambda x: x.sent_at, reverse=opts.reverse)
|
|
||||||
|
|
||||||
for i in range(0, len(messages)):
|
|
||||||
if opts.limit and (len(msg_return) >= opts.limit):
|
|
||||||
break
|
|
||||||
|
|
||||||
msg = messages[i]
|
|
||||||
msg.retrieved = True
|
|
||||||
msg_return.append(msg.to_dict(get_text=opts.get_text, get_attachments=opts.get_attachments))
|
|
||||||
|
|
||||||
response = Response.blank()
|
|
||||||
response.status_code = 200
|
|
||||||
response.payload = msg_return
|
|
||||||
send_response(conn, response, req)
|
|
||||||
|
|
||||||
def handle_message_get_id(req: Request, conn: PacketServerConnection, db: ZODB.DB):
|
|
||||||
uuid_val = req.vars['id']
|
|
||||||
obj_uuid = None
|
|
||||||
try:
|
|
||||||
if type(uuid_val) is bytes:
|
|
||||||
obj_uuid = UUID(bytes=uuid_val)
|
|
||||||
elif type(uuid_val) is int:
|
|
||||||
obj_uuid = UUID(int=uuid_val)
|
|
||||||
elif type(uuid_val) is str:
|
|
||||||
obj_uuid = UUID(uuid_val)
|
|
||||||
except:
|
|
||||||
pass
|
|
||||||
if obj_uuid is None:
|
|
||||||
send_blank_response(conn, req, 400)
|
|
||||||
return
|
|
||||||
opts = parse_display_options(req)
|
|
||||||
username = ax25.Address(conn.remote_callsign).call.upper().strip()
|
|
||||||
msg = None
|
|
||||||
with db.transaction() as db:
|
|
||||||
mailbox_create(username, db.root())
|
|
||||||
for m in db.root.messages[username]:
|
|
||||||
if m.msg_id == obj_uuid:
|
|
||||||
msg = m
|
|
||||||
break
|
|
||||||
if msg is None:
|
|
||||||
send_blank_response(conn, req, status_code=404)
|
|
||||||
return
|
|
||||||
else:
|
|
||||||
send_blank_response(conn, req,
|
|
||||||
payload=msg.to_dict(get_text=opts.get_text, get_attachments=opts.get_attachments))
|
|
||||||
|
|
||||||
def handle_message_get(req: Request, conn: PacketServerConnection, db: ZODB.DB):
|
|
||||||
if 'id' in req.vars:
|
|
||||||
return handle_message_get_id(req, conn, db)
|
|
||||||
|
|
||||||
if 'since' in req.vars:
|
|
||||||
return handle_messages_since(req, conn, db)
|
|
||||||
|
|
||||||
opts = parse_display_options(req)
|
|
||||||
username = ax25.Address(conn.remote_callsign).call.upper().strip()
|
|
||||||
msg_return = []
|
|
||||||
with db.transaction() as db:
|
|
||||||
mailbox_create(username, db.root())
|
|
||||||
mb = db.root.messages[username]
|
|
||||||
if opts.search:
|
|
||||||
messages = [msg for msg in mb if (opts.search in msg.text.lower()) or (opts.search in msg.msg_to[0].lower())
|
|
||||||
or (opts.search in msg.msg_from.lower())]
|
|
||||||
else:
|
|
||||||
messages = [msg for msg in mb]
|
|
||||||
|
|
||||||
if opts.sort_by == "from":
|
|
||||||
messages.sort(key=lambda x: x.msg_from, reverse=opts.reverse)
|
|
||||||
elif opts.sort_by == "to":
|
|
||||||
messages.sort(key=lambda x: x.msg_to, reverse=opts.reverse)
|
|
||||||
else:
|
|
||||||
messages.sort(key=lambda x: x.sent_at, reverse=opts.reverse)
|
|
||||||
|
|
||||||
for i in range(0, len(messages)):
|
|
||||||
if opts.limit and (len(msg_return) >= opts.limit):
|
|
||||||
break
|
|
||||||
|
|
||||||
msg = messages[i]
|
|
||||||
msg.retrieved = True
|
|
||||||
msg_return.append(msg.to_dict(get_text=opts.get_text, get_attachments=opts.get_attachments))
|
|
||||||
|
|
||||||
response = Response.blank()
|
|
||||||
response.status_code = 200
|
|
||||||
response.payload = msg_return
|
|
||||||
send_response(conn, response, req)
|
|
||||||
|
|
||||||
def handle_message_post(req: Request, conn: PacketServerConnection, db: ZODB.DB):
|
|
||||||
username = ax25.Address(conn.remote_callsign).call.upper().strip()
|
|
||||||
try:
|
|
||||||
msg = Message.from_dict(req.payload)
|
|
||||||
except:
|
|
||||||
send_blank_response(conn, req, status_code=400)
|
|
||||||
logging.warning(f"User '{username}' attempted to post message with invalid payload: {req.payload}")
|
|
||||||
return
|
|
||||||
msg.msg_from = username
|
|
||||||
try:
|
|
||||||
send_counter, failed, msg_id = msg.send(db)
|
|
||||||
except:
|
|
||||||
send_blank_response(conn, req, status_code=500)
|
|
||||||
logging.error(f"Error while attempting to send message:\n{format_exc()}")
|
|
||||||
return
|
|
||||||
|
|
||||||
send_blank_response(conn, req, status_code=201, payload={
|
|
||||||
"successes": send_counter,
|
|
||||||
"failed": failed,
|
|
||||||
'msg_id': str(msg_id)})
|
|
||||||
|
|
||||||
def message_root_handler(req: Request, conn: PacketServerConnection, db: ZODB.DB):
    """Entry point for 'message' requests: auth-check, then route by method."""
    logging.debug(f"{req} being processed by message_root_handler")
    if not user_authorized(conn, db):
        logging.debug(f"user {conn.remote_callsign} not authorized")
        send_blank_response(conn, req, status_code=401)
        return
    logging.debug("user is authorized")
    # Dispatch table instead of an if/elif chain; unknown methods get a 404.
    method_handlers = {
        Request.Method.GET: handle_message_get,
        Request.Method.POST: handle_message_post,
    }
    handler = method_handlers.get(req.method)
    if handler is None:
        send_blank_response(conn, req, status_code=404)
    else:
        handler(req, conn, db)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@@ -1,435 +0,0 @@
|
|||||||
"""Server object storage system."""
|
|
||||||
from copy import deepcopy
|
|
||||||
|
|
||||||
import persistent
|
|
||||||
import ax25
|
|
||||||
import persistent.list
|
|
||||||
from persistent.mapping import PersistentMapping
|
|
||||||
import datetime
|
|
||||||
from typing import Self,Union,Optional
|
|
||||||
from packetserver.common import PacketServerConnection, Request, Response, Message, send_response, send_blank_response
|
|
||||||
import ZODB
|
|
||||||
import logging
|
|
||||||
import uuid
|
|
||||||
from uuid import UUID
|
|
||||||
from packetserver.server.users import User, user_authorized
|
|
||||||
from collections import namedtuple
|
|
||||||
from traceback import format_exc
|
|
||||||
import base64
|
|
||||||
|
|
||||||
class Object(persistent.Persistent):
    """A named, optionally private blob of user data stored in the ZODB.

    The payload is always held internally as bytes; ``binary`` records
    whether it should be surfaced as raw bytes or decoded text.
    """

    def __init__(self, name: str = "", data: Union[bytes, bytearray, str] = None):
        self.private = False  # private objects are readable only by their owner
        self._binary = False
        self._data = b''
        self._name = ""
        self._owner = None  # owning user's UUID, maintained by chown()
        if data:
            self.data = data
        if name:
            self._name = name
        self._uuid = None  # assigned by write_new()
        self.created_at = datetime.datetime.now(datetime.UTC)
        self.modified_at = datetime.datetime.now(datetime.UTC)

    @property
    def name(self) -> str:
        """Object name (stripped, max 300 characters)."""
        return self._name

    @name.setter
    def name(self, name: str):
        if name.strip() != self._name:
            if len(name.strip()) > 300:
                raise ValueError("Object names must be no more than 300 characters.")
            self._name = name.strip()
            self.touch()

    def touch(self):
        """Update the modification timestamp to now (UTC)."""
        self.modified_at = datetime.datetime.now(datetime.UTC)

    @property
    def size(self) -> int:
        """Size of the stored payload (bytes when binary, characters otherwise)."""
        return len(self.data)

    @property
    def binary(self) -> bool:
        """True when the stored payload is raw bytes rather than text."""
        return self._binary

    @property
    def data(self) -> Union[str, bytes]:
        """The payload: bytes when binary, decoded str otherwise."""
        if self.binary:
            return self._data
        else:
            return self._data.decode()

    @data.setter
    def data(self, data: Union[bytes, bytearray, str]):
        # Only touch the modified timestamp when the content actually changes.
        if type(data) in (bytes, bytearray):
            if bytes(data) != self._data:
                self._data = bytes(data)
                self._binary = True
                self.touch()
        else:
            if str(data).encode() != self._data:
                self._data = str(data).encode()
                self._binary = False
                self.touch()

    @property
    def owner(self) -> Optional[UUID]:
        """UUID of the owning user, or None when unowned."""
        return self._owner

    @owner.setter
    def owner(self, owner_uuid: UUID):
        if owner_uuid:
            if type(owner_uuid) is UUID:
                self._owner = owner_uuid
                self.touch()
            else:
                raise ValueError("Owner must be a UUID")
        else:
            self._owner = None
            self.touch()

    def chown(self, username: str, db: ZODB.DB):
        """Transfer ownership of this object to the named user.

        Updates the stored object's owner uuid and both users' object-uuid
        sets. Raises KeyError when the target user does not exist.
        """
        logging.debug(f"chowning object {self} to user {username}")
        un = username.strip().upper()
        old_owner_uuid = self._owner
        with db.transaction() as db:
            user = User.get_user_by_username(username, db.root())
            old_owner = User.get_user_by_uuid(old_owner_uuid, db.root())
            if user:
                logging.debug(f"new owner user exists: {user}")
                # Assign via the stored copy so the persistent object is marked changed.
                db.root.objects[self.uuid].owner = user.uuid
                if old_owner_uuid:
                    if old_owner:
                        logging.debug(f"The object has an old owner user: {old_owner}")
                        old_owner.remove_obj_uuid(self.uuid)
                logging.debug(f"adding this object uuid to user objects set ({self.uuid})")
                logging.debug(f"user {user} objects before: {user.object_uuids}")
                user.add_obj_uuid(self.uuid)
                logging.debug(f"user objects now: {user.object_uuids}")
            else:
                raise KeyError(f"User '{un}' not found.")

    @classmethod
    def get_object_by_uuid(cls, obj: UUID, db_root: PersistentMapping):
        """Return the stored Object for a uuid, or None when absent."""
        return db_root['objects'].get(obj)

    @classmethod
    def get_objects_by_username(cls, username: str, db: ZODB.DB) -> list[Self]:
        """Return every object registered to the named user.

        Fix: the per-uuid lookups previously passed the transaction/connection
        object where get_object_by_uuid expects the root mapping, so every
        lookup raised and was silently swallowed, always yielding [].
        """
        objs = []
        with db.transaction() as db:
            user = User.get_user_by_username(username, db.root())
            if user:
                for u in user.object_uuids:
                    try:
                        obj = cls.get_object_by_uuid(u, db.root())
                        if obj:
                            objs.append(obj)
                    except Exception:
                        # Best-effort listing: skip uuids that fail to resolve.
                        pass
        return objs

    @property
    def uuid(self) -> Optional[UUID]:
        """The object's storage key; None until write_new() assigns one."""
        return self._uuid

    def write_new(self, db: ZODB.DB) -> UUID:
        """Assign a fresh unique uuid and store this object in db.root.objects."""
        if self.uuid:
            raise KeyError("Object already has UUID. Manually clear it to write it again.")
        self._uuid = uuid.uuid4()
        with db.transaction() as db:
            # Regenerate on the (vanishingly unlikely) chance of a collision.
            while self.uuid in db.root.objects:
                self._uuid = uuid.uuid4()
            db.root.objects[self.uuid] = self
        self.touch()
        return self.uuid

    def to_dict(self, include_data: bool = True) -> dict:
        """Serialize to a plain dict; data is omitted (b'') unless requested."""
        data = b''
        if include_data:
            data = self.data
        if self.uuid:
            uuid_bytes = self.uuid.bytes
        else:
            uuid_bytes = None
        return {
            "name": self.name,
            "uuid_bytes": uuid_bytes,
            "size_bytes": self.size,
            "binary": self.binary,
            "private": self.private,
            "created_at": self.created_at.isoformat(),
            "modified_at": self.modified_at.isoformat(),
            "includes_data": include_data,
            "data": data
        }

    @classmethod
    def from_dict(cls, obj: dict) -> Self:
        """Rebuild an Object from a to_dict()-style mapping."""
        o = Object(name=obj['name'])
        if 'uuid_bytes' in obj:
            if obj['uuid_bytes']:
                o._uuid = UUID(bytes=obj['uuid_bytes'])
        o.private = obj['private']
        o.data = obj['data']
        # The explicit flag wins over whatever the data setter inferred.
        o._binary = obj['binary']
        return o

    def authorized_write(self, username: str, db: ZODB.DB):
        """Return True when the named user owns this object (may modify it).

        Fix: an unknown username now returns False explicitly instead of
        falling through and returning None.
        """
        with db.transaction() as db:
            user = User.get_user_by_username(username, db.root())
            if user:
                return user.uuid == self.owner
        return False

    def authorized_get(self, username: str, db: ZODB.DB):
        """Return True when the named user may read this object.

        Public objects are readable by anyone; private ones only by the owner.
        Fix: an unknown username now returns False explicitly.
        """
        if not self.private:
            return True
        with db.transaction() as db:
            user = User.get_user_by_username(username, db.root())
            if user:
                return user.uuid == self.owner
        return False

    def __repr__(self):
        return f"<Object: '{self.name}', {self.size}b, {self.uuid}>"
|
|
||||||
|
|
||||||
# Parsed query options shared by the object listing/filtering handlers:
# get_data (include payloads), limit (max results), sort_by, reverse, search.
DisplayOptions = namedtuple('DisplayOptions', ['get_data', 'limit', 'sort_by', 'reverse', 'search'])
|
|
||||||
|
|
||||||
def parse_display_options(req: Request) -> DisplayOptions:
    """Parse the limit/fetch/reverse/sort/search request vars into DisplayOptions.

    Fix: the results of ``d.lower().strip()`` and ``r.lower().strip()`` were
    previously discarded, so values like 'Y', 'True' or ' yes ' were never
    recognized as truthy for fetch/reverse.
    """
    limit = req.vars.get('limit')
    try:
        limit = int(limit)
    except (TypeError, ValueError):
        # Missing or non-numeric limit means "no limit".
        limit = None

    d = req.vars.get('fetch')
    if type(d) is str:
        d = d.lower().strip()
    get_data = d in [1, 'y', True, 'yes', 'true', 't']

    r = req.vars.get('reverse')
    if type(r) is str:
        r = r.lower().strip()
    reverse = r in [1, 'y', True, 'yes', 'true', 't']

    sort = req.vars.get('sort')
    sort_by = "name"  # default sort key
    if type(sort) is str:
        sort = sort.lower().strip()
        if sort == "date":
            sort_by = "date"
        elif sort == "size":
            sort_by = "size"

    s = req.vars.get('search')
    search = None
    if type(s) is str:
        s = s.lower()
        if s:
            search = str(s)

    return DisplayOptions(get_data, limit, sort_by, reverse, search)
|
|
||||||
|
|
||||||
def object_display_filter(source: list[Object], opts: DisplayOptions) -> list[dict]:
|
|
||||||
if opts.search:
|
|
||||||
objs = [x for x in source if str(opts.search) in x.name.lower()]
|
|
||||||
else:
|
|
||||||
objs = deepcopy(source)
|
|
||||||
|
|
||||||
if opts.sort_by == "size":
|
|
||||||
objs.sort(key=lambda x: x.size, reverse=opts.reverse)
|
|
||||||
|
|
||||||
elif opts.sort_by == "date":
|
|
||||||
objs.sort(key=lambda x: x.modified_at, reverse=opts.reverse)
|
|
||||||
else:
|
|
||||||
objs.sort(key=lambda x: x.name, reverse=opts.reverse)
|
|
||||||
|
|
||||||
if opts.limit:
|
|
||||||
if len(objs) >= opts.limit:
|
|
||||||
objs = objs[:opts.limit]
|
|
||||||
|
|
||||||
return [o.to_dict(include_data=opts.get_data) for o in objs]
|
|
||||||
|
|
||||||
def handle_get_no_path(req: Request, conn: PacketServerConnection, db: ZODB.DB):
    """Handle a GET 'object' request: one object via the 'uuid' var, or a listing.

    Fix: the privacy check was inverted — non-owners were denied access to
    *public* objects (``if not obj.private: 401``) while private ones leaked.
    Non-owners are now rejected exactly when the object is private.
    """
    opts = parse_display_options(req)
    logging.debug(f"Handling a GET 'object' request: {opts}")
    response = Response.blank()
    response.status_code = 404  # default when nothing matches
    username = ax25.Address(conn.remote_callsign).call.upper().strip()
    with db.transaction() as db:
        user = User.get_user_by_username(username, db.root())
        if not user:
            send_blank_response(conn, req, status_code=500, payload="Unknown user account problem")
            return
        if 'uuid' in req.vars:
            logging.debug(f"uuid req.var: {req.vars['uuid']}")
            uid = req.vars['uuid']
            if type(uid) is bytes:
                obj = Object.get_object_by_uuid(UUID(bytes=uid), db.root())
                if obj:
                    if obj.owner != user.uuid and obj.private:
                        send_blank_response(conn, req, status_code=401)
                        return
                    # Include the payload only when the options ask for it.
                    response.payload = obj.to_dict(include_data=opts.get_data)
                    response.status_code = 200
        else:
            uuids = user.object_uuids
            objs = []
            logging.debug(f"No uuid var, all user object_uuids: {uuids}")
            for i in uuids:
                obj = Object.get_object_by_uuid(i, db.root())
                logging.debug(f"Checking {obj}")
                if not obj.private:
                    logging.debug("object not private")
                    objs.append(obj)
                else:
                    logging.debug("object private")
                    if obj.owner == user.uuid:
                        logging.debug("user uuid matches object uuid")
                        objs.append(obj)
            response.payload = object_display_filter(objs, opts)
            logging.debug(f"object payload: {response.payload}")
            response.status_code = 200
    send_response(conn, response, req)
|
|
||||||
|
|
||||||
def handle_object_get(req: Request, conn: PacketServerConnection, db: ZODB.DB):
    """Handle object GETs.

    Covers both cases: the user listing their own objects, and a specific
    object requested via the 'uuid' request var. Both are serviced by
    handle_get_no_path.
    """
    handle_get_no_path(req, conn, db)
|
|
||||||
|
|
||||||
|
|
||||||
def handle_object_post(req: Request, conn: PacketServerConnection, db: ZODB.DB):
    """Create a new object from the request payload and assign it to the sender.

    Fix: a non-dict payload now aborts after the 400 reply instead of falling
    through and attempting to parse it anyway. The bare ``except`` is narrowed
    to ``Exception``.
    """
    if type(req.payload) is not dict:
        send_blank_response(conn, req, 400, payload="object payload must be 'dict'")
        return
    try:
        obj = Object.from_dict(req.payload)
    except Exception:
        logging.debug(f"Error parsing new object:\n{format_exc()}")
        send_blank_response(conn, req, status_code=400)
        return
    username = ax25.Address(conn.remote_callsign).call.upper().strip()
    with db.transaction() as db_conn:
        logging.debug(f"User {username}'s objects: {db_conn.root.users[username].object_uuids}")
    # write_new/chown open their own transactions on db, so they run after the
    # debug transaction above has closed.
    logging.debug(f"writing new object: {obj}")
    obj.write_new(db)
    logging.debug("chowning new object")
    obj.chown(username, db)
    send_blank_response(conn, req, status_code=201, payload=str(obj.uuid))
|
|
||||||
|
|
||||||
def handle_object_update(req: Request, conn: PacketServerConnection, db: ZODB.DB):
    """Update the name and/or data of an object owned by the requester.

    Fixes:
    - a badly formatted uuid now aborts after the 400 reply (previously fell
      through with ``u_obj`` unbound)
    - the parsed ``u_obj`` is used for the lookup instead of the raw var
    - existence (404) is checked before the owner is read, avoiding an
      AttributeError on a missing object
    """
    username = ax25.Address(conn.remote_callsign).call.upper().strip()
    if type(req.payload) is not dict:
        send_blank_response(conn, req, status_code=400)
        return
    if 'uuid' in req.vars:
        uid = req.vars['uuid']
        # Accept the uuid as raw bytes, an int, or any str()-able form.
        if type(uid) is bytes:
            u_obj = UUID(bytes=uid)
        elif type(uid) is int:
            u_obj = UUID(int=uid)
        else:
            try:
                u_obj = UUID(str(uid))
            except ValueError:
                send_blank_response(conn, req, status_code=400)
                return
        new_name = req.payload.get("name")
        new_data = req.payload.get("data")
        if new_data:
            if type(new_data) not in (bytes, bytearray, str):
                send_blank_response(conn, req, status_code=400)
                return
        with db.transaction() as db:
            obj = Object.get_object_by_uuid(u_obj, db.root())
            if obj is None:
                send_blank_response(conn, req, status_code=404)
                return
            user = User.get_user_by_username(username, db.root())
            if user.uuid != obj.owner:
                send_blank_response(conn, req, status_code=401)
                return
            if new_name:
                obj.name = new_name
            if new_data:
                obj.data = new_data
        send_blank_response(conn, req, status_code=200)
    else:
        send_blank_response(conn, req, status_code=400)
        return
|
|
||||||
|
|
||||||
def handle_object_delete(req: Request, conn: PacketServerConnection, db: ZODB.DB):
    """Delete an object owned by the requester, identified by the 'uuid' var.

    Fixes:
    - existence (404) is checked before the owner is read, avoiding an
      AttributeError on a missing object
    - a failed delete now returns after the 500 reply instead of also
      sending a 200
    """
    username = ax25.Address(conn.remote_callsign).call.upper().strip()
    if 'uuid' in req.vars:
        uid = req.vars['uuid']
        # Accept the uuid as raw bytes, an int, or any str()-able form.
        if type(uid) is bytes:
            u_obj = UUID(bytes=uid)
        elif type(uid) is int:
            u_obj = UUID(int=uid)
        else:
            try:
                u_obj = UUID(str(uid))
            except ValueError:
                send_blank_response(conn, req, status_code=400, payload='badly formatted uuid')
                return
        with db.transaction() as db:
            obj = Object.get_object_by_uuid(u_obj, db.root())
            if obj is None:
                send_blank_response(conn, req, status_code=404)
                return
            user = User.get_user_by_username(username, db.root())
            if user.uuid != obj.owner:
                send_blank_response(conn, req, status_code=401)
                return
            try:
                # Unregister from the owner, then drop the stored object.
                user.remove_obj_uuid(u_obj)
                del db.root.objects[u_obj]
            except Exception:
                logging.error(f"Error handling delete:\n{format_exc()}")
                send_blank_response(conn, req, status_code=500)
                return
        send_blank_response(conn, req, status_code=200)
    else:
        send_blank_response(conn, req, status_code=400)
        return
|
|
||||||
|
|
||||||
def object_root_handler(req: Request, conn: PacketServerConnection, db: ZODB.DB):
    """Entry point for 'object' requests: auth-check, then route by method.

    Fix: the entry debug log previously claimed the request was being
    processed by user_root_handler (copy-paste error).
    """
    logging.debug(f"{req} being processed by object_root_handler")
    if not user_authorized(conn, db):
        logging.debug(f"user {conn.remote_callsign} not authorized")
        send_blank_response(conn, req, status_code=401)
        return
    logging.debug("user is authorized")
    if req.method is Request.Method.GET:
        handle_object_get(req, conn, db)
    elif req.method is Request.Method.POST:
        handle_object_post(req, conn, db)
    elif req.method is Request.Method.UPDATE:
        handle_object_update(req, conn, db)
    elif req.method is Request.Method.DELETE:
        handle_object_delete(req, conn, db)
    else:
        send_blank_response(conn, req, status_code=404)
|
|
||||||
@@ -1,66 +0,0 @@
|
|||||||
"""Module for handling requests as they arrive to connection objects and servers."""
|
|
||||||
import ax25
|
|
||||||
from msgpack.exceptions import OutOfData
|
|
||||||
from packetserver.common import Message, Request, Response, PacketServerConnection, send_response, send_blank_response
|
|
||||||
from .bulletin import bulletin_root_handler
|
|
||||||
from .users import user_root_handler, user_authorized
|
|
||||||
from .objects import object_root_handler
|
|
||||||
from .messages import message_root_handler
|
|
||||||
from .jobs import job_root_handler
|
|
||||||
import logging
|
|
||||||
from typing import Union
|
|
||||||
import ZODB
|
|
||||||
|
|
||||||
def handle_root_get(req: Request, conn: PacketServerConnection,
                    db: ZODB.DB):
    """Reply to a GET on the server root with operator, motd, auth status,
    and whether jobs are accepted.

    Fix: corrected the 'Sent reesponse.' typo in the final debug log line.
    """
    logging.debug(f"Root get handler received request: {req}")
    response = Response.blank()
    response.compression = Message.CompressionType.BZIP2
    operator = ""
    motd = ""
    jobs_enabled = False
    with db.transaction() as storage:
        # Each key is optional in the server config; fall back to the defaults above.
        if 'motd' in storage.root.config:
            motd = storage.root.config['motd']
        if 'operator' in storage.root.config:
            operator = storage.root.config['operator']
        if 'jobs_enabled' in storage.root.config:
            jobs_enabled = storage.root.config['jobs_enabled']
    logging.debug(f"Root handler retrieved config. {operator} - {motd}")
    logging.debug("Running user_authorized")
    base = ax25.Address(conn.remote_callsign).call
    if user_authorized(conn, db):
        user_message = f"User {base} is enabled."
    else:
        user_message = f"User {base} is not enabled."
    logging.debug(f"User authorized: {user_message}")
    response.payload = {
        'operator': operator,
        'motd': motd,
        'user': user_message,
        'accepts_jobs': jobs_enabled
    }
    logging.debug(f"Sending response {response}")
    send_response(conn, response, req)
    logging.debug("Sent response.")
|
|
||||||
|
|
||||||
def root_root_handler(req: Request, conn: PacketServerConnection,
                      db: ZODB.DB):
    """Dispatch requests addressed to the server root path.

    Only GET is supported; anything else is logged and answered with 404.
    """
    logging.debug(f"{req} got to root_root_handler")
    if req.method is not Request.Method.GET:
        # Guard clause instead of if/else: reject unsupported methods early.
        logging.warning(f"unhandled request found: {req}")
        send_blank_response(conn, req, status_code=404)
        return
    handle_root_get(req, conn, db)
|
|
||||||
|
|
||||||
# Default routing table: maps the first component of a request path to the
# root handler responsible for that subsystem. "" handles the bare root path.
standard_handlers = {
    "": root_root_handler,
    "bulletin": bulletin_root_handler,
    "user": user_root_handler,
    "object": object_root_handler,
    "message": message_root_handler,
    "job": job_root_handler
}
|
|
||||||
|
|
||||||
|
|
||||||
@@ -1,115 +0,0 @@
|
|||||||
import tempfile
|
|
||||||
from packetserver.common import Response, Message, Request, send_response, send_blank_response
|
|
||||||
from packetserver.common.testing import DirectoryTestServerConnection, DummyPacketServerConnection
|
|
||||||
from pe.connect import ConnectionState
|
|
||||||
from shutil import rmtree
|
|
||||||
from threading import Thread
|
|
||||||
from . import Server
|
|
||||||
import os
|
|
||||||
import os.path
|
|
||||||
import time
|
|
||||||
import logging
|
|
||||||
from traceback import format_exc
|
|
||||||
|
|
||||||
class TestServer(Server):
    """In-process test Server that injects data into dummy connections directly.

    NOTE(review): stop() does not join worker_thread — callers presumably rely
    on the `started` flag ending the loop; confirm before depending on ordering.
    """

    def __init__(self, server_callsign: str, data_dir: str = None, zeo: bool = True):
        super().__init__('localhost', 8000, server_callsign, data_dir=data_dir, zeo=zeo)
        # Monotonically increasing packet id used when injecting test frames.
        self._data_pid = 1
        # Scratch directory removed again by stop().
        self._file_traffic_dir = tempfile.mkdtemp()
        self._file_traffic_thread = None

    def start(self):
        """Start the orchestrator (if configured), the database, and the worker thread."""
        if self.orchestrator is not None:
            self.orchestrator.start()
        self.start_db()
        self.started = True
        self.worker_thread = Thread(target=self.run_worker)
        self.worker_thread.start()

    def stop(self):
        """Signal the worker to stop, shut down orchestrator/db, and clean up the temp dir."""
        self.started = False
        if self.orchestrator is not None:
            self.orchestrator.stop()
        self.stop_db()
        rmtree(self._file_traffic_dir)

    def data_pid(self) -> int:
        """Return the current packet id and advance the counter."""
        old = self._data_pid
        self._data_pid = self._data_pid + 1
        return old

    def send_test_data(self, conn: DummyPacketServerConnection, data: bytearray):
        """Feed data into a dummy connection, then run the server's receiver on it."""
        conn.data_received(self.data_pid(), data)
        self.server_receiver(conn)
|
|
||||||
|
|
||||||
|
|
||||||
class DirectoryTestServer(Server):
    """Test Server whose client connections exchange traffic via filesystem
    directories: each subdirectory of the traffic dir represents one
    connection, polled by the worker thread."""

    def __init__(self, server_callsign: str, connection_directory: str, data_dir: str = None, zeo: bool = True):
        super().__init__('localhost', 8000, server_callsign, data_dir=data_dir, zeo=zeo)
        if not os.path.isdir(connection_directory):
            raise NotADirectoryError(f"{connection_directory} is not a directory or doesn't exist.")
        self._file_traffic_dir = os.path.abspath(connection_directory)
        # Live DirectoryTestServerConnection objects, one per subdirectory.
        self._dir_connections = []

    def check_connection_directories(self):
        """One poll cycle: open connections for new subdirectories, pump data
        on existing connections, and drop those no longer CONNECTED.

        Raises NotADirectoryError if the traffic directory disappeared.
        """
        logging.debug(f"Server checking connection directory {self._file_traffic_dir}")
        if not os.path.isdir(self._file_traffic_dir):
            raise NotADirectoryError(f"{self._file_traffic_dir} is not a directory or doesn't exist.")

        for path in os.listdir(self._file_traffic_dir):
            dir_path = os.path.join(self._file_traffic_dir, path)
            logging.debug(f"Checking directory {dir_path}")
            if not os.path.isdir(dir_path):
                logging.debug(f"Server: {dir_path} is not a directory; skipping")
                continue

            # Skip directories that already have a live connection.
            conn_exists = False
            for conn in self._dir_connections:
                if os.path.abspath(conn.directory) == dir_path:
                    conn_exists = True
                    break

            if conn_exists:
                continue

            try:
                conn = DirectoryTestServerConnection.create_directory_connection(self.callsign, dir_path)
                logging.debug(f"New connection detected from {conn.remote_callsign}")
                self._dir_connections.append(conn)
                self.server_connection_bouncer(conn)
            except ValueError:
                # Directory isn't a valid connection (yet); ignore and retry next cycle.
                logging.debug(format_exc())
                pass

        closed = []

        # Pump each connection, collecting the ones that have disconnected.
        for conn in self._dir_connections:
            conn.check_for_data()
            if conn.state is not ConnectionState.CONNECTED:
                closed.append(conn)

        for conn in closed:
            if conn in self._dir_connections:
                self._dir_connections.remove(conn)

    def dir_worker(self):
        """Intended to be running as a thread."""
        logging.info("Starting worker thread.")
        while self.started:
            self.server_worker()
            self.check_connection_directories()
            time.sleep(.5)

    def start(self):
        """Start the orchestrator (if configured), the database, and the directory worker."""
        if self.orchestrator is not None:
            self.orchestrator.start()
        self.start_db()
        self.started = True
        self.worker_thread = Thread(target=self.dir_worker)
        self.worker_thread.start()

    def stop(self):
        """Signal the worker loop to end and shut down orchestrator and database."""
        self.started = False
        if self.orchestrator is not None:
            self.orchestrator.stop()
        self.stop_db()
|
|
||||||
@@ -1,321 +0,0 @@
|
|||||||
"""Module containing code related to users."""
|
|
||||||
|
|
||||||
import ax25
|
|
||||||
import persistent
|
|
||||||
import persistent.list
|
|
||||||
from persistent.list import PersistentList
|
|
||||||
from persistent.mapping import PersistentMapping
|
|
||||||
import datetime
|
|
||||||
from typing import Self,Union,Optional
|
|
||||||
from packetserver.common import PacketServerConnection, Request, Response, Message, send_response, send_blank_response
|
|
||||||
import ZODB
|
|
||||||
import logging
|
|
||||||
import uuid
|
|
||||||
from traceback import format_exc
|
|
||||||
from uuid import UUID
|
|
||||||
from packetserver.common.util import email_valid
|
|
||||||
from BTrees.OOBTree import TreeSet
|
|
||||||
|
|
||||||
class User(persistent.Persistent):
    """A server user account keyed by the (uppercased, stripped) callsign.

    Fixes in this revision:
    - the ``socials`` property previously always returned [] — it now returns
      the stored list (and ``remove_social`` operates on the real storage)
    - ``get_user_by_uuid`` type-checked the ``uuid`` *module* instead of the
      argument, and returned the matching user's uuid instead of the User
    """

    def __init__(self, username: str, enabled: bool = True, hidden: bool = False, bio: str = "", status: str = "",
                 email: str = None, location: str = "", socials: list[str] = None):
        self._username = username.upper().strip()
        self.enabled = enabled
        self.hidden = hidden  # hidden users are excluded from listings
        self.created_at = datetime.datetime.now(datetime.UTC)
        self.last_seen = self.created_at
        self._email = ""
        if email:
            self.email = email  # validated via the property setter
        self._location = ""
        self.location = location
        self._socials = []
        if socials:
            self.socials = socials
        self._uuid = None  # assigned by write_new()
        self.bio = bio
        self._status = ""
        self.status = status
        self._objects = TreeSet()  # uuids of objects owned by this user

    def write_new(self, db_root: PersistentMapping):
        """Assign a unique uuid and store this user under db_root['users']."""
        all_uuids = [db_root['users'][x].uuid for x in db_root['users']]
        self._uuid = uuid.uuid4()
        while self.uuid in all_uuids:
            self._uuid = uuid.uuid4()
        logging.debug(f"Creating new user account {self.username} - {self.uuid}")
        if self.username not in db_root['users']:
            db_root['users'][self.username] = self

    @property
    def object_uuids(self) -> list[UUID]:
        """Snapshot of the uuids of objects owned by this user."""
        return list(self._objects)

    def remove_obj_uuid(self, obj: UUID):
        """Unregister an object uuid from this user."""
        self._objects.remove(obj)

    def add_obj_uuid(self, obj: UUID):
        """Register an object uuid with this user."""
        self._objects.add(obj)

    def user_has_obj(self, obj: UUID) -> bool:
        """Return whether this user owns the given object uuid."""
        return obj in self._objects

    @property
    def location(self) -> str:
        """Free-form location string (truncated to 1000 characters)."""
        return self._location

    @location.setter
    def location(self, location: str):
        if len(location) > 1000:
            self._location = location[:1000]
        else:
            self._location = location

    @property
    def email(self) -> str:
        """Contact e-mail (validated and normalized to lowercase)."""
        return self._email

    @email.setter
    def email(self, email: str):
        if email_valid(email.strip().lower()):
            self._email = email.strip().lower()
        else:
            raise ValueError(f"Invalid e-mail given: {email}")

    @property
    def socials(self) -> list[str]:
        """Social-media links (each truncated to 300 characters).

        Fix: previously returned [] unconditionally, hiding all stored links.
        """
        return list(self._socials)

    @socials.setter
    def socials(self, socials: list[str]):
        for social in socials:
            if len(social) > 300:
                social = social[:300]
            self._socials.append(social)

    def add_social(self, social: str):
        """Append a single social link, truncated to 300 characters."""
        if len(social) > 300:
            social = social[:300]
        self._socials.append(social)

    def remove_social(self, social: str):
        """Remove a social link.

        Fix: operates on the stored list; removing from the property's copy
        was a no-op (and previously raised against the always-empty list).
        """
        self._socials.remove(social)

    @property
    def uuid(self):
        """The account's uuid; None until write_new() assigns one."""
        return self._uuid

    @classmethod
    def get_user_by_username(cls, username: str, db_root: PersistentMapping) -> Self:
        """Return the User stored under the normalized username, or None."""
        try:
            if username.upper().strip() in db_root['users']:
                return db_root['users'][username.upper().strip()]
        except Exception:
            return None
        return None

    @classmethod
    def get_user_by_uuid(cls, user_uuid: Union[UUID, bytes, int, str], db_root: PersistentMapping) -> Self:
        """Return the User with the given uuid (any common encoding), or None.

        Fix: the type dispatch previously inspected the ``uuid`` module rather
        than ``user_uuid``, and a match returned the uuid instead of the User.
        """
        try:
            if type(user_uuid) is uuid.UUID:
                uid = user_uuid
            elif type(user_uuid) is bytes:
                uid = uuid.UUID(bytes=user_uuid)
            elif type(user_uuid) is int:
                uid = uuid.UUID(int=user_uuid)
            else:
                uid = uuid.UUID(str(user_uuid))
            for user in db_root['users']:
                if uid == db_root['users'][user].uuid:
                    return db_root['users'][user]
        except Exception:
            return None
        return None

    @classmethod
    def get_all_users(cls, db_root: PersistentMapping, limit: int = None) -> list:
        """Return all users sorted by username, optionally capped at `limit`."""
        all_users = sorted(db_root['users'].values(), key=lambda user: user.username)
        if not limit:
            return all_users
        return all_users[:limit]

    @classmethod
    def is_authorized(cls, username: str, db_root: PersistentMapping) -> bool:
        """Return True when the named user exists and is enabled."""
        user = User.get_user_by_username(username, db_root)
        if user:
            if user.enabled:
                return True
        return False

    def seen(self):
        """Record that the user was just seen (updates last_seen to now, UTC)."""
        self.last_seen = datetime.datetime.now(datetime.UTC)

    @property
    def username(self) -> str:
        """The normalized (uppercase, stripped) account name."""
        return self._username.upper().strip()

    @property
    def bio(self) -> str:
        """Free-form biography (truncated to 4000 characters)."""
        return self._bio

    @bio.setter
    def bio(self, bio: str):
        if len(bio) > 4000:
            self._bio = bio[:4000]
        else:
            self._bio = bio

    @property
    def status(self) -> str:
        """Short status line (truncated to 300 characters)."""
        return self._status

    @status.setter
    def status(self, status: str):
        if len(status) > 300:
            self._status = status[:300]
        else:
            self._status = status

    def to_safe_dict(self) -> dict:
        """Serialize the publicly shareable account fields."""
        return {
            "username": self.username,
            "status": self.status,
            "bio": self.bio,
            "socials": self.socials,
            "email": self.email,
            "location": self.location,
            "last_seen": self.last_seen.isoformat(),
            "created_at": self.created_at.isoformat()
        }

    def __repr__(self):
        return f"<User: {self.username} - {self.uuid}>"
|
|
||||||
|
|
||||||
def user_authorized(conn: PacketServerConnection, db: ZODB.DB) -> bool:
    """Return whether the connection's base callsign maps to an enabled user."""
    username = ax25.Address(conn.remote_callsign).call
    logging.debug(f"Running authcheck for user {username}")
    authorized = False
    with db.transaction() as txn:
        authorized = User.is_authorized(username, txn.root())
    logging.debug(f"User is authorized? {authorized}")
    return authorized
|
|
||||||
|
|
||||||
def handle_user_get(req: Request, conn: PacketServerConnection, db: ZODB.DB):
    """Serve GET requests for user profiles.

    Lookup order: a username embedded in the request path ("user/<name>")
    takes precedence, then a 'username' request variable; with neither,
    respond with a listing of all non-hidden users (optionally capped by an
    integer 'limit' var). Hidden or unknown users fall through to the
    default 404 response.
    """
    sp = req.path.split("/")
    logging.debug("handle_user_get working")
    user = None
    user_var = req.vars.get('username')
    response = Response.blank()
    # 404 unless a visible user (or the listing branch) overrides it below.
    response.status_code = 404
    limit = None
    if 'limit' in req.vars:
        try:
            limit = int(req.vars['limit'])
        except ValueError:
            # A malformed limit is ignored rather than rejected.
            pass
    with db.transaction() as db:
        if len(sp) > 1:
            # Username taken from the request path.
            logging.debug(f"trying to get the username from the path {sp[1].strip().upper()}")
            user = User.get_user_by_username(sp[1].strip().upper(), db.root())
            logging.debug(f"user holds: {user}")
            if user and not user.hidden:
                response.status_code = 200
                response.payload = user.to_safe_dict()
        elif user_var:
            # Fall back to the 'username' request variable.
            # NOTE(review): the original contained a second, identical
            # `if user_var:` branch nested inside the else of this check;
            # that branch was unreachable (user_var is falsy there) and has
            # been removed.
            user = User.get_user_by_username(user_var.upper().strip(), db.root())
            if user and not user.hidden:
                response.status_code = 200
                response.payload = user.to_safe_dict()
        else:
            # No specific user requested: list every visible user.
            response.status_code = 200
            response.payload = [x.to_safe_dict() for x in User.get_all_users(db.root(), limit=limit) if not x.hidden]
    send_response(conn, response, req)
|
|
||||||
|
|
||||||
def handle_user_update(req: Request, conn: PacketServerConnection, db: ZODB.DB):
    """Apply profile changes from the request payload to the caller's user.

    Recognized payload keys (all optional):
      "status":   str, stored truncated to 300 chars by the User setter
      "bio":      str, stored truncated to 4000 chars by the User setter
      "socials":  list[str]; a bare value is wrapped into a one-item list
      "email":    str, must pass email_valid() or the request is rejected
      "location": str

    Replies 200 on success, 400 on an invalid email, 500 on any error while
    writing to the database.
    """
    username = ax25.Address(conn.remote_callsign).call.upper().strip()
    logging.debug(f"Handling user update request for {username}: {req.payload}")

    email = None
    bio = None
    socials = None
    location = None
    status = None

    # Pull recognized fields out of the payload, coercing to str where needed.
    if 'bio' in req.payload:
        bio = str(req.payload['bio'])

    if 'location' in req.payload:
        location = str(req.payload['location'])

    if 'status' in req.payload:
        status = str(req.payload['status'])

    if 'email' in req.payload:
        email = req.payload['email']
        if not email_valid(email):
            send_blank_response(conn, req, status_code=400, payload="email must be valid format")
            return

    if 'socials' in req.payload:
        var_socials = req.payload['socials']
        socials = []
        # isinstance (rather than an exact type check) also accepts
        # list subclasses such as persistent lists.
        if isinstance(var_socials, list):
            for s in var_socials:
                socials.append(str(s))
        else:
            # A single bare value is accepted and normalized to a list.
            socials.append(str(var_socials))
    try:
        with db.transaction() as db:
            user = User.get_user_by_username(username, db.root())
            if email is not None:
                user.email = email
            if bio is not None:
                user.bio = bio
            if socials is not None:
                user.socials = socials
            if location is not None:
                user.location = location
            if status is not None:
                user.status = status
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; any storage failure is logged and reported as 500.
        logging.error(f"Error while updating user {username}:\n{format_exc()}")
        send_blank_response(conn, req, status_code=500)
        return

    send_blank_response(conn, req, status_code=200)
|
|
||||||
|
|
||||||
def user_root_handler(req: Request, conn: PacketServerConnection, db: ZODB.DB):
    """Entry point for user requests: authenticate the caller, then dispatch
    by HTTP-style method (GET -> fetch/list, UPDATE -> modify)."""
    logging.debug(f"{req} being processed by user_root_handler")
    if not user_authorized(conn, db):
        logging.debug(f"user {conn.remote_callsign} not authorized")
        send_blank_response(conn, req, status_code=401)
        return
    logging.debug("user is authorized")
    method = req.method
    if method is Request.Method.GET:
        handle_user_get(req, conn, db)
        return
    if method is Request.Method.UPDATE:
        handle_user_update(req, conn, db)
        return
    # Any other method is unsupported on this route.
    send_blank_response(conn, req, status_code=404)
|
|
||||||
@@ -1,12 +0,0 @@
|
|||||||
Metadata-Version: 2.2
|
|
||||||
Name: packetserver
|
|
||||||
Version: 0.4.1
|
|
||||||
Requires-Dist: click
|
|
||||||
Requires-Dist: pyham_pe
|
|
||||||
Requires-Dist: msgpack
|
|
||||||
Requires-Dist: pyham_ax25
|
|
||||||
Requires-Dist: ZODB
|
|
||||||
Requires-Dist: ZEO
|
|
||||||
Requires-Dist: podman
|
|
||||||
Requires-Dist: tabulate
|
|
||||||
Dynamic: requires-dist
|
|
||||||
@@ -1,41 +0,0 @@
|
|||||||
setup.py
|
|
||||||
client/__init__.py
|
|
||||||
client/bulletins.py
|
|
||||||
client/jobs.py
|
|
||||||
client/messages.py
|
|
||||||
client/objects.py
|
|
||||||
client/testing.py
|
|
||||||
client/users.py
|
|
||||||
client/cli/__init__.py
|
|
||||||
client/cli/bulletin.py
|
|
||||||
client/cli/config.py
|
|
||||||
client/cli/constants.py
|
|
||||||
client/cli/db.py
|
|
||||||
client/cli/job.py
|
|
||||||
client/cli/message.py
|
|
||||||
client/cli/object.py
|
|
||||||
client/cli/server.py
|
|
||||||
client/cli/util.py
|
|
||||||
common/__init__.py
|
|
||||||
common/constants.py
|
|
||||||
common/testing.py
|
|
||||||
common/util.py
|
|
||||||
packetserver.egg-info/PKG-INFO
|
|
||||||
packetserver.egg-info/SOURCES.txt
|
|
||||||
packetserver.egg-info/dependency_links.txt
|
|
||||||
packetserver.egg-info/entry_points.txt
|
|
||||||
packetserver.egg-info/requires.txt
|
|
||||||
packetserver.egg-info/top_level.txt
|
|
||||||
runner/__init__.py
|
|
||||||
runner/constants.py
|
|
||||||
runner/podman.py
|
|
||||||
server/__init__.py
|
|
||||||
server/bulletin.py
|
|
||||||
server/constants.py
|
|
||||||
server/db.py
|
|
||||||
server/jobs.py
|
|
||||||
server/messages.py
|
|
||||||
server/objects.py
|
|
||||||
server/requests.py
|
|
||||||
server/testserver.py
|
|
||||||
server/users.py
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
|
|
||||||
@@ -1,2 +0,0 @@
|
|||||||
[console_scripts]
|
|
||||||
packcli = packetserver.client.cli:cli
|
|
||||||
@@ -1,8 +0,0 @@
|
|||||||
click
|
|
||||||
pyham_pe
|
|
||||||
msgpack
|
|
||||||
pyham_ax25
|
|
||||||
ZODB
|
|
||||||
ZEO
|
|
||||||
podman
|
|
||||||
tabulate
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
packetserver
|
|
||||||
Reference in New Issue
Block a user