Lots of object- and job-related tweaks.

Michael Woods
2025-12-28 15:36:58 -05:00
parent 04d34fdf32
commit 55b0e1806b
9 changed files with 63 additions and 16 deletions

View File

@@ -6,7 +6,7 @@ class Settings(BaseSettings):
""" """
# Define your settings fields with type hints and optional default values # Define your settings fields with type hints and optional default values
name: str = "PacketServer" name: str = "PacketServer"
zeo_file: str zeo_file: str = ""
operator: str | None = None operator: str | None = None
debug_mode: bool = False debug_mode: bool = False
log_level: str = "info" log_level: str = "info"
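
With zeo_file now defaulting to an empty string, Settings can be constructed without that field being supplied. A minimal sketch of the changed behavior, assuming pydantic-settings (the env-var naming and any prefix are whatever the real Settings class configures):

from pydantic_settings import BaseSettings

class Settings(BaseSettings):
    name: str = "PacketServer"
    zeo_file: str = ""  # previously required; construction failed without it

s = Settings()
assert s.zeo_file == ""  # assumption: callers treat "" as "not configured"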

View File

@@ -16,7 +16,9 @@ from packetserver.http.database import DbDependency
 from packetserver.server.jobs import Job, JobStatus
 from packetserver.http.server import templates
 from packetserver.server.db import get_user_db_json
-from packetserver.server.jobs import RunnerFile
+from packetserver.server.jobs import RunnerFile, add_object_to_file_list
+from packetserver.server.objects import Object
+from packetserver.server.users import User
 
 router = APIRouter(prefix="/api/v1", tags=["jobs"])
 dashboard_router = APIRouter(tags=["jobs"])
@@ -54,6 +56,7 @@ class JobCreate(BaseModel):
     cmd: Union[str, List[str]]
     env: Optional[Dict[str, str]] = None
     files: Optional[Dict[str, str]] = None
+    objs: Optional[List[str]] = None
 
 @router.get("/jobs", response_model=List[JobSummary])
 async def list_user_jobs(
@@ -173,8 +176,10 @@ async def create_job_from_form(
     files: List[UploadFile] = File(default=[]),
     include_db: Optional[str] = Form(None),
     shell_mode: Optional[str] = Form(None),
+    objs: Optional[str] = Form(None),
     current_user: HttpUser = Depends(get_current_http_user)
 ):
+    logging.debug("new job form post received")
     # Build env dict from parallel lists
     env = {}
     for k, v in zip(env_keys, env_values):
@@ -203,20 +208,26 @@ async def create_job_from_form(
             # get_user_db_json needs the raw ZODB.DB instance
             user_db_bytes = get_user_db_json(username_lower,db)
             # Base64-encode for payload consistency
-            b64_db = base64.b64encode(user_db_bytes).decode('ascii')
+            b64_db = base64.b64encode(user_db_bytes).decode('utf-8')
+            logging.debug(f"DB base64: {b64_db}")
             files_dict["user-db.json.gz"] = b64_db
             logging.debug(f"Injected user-db.json.gz for {current_user.username}")
         except Exception as e:
             logging.error(f"Failed to generate user-db.json.gz: {e}")
             raise HTTPException(status_code=500, detail="Failed to include user database")
 
+    objs_list = None
+    if objs:
+        objs_list = [uuid_str.strip() for uuid_str in objs.split(",") if uuid_str.strip()]
     # Prepare payload for the existing API
     payload = {
         "cmd": cmd_args,
         "env": env if env else None,
-        "files": files_dict if files_dict else None
+        "files": files_dict if files_dict else None,
+        "objs": objs_list,
     }
 
+    logging.debug("Calling internal API to create the job")
     # Call the API internally
     response = await create_job(
         payload=JobCreate(**{k: v for k, v in payload.items() if v is not None}),
@@ -225,6 +236,7 @@ async def create_job_from_form(
     )
 
     # Redirect to the new job detail page
+    logging.debug("Job queued")
     return RedirectResponse(url=f"/jobs/{response.id}", status_code=303)
 
 @dashboard_router.get("/jobs/{jid}", response_class=HTMLResponse)
@@ -252,7 +264,7 @@ async def create_job(
     current_user: HttpUser = Depends(get_current_http_user)
 ):
     username = current_user.username.upper().strip()
+    logging.debug(f"New job create function called for '{username}'")
     try:
         # Process files: convert base64 dict to list of RunnerFile
         runner_files = []
@@ -263,8 +275,26 @@ async def create_job(
                 runner_files.append(RunnerFile(filename, data=data_bytes))
             except Exception as e:
                 raise HTTPException(status_code=400, detail=f"Invalid base64 for file {filename}")
+        # Handle attached objects by UUID
+        logging.debug("handling objects")
+        if payload.objs:
+            logging.debug("Adding objects to files list")
+            for obj_uuid_str in payload.objs:
+                try:
+                    with db.transaction() as conn:
+                        add_object_to_file_list(obj_uuid_str, runner_files, username, conn)
+                except KeyError as ke:
+                    raise HTTPException(status_code=404, detail=f"Object not found: {str(ke)}")
+                except Exception as exc:  # Catches permission issues or invalid UUID
+                    raise HTTPException(status_code=403 if "private" in str(exc).lower() else 400,
+                                        detail=f"Cannot attach object {obj_uuid_str}: {str(exc)}")
+        logging.debug("Creating job instance now")
         # Create the Job instance
+        if type(payload.cmd) is str:
+            payload.cmd = payload.cmd.replace('\r', '')
+        else:
+            for i in range(0,len(payload.cmd)):
+                payload.cmd[i] = payload.cmd[i].replace('\r', '')
         new_job = Job(
             cmd=payload.cmd,
             owner=username,
@@ -277,7 +307,7 @@ async def create_job(
         new_jid = new_job.queue(root)
         logging.info(f"User {username} queued job {new_jid}: {payload.cmd} with {len(runner_files)} files")
+        logging.debug("New job created.")
         return JobSummary(
             id=new_jid,
             cmd=new_job.cmd,
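
For reference, a client call against the extended endpoint might look like the sketch below; the host, port, and bearer-token auth are assumptions, while the /api/v1/jobs path and field names come from the router above:

import base64
import requests

token = "YOUR-API-TOKEN"  # hypothetical credential; the real auth scheme may differ

payload = {
    "cmd": ["python3", "run.py"],
    "env": {"MODE": "test"},
    "files": {"run.py": base64.b64encode(b"print('hi')").decode("utf-8")},
    "objs": ["123e4567-e89b-12d3-a456-426614174000"],  # new: attach stored objects by UUID
}
resp = requests.post("http://localhost:8000/api/v1/jobs", json=payload,
                     headers={"Authorization": f"Bearer {token}"})
resp.raise_for_status()
print(resp.json()["id"])  # id of the queued job, per JobSummary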

View File

@@ -91,7 +91,7 @@ async def upload_object(
     if force_text:
         try:
             text_content = content.decode('utf-8', errors='strict')
-            object_data = text_content # str → will set binary=False
+            object_data = text_content.replace('\r', '') # str → will set binary=False
         except UnicodeDecodeError:
             raise HTTPException(status_code=400, detail="Content is not valid UTF-8 and cannot be forced as text")
else: else:
@@ -169,6 +169,7 @@ async def create_text_object(
     # Validate text
     if not text:
         raise HTTPException(status_code=400, detail="Text content cannot be empty")
+    text = text.replace("\r", "")
 
     # Normalize name (optional, default like original)
     obj_name = (name or "text_object.txt").strip()

View File

@@ -73,6 +73,7 @@ async def update_object(
payload["private"] = (private == "on") payload["private"] = (private == "on")
if new_text is not None and new_text.strip(): if new_text is not None and new_text.strip():
new_text = new_text.replace("\r","")
payload["data_text"] = new_text.strip() payload["data_text"] = new_text.strip()
elif new_file and new_file.filename: elif new_file and new_file.filename:
content = await new_file.read() content = await new_file.read()

View File

@@ -41,6 +41,11 @@
<input type="file" name="files" class="form-control" multiple> <input type="file" name="files" class="form-control" multiple>
<div class="form-text">Uploaded files will be available in the container's working directory.</div> <div class="form-text">Uploaded files will be available in the container's working directory.</div>
</div> </div>
<div class="mb-3">
<label class="form-label">Attach Existing Objects by UUID</label>
<input type="text" name="objs" class="form-control" placeholder="e.g. 123e4567-e89b-12d3-a456-426614174000, another-uuid-here">
<div class="form-text">Comma-separated list of object UUIDs (your own or public). Leave blank for none.</div>
</div>
<div class="mb-3 form-check"> <div class="mb-3 form-check">
<input type="checkbox" class="form-check-input" id="include_db" name="include_db" value="on"> <input type="checkbox" class="form-check-input" id="include_db" name="include_db" value="on">

View File

@@ -46,6 +46,8 @@ class PodmanRunner(Runner):
         self.env['PACKETSERVER_JOBID'] = str(job_id)
         self.job_path = os.path.join("/home", self.username, ".packetserver", str(job_id))
         self.archive_path = os.path.join("/artifact_output", f"{str(job_id)}.tar.gz")
+        self.env['PACKETSERVER_JOB_HOME'] = self.job_path
+        self.env['PACKETSERVER_ARTIFACT_DIR'] = os.path.join(self.job_path,'artifacts')
 
     def thread_runner(self):
         self.status = RunnerStatus.RUNNING
@@ -240,7 +242,7 @@ class PodmanOrchestrator(Orchestrator):
     def podman_start_user_container(self, username: str) -> Container:
         container_env = {
             "PACKETSERVER_VERSION": packetserver_version,
-            "PACKETSERVER_USER": username.strip().lower()
+            "PACKETSERVER_USER": username.strip().lower(),
         }
         logging.debug(f"Starting user container for {username} with command {podman_run_command}")
         con = self.client.containers.create(self.opts.image_name, name=self.get_container_name(username),
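
Inside the container, a job can now locate its working and artifact directories from the environment. A sketch of a job script using the two variables added above (the artifact-collection step itself is inferred from archive_path, not shown in this diff):

import os
from pathlib import Path

job_home = Path(os.environ["PACKETSERVER_JOB_HOME"])
artifact_dir = Path(os.environ["PACKETSERVER_ARTIFACT_DIR"])
artifact_dir.mkdir(parents=True, exist_ok=True)
# Assumption: files written here end up in the job's .tar.gz archive.
(artifact_dir / "result.txt").write_text("done\n")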

View File

@@ -50,6 +50,7 @@ class Server:
         self.check_job_queue = True
         self.last_check_job_queue = datetime.datetime.now(datetime.UTC)
         self.job_check_interval = 60
+        self.default_job_check_interval = 60
         self.quick_job = False
         if data_dir:
             data_path = Path(data_dir)
@@ -131,10 +132,11 @@ class Server:
         self.last_check_job_queue = datetime.datetime.now(datetime.UTC)
         if self.quick_job:
             logging.debug("Setting the final quick job timer.")
-            self.job_check_interval = 5
+            if self.default_job_check_interval > 5:
+                self.job_check_interval = 5
             self.quick_job = False
         else:
-            self.job_check_interval = 60
+            self.job_check_interval = self.default_job_check_interval
 
     def server_connection_bouncer(self, conn: PacketServerConnection):
         logging.debug("new connection bouncer checking user status")
@@ -235,7 +237,9 @@ class Server:
         with self.db.transaction() as storage:
             if 'job_check_interval' in storage.root.config:
                 try:
-                    self.job_check_interval = int(storage.root.config['job_check_interval'])
+                    self.default_job_check_interval = int(storage.root.config['job_check_interval'])
+                    if self.job_check_interval > self.default_job_check_interval:
+                        self.job_check_interval = self.default_job_check_interval
                 except:
                     logging.warning(f"Invalid config value for 'job_check_interval'")
         # queue as many jobs as possible
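
Taken together, the two hunks above make the quick-job timer respect a configured interval: the post-quick-job wait drops to 5 seconds only when the configured default is longer, and the reset path restores the configured value instead of a hard-coded 60. A standalone illustration of that rule (not server code):

def next_interval(quick_job: bool, default_interval: int) -> int:
    if quick_job:
        return min(default_interval, 5)  # shorten the wait, but never past the configured value
    return default_interval

assert next_interval(True, 60) == 5    # quick job: check the queue again soon
assert next_interval(True, 3) == 3     # already faster than 5s: leave it alone
assert next_interval(False, 90) == 90  # previously this reset to a hard-coded 60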

View File

@@ -64,22 +64,26 @@ def get_new_job_id(root: PersistentMapping) -> int:
 def add_object_to_file_list(object_id: Union[str,UUID], file_list: List[RunnerFile], username: str, conn: Connection):
     if type(object_id) is str:
         object_id = UUID(object_id)
+    logging.debug("Adding an object to file list for new job.")
     root = conn.root()
+    logging.debug("Got db root from transaction/connection object.")
     obj = Object.get_object_by_uuid(object_id, root)
+    logging.debug(f"Looked up object {obj}")
     if obj is None:
         raise KeyError(f"Object '{object_id}' does not exist.")
     if obj.private:
         owner_uuid = obj.owner
         owner = User.get_user_by_uuid(owner_uuid, root)
+        logging.debug(f"Looked up owner of object: {owner}")
         if not (owner.username.lower() == username.lower()):
             raise PermissionError(f"Specified object {object_id} not public and not owned by user.")
+    logging.debug("Checking paths now.")
     unique_path = obj.name
     runner_paths = []
     for i in file_list:
         runner_paths.append(i.destination_path)
     suffix = 1
-    while unique_path not in runner_paths:
+    while unique_path in runner_paths:
         unique_path = obj.name + f"_{suffix}"
         suffix = suffix + 1
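
The while-condition fix matters: with the old 'not in', a non-colliding name was renamed over and over (the loop only stopped if a suffixed candidate happened to collide), while a genuine collision was never renamed at all. A standalone sketch of the corrected de-duplication:

def unique_name(name: str, taken: list) -> str:
    candidate, suffix = name, 1
    while candidate in taken:  # the old code had 'not in' here, inverting the logic
        candidate = f"{name}_{suffix}"
        suffix += 1
    return candidate

assert unique_name("data.txt", []) == "data.txt"
assert unique_name("data.txt", ["data.txt"]) == "data.txt_1"
assert unique_name("data.txt", ["data.txt", "data.txt_1"]) == "data.txt_2"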

View File

@@ -129,7 +129,7 @@ class User(persistent.Persistent):
             uid = uuid.UUID(str(user_uuid))
             for user in db_root['users']:
                 if uid == db_root['users'][user].uuid:
-                    return db_root['users'][user].uuid
+                    return db_root['users'][user]
         except Exception:
             return None
         return None
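
This return-value fix is what lets the new object-attachment path above call owner.username: the lookup now yields the User object rather than its uuid. A standalone illustration of the bug class, using toy types rather than the real classes:

import uuid

class FakeUser:
    def __init__(self, username: str):
        self.username = username
        self.uuid = uuid.uuid4()

users = {"MW": FakeUser("MW")}

def get_user_by_uuid(uid):
    for name in users:
        if uid == users[name].uuid:
            return users[name]  # fixed: previously returned users[name].uuid
    return None

u = get_user_by_uuid(users["MW"].uuid)
assert u is not None and u.username == "MW"  # callers need the User, not its uuid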