Lots of object and job related tweaks.
@@ -6,7 +6,7 @@ class Settings(BaseSettings):
     """
     # Define your settings fields with type hints and optional default values
     name: str = "PacketServer"
-    zeo_file: str
+    zeo_file: str = ""
     operator: str | None = None
     debug_mode: bool = False
     log_level: str = "info"
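
Note: with no default, a `BaseSettings` field is required and settings construction fails at startup when it is unset; the empty-string default makes `zeo_file` optional. A minimal sketch of the difference, assuming the class is a standard pydantic-settings `BaseSettings` as the hunk context suggests:

    from pydantic_settings import BaseSettings

    class Settings(BaseSettings):
        name: str = "PacketServer"
        zeo_file: str = ""   # optional now; a bare `zeo_file: str` would be required

    settings = Settings()    # no longer raises a validation error when ZEO_FILE is unset
    print(settings.zeo_file or "<no zeo_file configured>")
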
@@ -16,7 +16,9 @@ from packetserver.http.database import DbDependency
 from packetserver.server.jobs import Job, JobStatus
 from packetserver.http.server import templates
 from packetserver.server.db import get_user_db_json
-from packetserver.server.jobs import RunnerFile
+from packetserver.server.jobs import RunnerFile, add_object_to_file_list
+from packetserver.server.objects import Object
+from packetserver.server.users import User
 
 router = APIRouter(prefix="/api/v1", tags=["jobs"])
 dashboard_router = APIRouter(tags=["jobs"])
@@ -54,6 +56,7 @@ class JobCreate(BaseModel):
     cmd: Union[str, List[str]]
     env: Optional[Dict[str, str]] = None
     files: Optional[Dict[str, str]] = None
+    objs: Optional[List[str]] = None
 
 @router.get("/jobs", response_model=List[JobSummary])
 async def list_user_jobs(
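
Note: the new `objs` field lets API clients attach stored objects by UUID when queuing a job. A hypothetical request body for job creation (all values are placeholders):

    payload = {
        "cmd": ["python3", "script.py"],                   # or a single shell string
        "env": {"MODE": "test"},
        "files": {"script.py": "cHJpbnQoJ2hpJyk="},        # filename -> base64 contents
        "objs": ["123e4567-e89b-12d3-a456-426614174000"],  # UUIDs of stored objects
    }
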
@@ -173,8 +176,10 @@ async def create_job_from_form(
     files: List[UploadFile] = File(default=[]),
     include_db: Optional[str] = Form(None),
     shell_mode: Optional[str] = Form(None),
+    objs: Optional[str] = Form(None),
     current_user: HttpUser = Depends(get_current_http_user)
 ):
+    logging.debug("new job form post received")
     # Build env dict from parallel lists
     env = {}
     for k, v in zip(env_keys, env_values):
@@ -203,20 +208,26 @@ async def create_job_from_form(
             # get_user_db_json needs the raw ZODB.DB instance
             user_db_bytes = get_user_db_json(username_lower,db)
             # Base64-encode for payload consistency
-            b64_db = base64.b64encode(user_db_bytes).decode('ascii')
+            b64_db = base64.b64encode(user_db_bytes).decode('utf-8')
+            logging.debug(f"DB base64: f{b64_db}")
             files_dict["user-db.json.gz"] = b64_db
             logging.debug(f"Injected user-db.json.gz for {current_user.username}")
         except Exception as e:
             logging.error(f"Failed to generate user-db.json.gz: {e}")
             raise HTTPException(status_code=500, detail="Failed to include user database")
 
+    objs_list = None
+    if objs:
+        objs_list = [uuid_str.strip() for uuid_str in objs.split(",") if uuid_str.strip()]
     # Prepare payload for the existing API
     payload = {
         "cmd": cmd_args,
         "env": env if env else None,
-        "files": files_dict if files_dict else None
+        "files": files_dict if files_dict else None,
+        "objs": objs_list,
     }
 
+    logging.debug("Calling internal API to create the job")
     # Call the API internally
     response = await create_job(
         payload=JobCreate(**{k: v for k, v in payload.items() if v is not None}),
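
Note: the switch from `decode('ascii')` to `decode('utf-8')` is behavior-preserving, since base64 output uses only ASCII characters and those decode identically under UTF-8. A quick check:

    import base64

    blob = b"\x1f\x8b\x08\x00example"   # stand-in for the gzipped JSON bytes
    encoded = base64.b64encode(blob)
    assert encoded.decode('ascii') == encoded.decode('utf-8')  # base64 is pure ASCII
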
@@ -225,6 +236,7 @@ async def create_job_from_form(
     )
 
     # Redirect to the new job detail page
+    logging.debug("Job queued")
     return RedirectResponse(url=f"/jobs/{response.id}", status_code=303)
 
 @dashboard_router.get("/jobs/{jid}", response_class=HTMLResponse)
@@ -252,7 +264,7 @@ async def create_job(
     current_user: HttpUser = Depends(get_current_http_user)
 ):
     username = current_user.username.upper().strip()
-
+    logging.debug(f"New job create function called for '{username}'")
     try:
         # Process files: convert base64 dict to list of RunnerFile
         runner_files = []
@@ -263,8 +275,26 @@ async def create_job(
             runner_files.append(RunnerFile(filename, data=data_bytes))
         except Exception as e:
             raise HTTPException(status_code=400, detail=f"Invalid base64 for file {filename}")
 
+    # Handle attached objects by UUID
+    logging.debug("handling objects")
+    if payload.objs:
+        logging.debug("Adding objects to files list")
+        for obj_uuid_str in payload.objs:
+            try:
+                with db.transaction() as conn:
+                    add_object_to_file_list(obj_uuid_str, runner_files, username, conn)
+            except KeyError as ke:
+                raise HTTPException(status_code=404, detail=f"Object not found: {str(ke)}")
+            except Exception as exc:  # Catches permission issues or invalid UUID
+                raise HTTPException(status_code=403 if "private" in str(exc).lower() else 400,
+                                    detail=f"Cannot attach object {obj_uuid_str}: {str(exc)}")
+    logging.debug("Creating job instance now")
     # Create the Job instance
+    if type(payload.cmd) is str:
+        payload.cmd = payload.cmd.replace('\r', '')
+    else:
+        for i in range(0,len(payload.cmd)):
+            payload.cmd[i] = payload.cmd[i].replace('\r', '')
     new_job = Job(
         cmd=payload.cmd,
         owner=username,
@@ -277,7 +307,7 @@ async def create_job(
         new_jid = new_job.queue(root)
 
     logging.info(f"User {username} queued job {new_jid}: {payload.cmd} with {len(runner_files)} files")
-
+    logging.debug("New job created.")
     return JobSummary(
         id=new_jid,
         cmd=new_job.cmd,
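
Note: the carriage-return stripping guards against CRLF line endings submitted from browser forms, which would otherwise leak `\r` into the command handed to the runner. The same normalization restated as a standalone helper (illustrative only, not part of the commit):

    from typing import List, Union

    def strip_carriage_returns(cmd: Union[str, List[str]]) -> Union[str, List[str]]:
        # Browser textareas submit CRLF; only the LF should reach the runner.
        if isinstance(cmd, str):
            return cmd.replace('\r', '')
        return [part.replace('\r', '') for part in cmd]
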
@@ -91,7 +91,7 @@ async def upload_object(
     if force_text:
         try:
             text_content = content.decode('utf-8', errors='strict')
-            object_data = text_content  # str → will set binary=False
+            object_data = text_content.replace('\r', '')  # str → will set binary=False
         except UnicodeDecodeError:
             raise HTTPException(status_code=400, detail="Content is not valid UTF-8 and cannot be forced as text")
     else:
@@ -169,6 +169,7 @@ async def create_text_object(
     # Validate text
     if not text:
         raise HTTPException(status_code=400, detail="Text content cannot be empty")
+    text = text.replace("\r", "")
 
     # Normalize name (optional, default like original)
     obj_name = (name or "text_object.txt").strip()
@@ -73,6 +73,7 @@ async def update_object(
         payload["private"] = (private == "on")
 
     if new_text is not None and new_text.strip():
+        new_text = new_text.replace("\r","")
         payload["data_text"] = new_text.strip()
     elif new_file and new_file.filename:
         content = await new_file.read()
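
Note: all three object endpoints now apply the same CRLF cleanup before storing text. The `force_text` path pairs it with a strict decode so binary uploads are rejected rather than silently mangled; a condensed sketch of that pattern:

    def coerce_text(content: bytes) -> str:
        # A strict decode raises UnicodeDecodeError on non-UTF-8 (binary) input,
        # mirroring the 400 response in the force_text branch above.
        text = content.decode('utf-8', errors='strict')
        return text.replace('\r', '')
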
@@ -41,6 +41,11 @@
             <input type="file" name="files" class="form-control" multiple>
             <div class="form-text">Uploaded files will be available in the container's working directory.</div>
         </div>
+        <div class="mb-3">
+            <label class="form-label">Attach Existing Objects by UUID</label>
+            <input type="text" name="objs" class="form-control" placeholder="e.g. 123e4567-e89b-12d3-a456-426614174000, another-uuid-here">
+            <div class="form-text">Comma-separated list of object UUIDs (your own or public). Leave blank for none.</div>
+        </div>
 
         <div class="mb-3 form-check">
             <input type="checkbox" class="form-check-input" id="include_db" name="include_db" value="on">
@@ -46,6 +46,8 @@ class PodmanRunner(Runner):
         self.env['PACKETSERVER_JOBID'] = str(job_id)
         self.job_path = os.path.join("/home", self.username, ".packetserver", str(job_id))
         self.archive_path = os.path.join("/artifact_output", f"{str(job_id)}.tar.gz")
+        self.env['PACKETSERVER_JOB_HOME'] = self.job_path
+        self.env['PACKETSERVER_ARTIFACT_DIR'] = os.path.join(self.job_path,'artifacts')
 
     def thread_runner(self):
         self.status = RunnerStatus.RUNNING
@@ -240,7 +242,7 @@ class PodmanOrchestrator(Orchestrator):
     def podman_start_user_container(self, username: str) -> Container:
         container_env = {
             "PACKETSERVER_VERSION": packetserver_version,
-            "PACKETSERVER_USER": username.strip().lower()
+            "PACKETSERVER_USER": username.strip().lower(),
         }
         logging.debug(f"Starting user container for {username} with command {podman_run_command}")
         con = self.client.containers.create(self.opts.image_name, name=self.get_container_name(username),
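
Note: with `PACKETSERVER_JOB_HOME` and `PACKETSERVER_ARTIFACT_DIR` now exported into the container, job scripts can locate their workspace without hard-coding paths. A hypothetical job script, assuming (per the `archive_path` above) that files written to the artifact directory are collected into the job's archive:

    import os

    job_home = os.environ["PACKETSERVER_JOB_HOME"]
    artifact_dir = os.environ["PACKETSERVER_ARTIFACT_DIR"]
    os.makedirs(artifact_dir, exist_ok=True)
    with open(os.path.join(artifact_dir, "result.txt"), "w") as f:
        f.write("collected with the job's artifacts\n")
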
@@ -50,6 +50,7 @@ class Server:
         self.check_job_queue = True
         self.last_check_job_queue = datetime.datetime.now(datetime.UTC)
         self.job_check_interval = 60
+        self.default_job_check_interval = 60
         self.quick_job = False
         if data_dir:
             data_path = Path(data_dir)
@@ -131,10 +132,11 @@ class Server:
             self.last_check_job_queue = datetime.datetime.now(datetime.UTC)
             if self.quick_job:
                 logging.debug("Setting the final quick job timer.")
-                self.job_check_interval = 5
+                if self.default_job_check_interval > 5:
+                    self.job_check_interval = 5
                 self.quick_job = False
             else:
-                self.job_check_interval = 60
+                self.job_check_interval = self.default_job_check_interval
 
     def server_connection_bouncer(self, conn: PacketServerConnection):
         logging.debug("new connection bouncer checking user status")
@@ -235,7 +237,9 @@ class Server:
         with self.db.transaction() as storage:
             if 'job_check_interval' in storage.root.config:
                 try:
-                    self.job_check_interval = int(storage.root.config['job_check_interval'])
+                    self.default_job_check_interval = int(storage.root.config['job_check_interval'])
+                    if self.job_check_interval > self.default_job_check_interval:
+                        self.job_check_interval = self.default_job_check_interval
                 except:
                     logging.warning(f"Invalid config value for 'job_check_interval'")
         # queue as many jobs as possible
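
Note: taken together, the three `Server` hunks replace the hard-coded 60-second fallback with a remembered `default_job_check_interval` seeded from config, so the interval after a quick job respects the operator's setting, and the 5-second quick poll is never slower than that default. The resulting policy, condensed into a paraphrased sketch:

    def next_job_check_interval(quick_job: bool, default_interval: int) -> int:
        if quick_job:
            # one fast follow-up poll, unless the configured default is already faster
            return min(5, default_interval)
        return default_interval
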
@@ -64,22 +64,26 @@ def get_new_job_id(root: PersistentMapping) -> int:
 def add_object_to_file_list(object_id: Union[str,UUID], file_list: List[RunnerFile], username: str, conn: Connection):
     if type(object_id) is str:
         object_id = UUID(object_id)
-
+    logging.debug("Adding an object to file list for new job.")
     root = conn.root()
+    logging.debug("Got db root from transaction/connection object.")
     obj = Object.get_object_by_uuid(object_id, root)
+    logging.debug(f"Looked up object {obj}")
     if obj is None:
         raise KeyError(f"Object '{object_id}' does not exist.")
     if obj.private:
         owner_uuid = obj.owner
         owner = User.get_user_by_uuid(owner_uuid, root)
+        logging.debug(f"Looked up owner of object: {owner}")
         if not (owner.username.lower() == username.lower()):
             raise PermissionError(f"Specified object {object_id} not public and not owned by user.")
+    logging.debug("Checking paths now.")
     unique_path = obj.name
     runner_paths = []
     for i in file_list:
         runner_paths.append(i.destination_path)
     suffix = 1
-    while unique_path not in runner_paths:
+    while unique_path in runner_paths:
         unique_path = obj.name + f"_{suffix}"
         suffix = suffix + 1
 
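
Note: the inverted `while` condition is the substantive fix here. The old loop renamed the file only when there was *no* name clash, and would then spin forever, since the suffixed name is not in the list either; on an actual clash it never renamed at all. The corrected behavior as a standalone helper, with the expected results:

    def unique_destination(name, taken):
        candidate = name
        suffix = 1
        while candidate in taken:            # old code: `while candidate not in taken`
            candidate = f"{name}_{suffix}"
            suffix += 1
        return candidate

    assert unique_destination("data.csv", []) == "data.csv"
    assert unique_destination("data.csv", ["data.csv"]) == "data.csv_1"
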
@@ -129,7 +129,7 @@ class User(persistent.Persistent):
             uid = uuid.UUID(str(user_uuid))
             for user in db_root['users']:
                 if uid == db_root['users'][user].uuid:
-                    return db_root['users'][user].uuid
+                    return db_root['users'][user]
         except Exception:
             return None
         return None
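
Note: with this fix, `get_user_by_uuid` returns the `User` object instead of echoing the UUID back, which is what `add_object_to_file_list` above relies on when it reads `owner.username`. A minimal usage sketch:

    owner = User.get_user_by_uuid(owner_uuid, root)
    if owner is not None:
        print(owner.username)   # accessing .username on a bare UUID would raise AttributeError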