Jobs HTML view changes.

This commit is contained in:
Michael Woods
2025-12-26 15:53:12 -05:00
parent c81fd68ea2
commit ec0cb0ce45
2 changed files with 31 additions and 17 deletions

View File

@@ -4,6 +4,7 @@ from typing import List, Optional, Union, Tuple, Dict, Any
from pydantic import BaseModel
from datetime import datetime
import logging
import base64
from traceback import format_exc
from packetserver.http.dependencies import get_current_http_user
@@ -11,6 +12,7 @@ from packetserver.http.auth import HttpUser
from packetserver.http.database import DbDependency
from packetserver.server.jobs import Job, JobStatus
from packetserver.http.server import templates
from packetserver.runner import RunnerFile
router = APIRouter(prefix="/api/v1", tags=["jobs"])
dashboard_router = APIRouter(tags=["jobs"])
@@ -30,12 +32,12 @@ class JobDetail(JobSummary):
errors: str # base64-encoded
artifacts: List[Tuple[str, str]] # list of (filename, base64_data)
from typing import Dict
class JobCreate(BaseModel):
    """Request payload for creating (queueing) a new job.

    `files` carries input file contents as base64 strings keyed by
    filename; they are decoded server-side into RunnerFile objects
    before the job is queued.
    """

    # Command to execute: a single command string or an argv-style list.
    cmd: Union[str, List[str]]
    # Optional human-readable description; stored as "" when omitted.
    description: Optional[str] = None
    # Environment variables for the job process; stored as {} when omitted.
    env: Optional[Dict[str, str]] = None
    # Working directory for the job; stored as "" when omitted.
    workdir: Optional[str] = None
    # Paths whose contents are collected as artifacts after the run.
    artifact_paths: Optional[List[str]] = None
    # Mapping of filename -> base64-encoded file content.
    files: Optional[Dict[str, str]] = None
@router.get("/jobs", response_model=List[JobSummary])
async def list_user_jobs(
@@ -157,25 +159,32 @@ async def create_job(
username = current_user.username.upper().strip()
try:
with db.transaction() as conn:
root = conn.root()
# Process files: convert base64 dict to list of RunnerFile
runner_files = []
if payload.files:
for filename, b64_data in payload.files.items():
try:
data_bytes = base64.b64decode(b64_data)
runner_files.append(RunnerFile(filename, data=data_bytes))
except Exception as e:
raise HTTPException(status_code=400, detail=f"Invalid base64 for file {filename}")
# Queue the job using existing method
new_job = Job.queue(
# Create the Job instance
new_job = Job(
cmd=payload.cmd,
owner=username,
description=payload.description or "",
env=payload.env or {},
workdir=payload.workdir or "",
artifact_paths=payload.artifact_paths or [],
db=root # or root, depending on your queue() signature
files=runner_files
)
logging.info(f"User {username} queued job {new_job.id}: {payload.cmd}")
with db.transaction() as conn:
root = conn.root()
new_jid = new_job.queue(root)
logging.info(f"User {username} queued job {new_jid}: {payload.cmd} with {len(runner_files)} files")
# Return summary (reuse the same format as list)
return JobSummary(
id=new_job.id,
id=new_jid,
cmd=new_job.cmd,
owner=new_job.owner,
created_at=new_job.created_at,
@@ -185,6 +194,10 @@ async def create_job(
return_code=new_job.return_code
)
except ValueError as ve:
raise HTTPException(status_code=400, detail=str(ve))
except HTTPException:
raise
except Exception as e:
logging.error(f"Job creation failed for {username}: {e}\n{format_exc()}")
raise HTTPException(status_code=500, detail="Failed to queue job")

View File

@@ -202,6 +202,7 @@ class Job(persistent.Persistent):
"return_code": self.return_code,
"artifacts": [],
"status": self.status.name,
"env": self.env,
"id": self.id
}
if include_data: