
job

download_job_logs(job_id, user=Depends(current_active_superuser), db=Depends(get_async_db)) async

Download job folder

Source code in fractal_server/app/routes/admin/v2/job.py
@router.get("/{job_id}/download/", response_class=StreamingResponse)
async def download_job_logs(
    job_id: int,
    user: UserOAuth = Depends(current_active_superuser),
    db: AsyncSession = Depends(get_async_db),
) -> StreamingResponse:
    """
    Download job folder
    """
    # Get job from DB
    job = await db.get(JobV2, job_id)
    if job is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Job {job_id} not found",
        )
    # Create and return byte stream for zipped log folder
    PREFIX_ZIP = Path(job.working_dir).name
    zip_filename = f"{PREFIX_ZIP}_archive.zip"
    return StreamingResponse(
        _zip_folder_to_byte_stream_iterator(folder=job.working_dir),
        media_type="application/x-zip-compressed",
        headers={"Content-Disposition": f"attachment;filename={zip_filename}"},
    )
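
A minimal client-side sketch of calling this endpoint and streaming the zip archive to disk. It assumes the router is mounted under /admin/v2/job/, the server runs at http://localhost:8000, and TOKEN holds a superuser bearer token; BASE_URL, TOKEN, and the job id 42 are all hypothetical placeholders, not part of the source above.

import httpx

BASE_URL = "http://localhost:8000"  # assumed server address
TOKEN = "..."  # assumed superuser bearer token
JOB_ID = 42  # hypothetical job id

headers = {"Authorization": f"Bearer {TOKEN}"}
# Stream the zipped working directory to a local file instead of loading it in memory
with httpx.stream(
    "GET", f"{BASE_URL}/admin/v2/job/{JOB_ID}/download/", headers=headers
) as resp:
    resp.raise_for_status()
    with open(f"job_{JOB_ID}_archive.zip", "wb") as fp:
        for chunk in resp.iter_bytes():
            fp.write(chunk)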

stop_job(job_id, user=Depends(current_active_superuser), db=Depends(get_async_db)) async

Stop execution of a workflow job.

Source code in fractal_server/app/routes/admin/v2/job.py
@router.get("/{job_id}/stop/", status_code=202)
async def stop_job(
    job_id: int,
    user: UserOAuth = Depends(current_active_superuser),
    db: AsyncSession = Depends(get_async_db),
) -> Response:
    """
    Stop execution of a workflow job.
    """

    _check_shutdown_is_supported()

    job = await db.get(JobV2, job_id)
    if job is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Job {job_id} not found",
        )

    _write_shutdown_file(job=job)

    return Response(status_code=status.HTTP_202_ACCEPTED)
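
A short sketch of requesting a stop for one job, under the same hypothetical assumptions as above (BASE_URL, TOKEN, job id 42, and the /admin/v2/job/ prefix). The endpoint answers 202 Accepted as soon as the shutdown file is written; the actual stop is carried out asynchronously by the runner backend.

import httpx

BASE_URL = "http://localhost:8000"  # assumed server address
TOKEN = "..."  # assumed superuser bearer token

resp = httpx.get(
    f"{BASE_URL}/admin/v2/job/42/stop/",
    headers={"Authorization": f"Bearer {TOKEN}"},
)
# 202 means the shutdown request was accepted, not that the job has already stopped
assert resp.status_code == 202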

update_job(job_update, job_id, user=Depends(current_active_superuser), db=Depends(get_async_db)) async

Change the status of an existing job.

This endpoint is only open to superusers, and it does not apply project-based access-control to jobs.

Source code in fractal_server/app/routes/admin/v2/job.py
@router.patch("/{job_id}/", response_model=JobReadV2)
async def update_job(
    job_update: JobUpdateV2,
    job_id: int,
    user: UserOAuth = Depends(current_active_superuser),
    db: AsyncSession = Depends(get_async_db),
) -> Optional[JobReadV2]:
    """
    Change the status of an existing job.

    This endpoint is only open to superusers, and it does not apply
    project-based access-control to jobs.
    """
    job = await db.get(JobV2, job_id)
    if job is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Job {job_id} not found",
        )

    if job_update.status != JobStatusTypeV2.FAILED:
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail=f"Cannot set job status to {job_update.status}",
        )

    setattr(job, "status", job_update.status)
    setattr(job, "end_timestamp", get_timestamp())
    await db.commit()
    await db.refresh(job)
    await db.close()
    return job
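
A hedged sketch of marking a stuck job as failed, again with hypothetical BASE_URL, TOKEN, job id, and route prefix. The endpoint only accepts the failed status (assuming JobStatusTypeV2.FAILED serializes as "failed") and answers 422 for any other value.

import httpx

BASE_URL = "http://localhost:8000"  # assumed server address
TOKEN = "..."  # assumed superuser bearer token

resp = httpx.patch(
    f"{BASE_URL}/admin/v2/job/42/",
    json={"status": "failed"},  # assumes JobStatusTypeV2.FAILED serializes as "failed"
    headers={"Authorization": f"Bearer {TOKEN}"},
)
resp.raise_for_status()
updated = resp.json()
print(updated["status"], updated["end_timestamp"])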

view_job(id=None, user_id=None, project_id=None, dataset_id=None, workflow_id=None, status=None, start_timestamp_min=None, start_timestamp_max=None, end_timestamp_min=None, end_timestamp_max=None, log=True, user=Depends(current_active_superuser), db=Depends(get_async_db)) async

Query JobV2 table.

Parameters:

Name | Type | Description | Default
id | Optional[int] | If not None, select a given JobV2.id. | None
user_id | Optional[int] | If not None, select jobs of projects which include the given user. | None
project_id | Optional[int] | If not None, select a given JobV2.project_id. | None
dataset_id | Optional[int] | If not None, select a given JobV2.dataset_id. | None
workflow_id | Optional[int] | If not None, select a given JobV2.workflow_id. | None
status | Optional[JobStatusTypeV2] | If not None, select a given JobV2.status. | None
start_timestamp_min | Optional[datetime] | If not None, select rows with start_timestamp after start_timestamp_min. | None
start_timestamp_max | Optional[datetime] | If not None, select rows with start_timestamp before start_timestamp_max. | None
end_timestamp_min | Optional[datetime] | If not None, select rows with end_timestamp after end_timestamp_min. | None
end_timestamp_max | Optional[datetime] | If not None, select rows with end_timestamp before end_timestamp_max. | None
log | bool | If True, include job.log; if False, job.log is set to None. | True
Source code in fractal_server/app/routes/admin/v2/job.py
@router.get("/", response_model=list[JobReadV2])
async def view_job(
    id: Optional[int] = None,
    user_id: Optional[int] = None,
    project_id: Optional[int] = None,
    dataset_id: Optional[int] = None,
    workflow_id: Optional[int] = None,
    status: Optional[JobStatusTypeV2] = None,
    start_timestamp_min: Optional[datetime] = None,
    start_timestamp_max: Optional[datetime] = None,
    end_timestamp_min: Optional[datetime] = None,
    end_timestamp_max: Optional[datetime] = None,
    log: bool = True,
    user: UserOAuth = Depends(current_active_superuser),
    db: AsyncSession = Depends(get_async_db),
) -> list[JobReadV2]:
    """
    Query `JobV2` table.

    Args:
        id: If not `None`, select a given `JobV2.id`.
        user_id: If not `None`, select jobs of projects which include the
            given user.
        project_id: If not `None`, select a given `JobV2.project_id`.
        dataset_id: If not `None`, select a given `JobV2.dataset_id`.
        workflow_id: If not `None`, select a given `JobV2.workflow_id`.
        status: If not `None`, select a given `JobV2.status`.
        start_timestamp_min: If not `None`, select rows with
            `start_timestamp` after `start_timestamp_min`.
        start_timestamp_max: If not `None`, select rows with
            `start_timestamp` before `start_timestamp_max`.
        end_timestamp_min: If not `None`, select rows with `end_timestamp`
            after `end_timestamp_min`.
        end_timestamp_max: If not `None`, select rows with `end_timestamp`
            before `end_timestamp_max`.
        log: If `True`, include `job.log`; if `False`, `job.log` is set
            to `None`.
    """

    _raise_if_naive_datetime(
        start_timestamp_min,
        start_timestamp_max,
        end_timestamp_min,
        end_timestamp_max,
    )

    stm = select(JobV2)

    if id is not None:
        stm = stm.where(JobV2.id == id)
    if user_id is not None:
        stm = stm.join(ProjectV2).where(
            ProjectV2.user_list.any(UserOAuth.id == user_id)
        )
    if project_id is not None:
        stm = stm.where(JobV2.project_id == project_id)
    if dataset_id is not None:
        stm = stm.where(JobV2.dataset_id == dataset_id)
    if workflow_id is not None:
        stm = stm.where(JobV2.workflow_id == workflow_id)
    if status is not None:
        stm = stm.where(JobV2.status == status)
    if start_timestamp_min is not None:
        stm = stm.where(JobV2.start_timestamp >= start_timestamp_min)
    if start_timestamp_max is not None:
        stm = stm.where(JobV2.start_timestamp <= start_timestamp_max)
    if end_timestamp_min is not None:
        stm = stm.where(JobV2.end_timestamp >= end_timestamp_min)
    if end_timestamp_max is not None:
        stm = stm.where(JobV2.end_timestamp <= end_timestamp_max)

    res = await db.execute(stm)
    job_list = res.scalars().all()
    await db.close()
    if not log:
        for job in job_list:
            setattr(job, "log", None)

    return job_list
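
A sketch of a filtered query against this endpoint, with the same hypothetical BASE_URL, TOKEN, and route prefix as in the examples above, plus a hypothetical project id. Timestamps must be timezone-aware (naive datetimes are rejected by _raise_if_naive_datetime), and log=False strips the potentially large log field from each returned job.

from datetime import datetime, timezone

import httpx

BASE_URL = "http://localhost:8000"  # assumed server address
TOKEN = "..."  # assumed superuser bearer token

params = {
    "project_id": 7,  # hypothetical project id
    "status": "failed",
    # timezone-aware ISO timestamp: naive datetimes are rejected with an error
    "start_timestamp_min": datetime(2024, 1, 1, tzinfo=timezone.utc).isoformat(),
    "log": False,  # omit job logs from the response
}
resp = httpx.get(
    f"{BASE_URL}/admin/v2/job/",
    params=params,
    headers={"Authorization": f"Bearer {TOKEN}"},
)
resp.raise_for_status()
for job in resp.json():
    print(job["id"], job["status"], job["start_timestamp"])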