feat(ui): /ui/jobs listing page (#47)
All checks were successful
tests / test (push) Successful in 4m29s
All checks were successful
tests / test (push) Successful in 4m29s
This commit is contained in:
commit
42a0086ba1
8 changed files with 845 additions and 2 deletions
|
|
@ -8,7 +8,7 @@ Status: MVP deployed (2026-04-18) at `http://192.168.68.42:8994` — LAN only. B
|
|||
|
||||
Use cases: the built-in registry lives in `src/ix/use_cases/__init__.py` (`bank_statement_header` for MVP). Callers without a registered entry can ship an ad-hoc schema inline via `RequestIX.use_case_inline` (see README "Ad-hoc use cases"); the pipeline builds the Pydantic classes on the fly per request. The `/ui` page exposes this as a "custom" option so non-engineering users can experiment without a deploy.
|
||||
|
||||
UX notes: the `/ui` job page surfaces queue position + elapsed MM:SS on each poll, renders the client-provided filename (stored via `FileRef.display_name`, optional metadata — the pipeline ignores it for execution), and shows a CPU-mode notice when `/healthz` reports `ocr_gpu: false`.
|
||||
UX notes: the `/ui` job page surfaces queue position + elapsed MM:SS on each poll, renders the client-provided filename (stored via `FileRef.display_name`, optional metadata — the pipeline ignores it for execution), and shows a CPU-mode notice when `/healthz` reports `ocr_gpu: false`. A paginated history lives at `/ui/jobs` (status + client_id filters, newest first).
|
||||
|
||||
## Guiding Principles
|
||||
|
||||
|
|
|
|||
12
README.md
12
README.md
|
|
@ -10,6 +10,8 @@ Given a document (PDF, image, text) and a named *use case*, ix returns a structu
|
|||
|
||||
A minimal browser UI lives at [`http://192.168.68.42:8994/ui`](http://192.168.68.42:8994/ui): drop a PDF, pick a registered use case or define one inline, submit, see the pretty-printed result. HTMX polls the job status every 2 s until the pipeline finishes. LAN-only, no auth.
|
||||
|
||||
Past submissions are browsable at [`/ui/jobs`](http://192.168.68.42:8994/ui/jobs) — a paginated list (newest first) with status + `client_id` filters. Each row links to `/ui/jobs/{job_id}` for the full request/response view.
|
||||
|
||||
- Full reference spec: [`docs/spec-core-pipeline.md`](docs/spec-core-pipeline.md) (aspirational; MVP is a strict subset)
|
||||
- **MVP design:** [`docs/superpowers/specs/2026-04-18-ix-mvp-design.md`](docs/superpowers/specs/2026-04-18-ix-mvp-design.md)
|
||||
- **Implementation plan:** [`docs/superpowers/plans/2026-04-18-ix-mvp-implementation.md`](docs/superpowers/plans/2026-04-18-ix-mvp-implementation.md)
|
||||
|
|
@ -76,6 +78,16 @@ uv run pytest tests/unit -v # hermetic unit + integration sui
|
|||
IX_TEST_OLLAMA=1 uv run pytest tests/live -v # needs LAN access to Ollama + GPU
|
||||
```
|
||||
|
||||
### UI jobs list
|
||||
|
||||
`GET /ui/jobs` renders a paginated, newest-first table of submitted jobs. Query params:
|
||||
|
||||
- `status=pending|running|done|error` — repeat for multi-select.
|
||||
- `client_id=<str>` — exact match (e.g. `ui`, `mammon`).
|
||||
- `limit=<n>` (default 50, max 200) + `offset=<n>` for paging.
|
||||
|
||||
Each row shows status badge, original filename (`FileRef.display_name` or URL basename), use case, client id, submitted time + relative, and elapsed wall-clock (terminal rows only). Each row links to `/ui/jobs/{job_id}` for the full response JSON.
|
||||
|
||||
### UI queue + progress UX
|
||||
|
||||
The `/ui` job page polls `GET /ui/jobs/{id}/fragment` every 2 s and surfaces:
|
||||
|
|
|
|||
|
|
@ -23,6 +23,7 @@ A few invariants worth stating up front:
|
|||
from __future__ import annotations
|
||||
|
||||
import secrets
|
||||
from collections.abc import Iterable
|
||||
from datetime import UTC, datetime
|
||||
from typing import TYPE_CHECKING, Literal
|
||||
from uuid import UUID, uuid4
|
||||
|
|
@ -333,6 +334,75 @@ async def sweep_orphans(
|
|||
return list(candidates)
|
||||
|
||||
|
||||
# Hard cap on page size; bounds the JSON payload regardless of caller input.
_LIST_RECENT_LIMIT_CAP = 200


async def list_recent(
    session: AsyncSession,
    *,
    limit: int = 50,
    offset: int = 0,
    status: str | Iterable[str] | None = None,
    client_id: str | None = None,
) -> tuple[list[Job], int]:
    """Return a page of recent jobs, newest first, plus total matching count.

    Powers the ``/ui/jobs`` listing page. Ordering is ``created_at DESC``.
    ``total`` reflects matching rows *before* limit/offset so the template
    can render "showing N of M".

    Parameters
    ----------
    limit:
        Maximum rows to return. Capped at
        :data:`_LIST_RECENT_LIMIT_CAP` (200) to bound the JSON payload
        size — callers that pass a larger value get clamped silently.
    offset:
        Non-negative row offset. Negative values raise ``ValueError``
        because the template treats offset as a page cursor; a negative
        cursor is a bug at the call site, not something to paper over.
    status:
        If set, restrict to the given status(es). Accepts a single
        :data:`Job.status` value or any iterable (list/tuple/set). Values
        outside the lifecycle enum simply match nothing — we don't try
        to validate here; the DB CHECK constraint already bounds the set.
    client_id:
        If set, exact match on :attr:`IxJob.client_id`. No substring /
        prefix match — simple and predictable.
    """

    if offset < 0:
        raise ValueError(f"offset must be >= 0, got {offset}")
    effective_limit = max(0, min(limit, _LIST_RECENT_LIMIT_CAP))

    filters = []
    if status is not None:
        if isinstance(status, str):
            filters.append(IxJob.status == status)
        else:
            # An empty iterable produces an empty IN-list, which matches
            # no rows — exactly the semantics we want, so no special
            # case is needed.  (Previously this carried an if/else whose
            # two branches were identical; the dead branch is removed.)
            filters.append(IxJob.status.in_(list(status)))
    if client_id is not None:
        filters.append(IxJob.client_id == client_id)

    # Same WHERE clauses drive both the count and the page query so the
    # "showing N of M" figures can never disagree.
    total_q = select(func.count()).select_from(IxJob)
    list_q = select(IxJob).order_by(IxJob.created_at.desc())
    for f in filters:
        total_q = total_q.where(f)
        list_q = list_q.where(f)

    total = int(await session.scalar(total_q) or 0)
    rows = (
        await session.scalars(list_q.limit(effective_limit).offset(offset))
    ).all()
    return [_orm_to_job(r) for r in rows], total
|
||||
|
||||
|
||||
def _as_interval(seconds: int): # type: ignore[no-untyped-def]
|
||||
"""Return a SQL interval expression for ``seconds``.
|
||||
|
||||
|
|
|
|||
|
|
@ -26,9 +26,10 @@ from __future__ import annotations
|
|||
import json
|
||||
import time
|
||||
import uuid
|
||||
from datetime import UTC, datetime
|
||||
from pathlib import Path
|
||||
from typing import Annotated
|
||||
from urllib.parse import unquote, urlsplit
|
||||
from urllib.parse import unquote, urlencode, urlsplit
|
||||
from uuid import UUID
|
||||
|
||||
import aiofiles
|
||||
|
|
@ -38,6 +39,7 @@ from fastapi import (
|
|||
File,
|
||||
Form,
|
||||
HTTPException,
|
||||
Query,
|
||||
Request,
|
||||
UploadFile,
|
||||
)
|
||||
|
|
@ -105,6 +107,59 @@ def _cached_ocr_gpu(probes: Probes) -> bool | None:
|
|||
return value
|
||||
|
||||
|
||||
# Statuses the /ui/jobs ?status= filter accepts; unknown values in the
# query string are dropped silently by the route handler.
_VALID_STATUSES = ("pending", "running", "done", "error")
# Default and maximum page size for the listing; the max mirrors the
# repo-side cap (200) so the route can never request more than the
# repository would return.
_JOBS_LIST_DEFAULT_LIMIT = 50
_JOBS_LIST_MAX_LIMIT = 200
|
||||
|
||||
|
||||
def _use_case_label(request: RequestIX | None) -> str:
|
||||
"""Prefer inline use-case label, fall back to the registered name."""
|
||||
|
||||
if request is None:
|
||||
return "—"
|
||||
if request.use_case_inline is not None:
|
||||
return request.use_case_inline.use_case_name or request.use_case
|
||||
return request.use_case or "—"
|
||||
|
||||
|
||||
def _row_elapsed_seconds(job) -> int | None: # type: ignore[no-untyped-def]
|
||||
"""Wall-clock seconds for a terminal row (finished - started).
|
||||
|
||||
Used in the list view's "Elapsed" column. Returns ``None`` for rows
|
||||
that haven't run yet (pending / running-with-missing-started_at) so
|
||||
the template can render ``—`` instead.
|
||||
"""
|
||||
|
||||
if job.status in ("done", "error") and job.started_at and job.finished_at:
|
||||
return max(0, int((job.finished_at - job.started_at).total_seconds()))
|
||||
return None
|
||||
|
||||
|
||||
def _humanize_delta(seconds: int) -> str:
|
||||
"""Coarse-grained "N min ago" for the list view.
|
||||
|
||||
The list renders many rows; we don't need second-accuracy here. For
|
||||
sub-minute values we still say "just now" to avoid a jumpy display.
|
||||
"""
|
||||
|
||||
if seconds < 45:
|
||||
return "just now"
|
||||
mins = seconds // 60
|
||||
if mins < 60:
|
||||
return f"{mins} min ago"
|
||||
hours = mins // 60
|
||||
if hours < 24:
|
||||
return f"{hours} h ago"
|
||||
days = hours // 24
|
||||
return f"{days} d ago"
|
||||
|
||||
|
||||
def _fmt_elapsed_seconds(seconds: int | None) -> str:
|
||||
if seconds is None:
|
||||
return "—"
|
||||
return f"{seconds // 60:02d}:{seconds % 60:02d}"
|
||||
|
||||
|
||||
def _file_display_entries(
|
||||
request: RequestIX | None,
|
||||
) -> list[str]:
|
||||
|
|
@ -156,6 +211,91 @@ def build_router() -> APIRouter:
|
|||
},
|
||||
)
|
||||
|
||||
@router.get("/jobs", response_class=HTMLResponse)
async def jobs_list(
    request: Request,
    session_factory: Annotated[
        async_sessionmaker[AsyncSession], Depends(get_session_factory_dep)
    ],
    status: Annotated[list[str] | None, Query()] = None,
    client_id: Annotated[str | None, Query()] = None,
    limit: Annotated[int, Query(ge=1, le=_JOBS_LIST_MAX_LIMIT)] = _JOBS_LIST_DEFAULT_LIMIT,
    offset: Annotated[int, Query(ge=0)] = 0,
) -> Response:
    """Render the paginated ``/ui/jobs`` listing page.

    Query params: repeatable ``status`` (multi-select), exact-match
    ``client_id``, ``limit`` (1..max, default 50) and ``offset`` (>= 0,
    both validated by FastAPI via ``Query``). Fetches one page through
    ``jobs_repo.list_recent`` and renders ``jobs_list.html`` with
    pre-formatted row dicts plus prev/next links.
    """
    # Drop unknown statuses silently — we don't want a stray query
    # param to 400. The filter bar only offers valid values anyway.
    status_filter: list[str] = []
    if status:
        status_filter = [s for s in status if s in _VALID_STATUSES]
    # Blank / whitespace-only client_id means "no filter".
    client_filter = (client_id or "").strip() or None

    async with session_factory() as session:
        jobs, total = await jobs_repo.list_recent(
            session,
            limit=limit,
            offset=offset,
            status=status_filter if status_filter else None,
            client_id=client_filter,
        )

    # Pre-render each row into plain strings so the template stays dumb:
    # no datetime math or formatting in Jinja.
    now = datetime.now(UTC)
    rows = []
    for job in jobs:
        files = _file_display_entries(job.request)
        display = files[0] if files else "—"
        created = job.created_at
        created_delta = _humanize_delta(
            int((now - created).total_seconds())
        ) if created is not None else "—"
        created_local = (
            created.strftime("%Y-%m-%d %H:%M:%S")
            if created is not None
            else "—"
        )
        rows.append(
            {
                "job_id": str(job.job_id),
                "status": job.status,
                "display_name": display,
                "use_case": _use_case_label(job.request),
                "client_id": job.client_id,
                "created_at": created_local,
                "created_delta": created_delta,
                "elapsed": _fmt_elapsed_seconds(_row_elapsed_seconds(job)),
            }
        )

    # Page cursors: ``None`` disables the corresponding Prev/Next link.
    prev_offset = max(0, offset - limit) if offset > 0 else None
    next_offset = offset + limit if (offset + limit) < total else None

    def _link(new_offset: int) -> str:
        # Rebuild the query string so pagination links preserve the
        # active status/client filters and page size.
        params: list[tuple[str, str]] = []
        for s in status_filter:
            params.append(("status", s))
        if client_filter:
            params.append(("client_id", client_filter))
        params.append(("limit", str(limit)))
        params.append(("offset", str(new_offset)))
        return f"/ui/jobs?{urlencode(params)}"

    tpl = _templates()
    return tpl.TemplateResponse(
        request,
        "jobs_list.html",
        {
            "rows": rows,
            "total": total,
            "shown": len(rows),
            "limit": limit,
            "offset": offset,
            "status_filter": status_filter,
            "client_filter": client_filter or "",
            "valid_statuses": _VALID_STATUSES,
            "prev_link": _link(prev_offset) if prev_offset is not None else None,
            "next_link": _link(next_offset) if next_offset is not None else None,
        },
    )
|
||||
|
||||
@router.get("/jobs/{job_id}", response_class=HTMLResponse)
|
||||
async def job_page(
|
||||
request: Request,
|
||||
|
|
|
|||
|
|
@ -53,6 +53,7 @@
|
|||
<nav class="ix-header" aria-label="InfoXtractor navigation">
|
||||
<span class="brand">InfoXtractor</span>
|
||||
<a href="/ui">Upload a new extraction</a>
|
||||
<a href="/ui/jobs">Recent jobs</a>
|
||||
{% if job %}
|
||||
<span>
|
||||
Job:
|
||||
|
|
|
|||
164
src/ix/ui/templates/jobs_list.html
Normal file
164
src/ix/ui/templates/jobs_list.html
Normal file
|
|
@ -0,0 +1,164 @@
|
|||
{# jobs_list.html — paginated /ui/jobs listing.
   Context (all pre-formatted strings, supplied by the jobs_list route):
   rows, total, shown, limit, offset, status_filter, client_filter,
   valid_statuses, prev_link, next_link. #}
<!doctype html>
<html lang="en" data-theme="light">
  <head>
    <meta charset="utf-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1" />
    <title>InfoXtractor — Recent jobs</title>
    <link
      rel="stylesheet"
      href="https://cdn.jsdelivr.net/npm/@picocss/pico@2/css/pico.min.css"
    />
    <style>
      main { padding-top: 1.5rem; padding-bottom: 4rem; }
      nav.ix-header {
        display: flex; gap: 1rem; align-items: baseline;
        padding: 0.6rem 0; border-bottom: 1px solid var(--pico-muted-border-color, #ddd);
        margin-bottom: 1rem; flex-wrap: wrap;
      }
      nav.ix-header .brand { font-weight: 700; margin-right: auto; }
      .breadcrumb {
        font-size: 0.9rem; color: var(--pico-muted-color);
        margin-bottom: 0.75rem;
      }
      .breadcrumb a { text-decoration: none; }
      .filter-bar {
        display: flex; flex-wrap: wrap; gap: 1rem; align-items: flex-end;
        margin-bottom: 1rem;
      }
      .filter-bar fieldset { margin: 0; padding: 0; border: none; }
      .filter-bar label.inline { display: inline-flex; gap: 0.3rem; align-items: center; margin-right: 0.8rem; font-weight: normal; }
      .counter { color: var(--pico-muted-color); margin-bottom: 0.5rem; }
      table.jobs-table { width: 100%; font-size: 0.92rem; }
      table.jobs-table th { white-space: nowrap; }
      table.jobs-table td { vertical-align: middle; }
      td.col-created small { color: var(--pico-muted-color); display: block; }
      .status-badge {
        display: inline-block; padding: 0.1rem 0.55rem;
        border-radius: 0.8rem; font-size: 0.78rem; font-weight: 600;
        text-transform: uppercase; letter-spacing: 0.04em;
      }
      .status-done { background: #d1f4dc; color: #1a6d35; }
      .status-error { background: #fadadd; color: #8a1d2b; }
      .status-pending, .status-running { background: #fff1c2; color: #805600; }
      .pagination {
        display: flex; gap: 0.75rem; margin-top: 1rem;
        align-items: center; flex-wrap: wrap;
      }
      .empty-note { color: var(--pico-muted-color); font-style: italic; }
      td.col-filename code { font-size: 0.9em; word-break: break-all; }
    </style>
  </head>
  <body>
    <main class="container">
      <nav class="ix-header" aria-label="InfoXtractor navigation">
        <span class="brand">InfoXtractor</span>
        <a href="/ui">Upload a new extraction</a>
        <a href="/ui/jobs">Recent jobs</a>
      </nav>

      <p class="breadcrumb">
        <a href="/ui">Home</a> › Jobs
      </p>

      <hgroup>
        <h1>Recent jobs</h1>
        <p>All submitted extractions, newest first.</p>
      </hgroup>

      {# GET form: submitting rebuilds the query string, so filters are
         bookmarkable and survive pagination. #}
      <form class="filter-bar" method="get" action="/ui/jobs">
        <fieldset>
          <legend><small>Status</small></legend>
          {% for s in valid_statuses %}
            <label class="inline">
              <input
                type="checkbox"
                name="status"
                value="{{ s }}"
                {% if s in status_filter %}checked{% endif %}
              />
              {{ s }}
            </label>
          {% endfor %}
        </fieldset>
        <label>
          Client id
          <input
            type="text"
            name="client_id"
            value="{{ client_filter }}"
            placeholder="e.g. ui, mammon"
          />
        </label>
        <label>
          Page size
          <input
            type="number"
            name="limit"
            min="1"
            max="200"
            value="{{ limit }}"
          />
        </label>
        <button type="submit">Apply</button>
      </form>

      <p class="counter">
        Showing {{ shown }} of {{ total }} job{% if total != 1 %}s{% endif %}.
      </p>

      {% if rows %}
        <figure>
          <table class="jobs-table" role="grid">
            <thead>
              <tr>
                <th>Status</th>
                <th>Filename</th>
                <th>Use case</th>
                <th>Client</th>
                <th>Submitted</th>
                <th>Elapsed</th>
                <th></th>
              </tr>
            </thead>
            <tbody>
              {# Each row dict is fully pre-formatted by the route. #}
              {% for row in rows %}
                <tr>
                  <td>
                    <span class="status-badge status-{{ row.status }}">{{ row.status }}</span>
                  </td>
                  <td class="col-filename"><code>{{ row.display_name }}</code></td>
                  <td>{{ row.use_case }}</td>
                  <td>{{ row.client_id }}</td>
                  <td class="col-created">
                    {{ row.created_at }}
                    <small>{{ row.created_delta }}</small>
                  </td>
                  <td>{{ row.elapsed }}</td>
                  <td>
                    <a href="/ui/jobs/{{ row.job_id }}">open ›</a>
                  </td>
                </tr>
              {% endfor %}
            </tbody>
          </table>
        </figure>
      {% else %}
        <p class="empty-note">No jobs match the current filters.</p>
      {% endif %}

      {# prev_link / next_link are None at the ends of the result set;
         the disabled spans keep the layout stable. #}
      <div class="pagination">
        {% if prev_link %}
          <a href="{{ prev_link }}" role="button" class="secondary outline">« Prev</a>
        {% else %}
          <span aria-disabled="true" class="secondary outline" role="button" style="opacity: 0.4;">« Prev</span>
        {% endif %}
        <span class="counter">Offset {{ offset }}</span>
        {% if next_link %}
          <a href="{{ next_link }}" role="button" class="secondary outline">Next »</a>
        {% else %}
          <span aria-disabled="true" class="secondary outline" role="button" style="opacity: 0.4;">Next »</span>
        {% endif %}
      </div>
    </main>
  </body>
</html>
|
||||
|
|
@ -476,3 +476,204 @@ async def test_concurrent_claim_never_double_dispatches(
|
|||
non_null = [r for r in results if r is not None]
|
||||
# Every inserted id appears at most once.
|
||||
assert sorted(non_null) == sorted(ids)
|
||||
|
||||
|
||||
# ---------- list_recent ---------------------------------------------------
|
||||
#
|
||||
# The UI's ``/ui/jobs`` page needs a paginated, filterable view of recent
|
||||
# jobs. We keep the contract intentionally small: list_recent returns
|
||||
# ``(jobs, total)`` — ``total`` is the count after filters but before
|
||||
# limit/offset — so the template can render "Showing N of M".
|
||||
|
||||
|
||||
async def test_list_recent_empty_db(
    session_factory: async_sessionmaker[AsyncSession],
) -> None:
    """An empty table yields an empty page and a zero total."""
    async with session_factory() as session:
        jobs, total = await jobs_repo.list_recent(session, limit=50, offset=0)
    assert jobs == []
    assert total == 0
||||
|
||||
|
||||
async def test_list_recent_orders_newest_first(
    session_factory: async_sessionmaker[AsyncSession],
) -> None:
    """Rows come back in created_at DESC order (reverse insertion)."""
    ids: list[UUID] = []
    # Separate session per insert so each row gets a distinct created_at.
    for i in range(3):
        async with session_factory() as session:
            job = await jobs_repo.insert_pending(
                session, _make_request("c", f"lr-{i}"), callback_url=None
            )
            await session.commit()
            ids.append(job.job_id)

    async with session_factory() as session:
        jobs, total = await jobs_repo.list_recent(session, limit=50, offset=0)

    assert total == 3
    # Newest first → reverse of insertion order.
    assert [j.job_id for j in jobs] == list(reversed(ids))
||||
|
||||
|
||||
async def test_list_recent_status_single_filter(
    session_factory: async_sessionmaker[AsyncSession],
) -> None:
    """A single status string filters both the page and the total."""
    # Two pending, one done.
    async with session_factory() as session:
        for i in range(3):
            await jobs_repo.insert_pending(
                session, _make_request("c", f"sf-{i}"), callback_url=None
            )
        await session.commit()

    # Claim one and flip it to done so both statuses exist.
    async with session_factory() as session:
        claimed = await jobs_repo.claim_next_pending(session)
        assert claimed is not None
        await jobs_repo.mark_done(
            session,
            claimed.job_id,
            ResponseIX(
                use_case="bank_statement_header",
                ix_client_id="c",
                request_id=claimed.request_id,
            ),
        )
        await session.commit()

    async with session_factory() as session:
        done_jobs, done_total = await jobs_repo.list_recent(
            session, limit=50, offset=0, status="done"
        )
    assert done_total == 1
    assert len(done_jobs) == 1
    assert done_jobs[0].status == "done"

    async with session_factory() as session:
        pending_jobs, pending_total = await jobs_repo.list_recent(
            session, limit=50, offset=0, status="pending"
        )
    assert pending_total == 2
    assert all(j.status == "pending" for j in pending_jobs)
||||
|
||||
|
||||
async def test_list_recent_status_iterable_filter(
    session_factory: async_sessionmaker[AsyncSession],
) -> None:
    """A list of statuses selects the union of matching rows."""
    # Two pending, one done, one errored.
    async with session_factory() as session:
        for i in range(4):
            await jobs_repo.insert_pending(
                session, _make_request("c", f"if-{i}"), callback_url=None
            )
        await session.commit()

    async with session_factory() as session:
        a = await jobs_repo.claim_next_pending(session)
        assert a is not None
        await jobs_repo.mark_done(
            session,
            a.job_id,
            ResponseIX(
                use_case="bank_statement_header",
                ix_client_id="c",
                request_id=a.request_id,
            ),
        )
        await session.commit()
    async with session_factory() as session:
        b = await jobs_repo.claim_next_pending(session)
        assert b is not None
        await jobs_repo.mark_error(session, b.job_id, ResponseIX(error="boom"))
        await session.commit()

    async with session_factory() as session:
        jobs, total = await jobs_repo.list_recent(
            session, limit=50, offset=0, status=["done", "error"]
        )
    assert total == 2
    assert {j.status for j in jobs} == {"done", "error"}
||||
|
||||
|
||||
async def test_list_recent_client_id_filter(
    session_factory: async_sessionmaker[AsyncSession],
) -> None:
    """client_id filtering is an exact match on the stored value."""
    async with session_factory() as session:
        await jobs_repo.insert_pending(
            session, _make_request("alpha", "a-1"), callback_url=None
        )
        await jobs_repo.insert_pending(
            session, _make_request("beta", "b-1"), callback_url=None
        )
        await jobs_repo.insert_pending(
            session, _make_request("alpha", "a-2"), callback_url=None
        )
        await session.commit()

    async with session_factory() as session:
        jobs, total = await jobs_repo.list_recent(
            session, limit=50, offset=0, client_id="alpha"
        )
    assert total == 2
    assert all(j.client_id == "alpha" for j in jobs)
||||
|
||||
|
||||
async def test_list_recent_pagination(
    session_factory: async_sessionmaker[AsyncSession],
) -> None:
    """Pages of 3/3/1 over 7 rows; total is stable across pages."""
    ids: list[UUID] = []
    # One commit per insert so created_at is strictly increasing.
    for i in range(7):
        async with session_factory() as session:
            job = await jobs_repo.insert_pending(
                session, _make_request("c", f"pg-{i}"), callback_url=None
            )
            await session.commit()
            ids.append(job.job_id)

    async with session_factory() as session:
        page1, total1 = await jobs_repo.list_recent(
            session, limit=3, offset=0
        )
    assert total1 == 7
    assert len(page1) == 3
    # Newest three are the last three inserted.
    assert [j.job_id for j in page1] == list(reversed(ids[-3:]))

    async with session_factory() as session:
        page2, total2 = await jobs_repo.list_recent(
            session, limit=3, offset=3
        )
    assert total2 == 7
    assert len(page2) == 3
    expected = list(reversed(ids))[3:6]
    assert [j.job_id for j in page2] == expected

    async with session_factory() as session:
        page3, total3 = await jobs_repo.list_recent(
            session, limit=3, offset=6
        )
    assert total3 == 7
    # Final partial page: only the oldest row remains.
    assert len(page3) == 1
    assert page3[0].job_id == ids[0]
||||
|
||||
|
||||
async def test_list_recent_caps_limit(
    session_factory: async_sessionmaker[AsyncSession],
) -> None:
    """limit is capped at 200 — asking for 9999 gets clamped."""

    # Empty table: the call must still succeed (no SQL error from the
    # oversized limit) and return an empty page.
    async with session_factory() as session:
        jobs, total = await jobs_repo.list_recent(
            session, limit=9999, offset=0
        )
    assert total == 0
    assert jobs == []
||||
|
||||
|
||||
async def test_list_recent_rejects_negative_offset(
    session_factory: async_sessionmaker[AsyncSession],
) -> None:
    """A negative offset is a caller bug and must raise ValueError."""
    async with session_factory() as session:
        # Local import keeps the module's top-level imports untouched.
        import pytest as _pytest

        with _pytest.raises(ValueError):
            await jobs_repo.list_recent(session, limit=50, offset=-1)
||||
|
|
|
|||
|
|
@ -427,6 +427,261 @@ class TestFragment:
|
|||
assert "my-done-doc.pdf" in body
|
||||
|
||||
|
||||
class TestJobsListPage:
    """Tests for the ``GET /ui/jobs`` listing page (feat/ui-jobs-list)."""

    def _submit(
        self,
        app: TestClient,
        client_id: str,
        request_id: str,
        filename: str = "sample.pdf",
    ) -> None:
        """POST the fixture PDF through the UI form for the given ids."""
        with FIXTURE_PDF.open("rb") as fh:
            app.post(
                "/ui/jobs",
                data={
                    "use_case_mode": "registered",
                    "use_case_name": "bank_statement_header",
                    "ix_client_id": client_id,
                    "request_id": request_id,
                },
                files={"pdf": (filename, fh, "application/pdf")},
                follow_redirects=False,
            )

    def test_jobs_list_returns_html(
        self,
        app: TestClient,
        postgres_url: str,
    ) -> None:
        """The list page is HTML and surfaces every submitted filename."""
        for i in range(3):
            self._submit(
                app,
                "ui-list",
                f"lp-{uuid4().hex[:6]}-{i}",
                filename=f"doc-{i}.pdf",
            )

        resp = app.get("/ui/jobs")
        assert resp.status_code == 200
        assert "text/html" in resp.headers["content-type"]
        body = resp.text
        # Breadcrumb / header shows "Jobs".
        assert "Jobs" in body
        # display_name surfaces for each row.
        for i in range(3):
            assert f"doc-{i}.pdf" in body
        # Showing N of M counter present.
        assert "Showing" in body
        assert "of" in body

    def test_jobs_list_links_to_job_detail(
        self,
        app: TestClient,
        postgres_url: str,
    ) -> None:
        """Each row links to the /ui/jobs/{job_id} detail page."""
        rid = f"lp-link-{uuid4().hex[:6]}"
        self._submit(app, "ui-list", rid)
        row = _find_job(postgres_url, "ui-list", rid)
        assert row is not None
        resp = app.get("/ui/jobs")
        assert resp.status_code == 200
        assert f"/ui/jobs/{row.job_id}" in resp.text

    def test_jobs_list_status_filter_single(
        self,
        app: TestClient,
        postgres_url: str,
    ) -> None:
        """?status=done hides rows in other statuses."""
        # Create two jobs, flip one to done.
        rid_pending = f"lp-p-{uuid4().hex[:6]}"
        rid_done = f"lp-d-{uuid4().hex[:6]}"
        self._submit(app, "ui-filt", rid_pending, filename="pending-doc.pdf")
        self._submit(app, "ui-filt", rid_done, filename="done-doc.pdf")
        done_row = _find_job(postgres_url, "ui-filt", rid_done)
        assert done_row is not None
        _force_done(
            postgres_url,
            done_row.job_id,
            response_body={"use_case": "bank_statement_header"},
        )

        # ?status=done → only done row shown.
        resp = app.get("/ui/jobs?status=done")
        assert resp.status_code == 200
        assert "done-doc.pdf" in resp.text
        assert "pending-doc.pdf" not in resp.text

    def test_jobs_list_status_filter_multi(
        self,
        app: TestClient,
        postgres_url: str,
    ) -> None:
        """Repeated ?status= params select the union of statuses."""
        rid_p = f"lp-mp-{uuid4().hex[:6]}"
        rid_d = f"lp-md-{uuid4().hex[:6]}"
        rid_e = f"lp-me-{uuid4().hex[:6]}"
        self._submit(app, "ui-multi", rid_p, filename="pending-m.pdf")
        self._submit(app, "ui-multi", rid_d, filename="done-m.pdf")
        self._submit(app, "ui-multi", rid_e, filename="error-m.pdf")

        done_row = _find_job(postgres_url, "ui-multi", rid_d)
        err_row = _find_job(postgres_url, "ui-multi", rid_e)
        assert done_row is not None and err_row is not None
        _force_done(
            postgres_url,
            done_row.job_id,
            response_body={"use_case": "bank_statement_header"},
        )
        _force_error(postgres_url, err_row.job_id)

        resp = app.get("/ui/jobs?status=done&status=error")
        assert resp.status_code == 200
        body = resp.text
        assert "done-m.pdf" in body
        assert "error-m.pdf" in body
        assert "pending-m.pdf" not in body

    def test_jobs_list_client_id_filter(
        self,
        app: TestClient,
        postgres_url: str,
    ) -> None:
        """?client_id= shows only that client's submissions."""
        rid_a = f"lp-a-{uuid4().hex[:6]}"
        rid_b = f"lp-b-{uuid4().hex[:6]}"
        self._submit(app, "client-alpha", rid_a, filename="alpha.pdf")
        self._submit(app, "client-beta", rid_b, filename="beta.pdf")

        resp = app.get("/ui/jobs?client_id=client-alpha")
        assert resp.status_code == 200
        body = resp.text
        assert "alpha.pdf" in body
        assert "beta.pdf" not in body

    def test_jobs_list_pagination(
        self,
        app: TestClient,
        postgres_url: str,
    ) -> None:
        """limit/offset split 7 rows across two newest-first pages."""
        rids = []
        for i in range(7):
            rid = f"lp-pg-{uuid4().hex[:6]}-{i}"
            rids.append(rid)
            self._submit(app, "ui-pg", rid, filename=f"pg-{i}.pdf")

        resp_p1 = app.get("/ui/jobs?limit=5&offset=0&client_id=ui-pg")
        assert resp_p1.status_code == 200
        body_p1 = resp_p1.text
        # Newest-first: last 5 uploaded are pg-6..pg-2.
        for i in (2, 3, 4, 5, 6):
            assert f"pg-{i}.pdf" in body_p1
        assert "pg-1.pdf" not in body_p1
        assert "pg-0.pdf" not in body_p1

        resp_p2 = app.get("/ui/jobs?limit=5&offset=5&client_id=ui-pg")
        assert resp_p2.status_code == 200
        body_p2 = resp_p2.text
        assert "pg-1.pdf" in body_p2
        assert "pg-0.pdf" in body_p2
        # Showing 2 of 7 on page 2.
        assert "of 7" in body_p2

    def test_jobs_list_missing_display_name_falls_back_to_basename(
        self,
        app: TestClient,
        postgres_url: str,
    ) -> None:
        """Legacy rows without display_name must still render via basename."""

        from ix.contracts.request import Context, FileRef, RequestIX

        legacy_req = RequestIX(
            use_case="bank_statement_header",
            ix_client_id="ui-legacy",
            request_id=f"lp-legacy-{uuid4().hex[:6]}",
            context=Context(
                files=[FileRef(url="file:///tmp/ix/ui/listing-legacy.pdf")]
            ),
        )

        import asyncio

        from ix.store import jobs_repo as _repo

        # Insert directly through the repo (bypassing the UI form) so the
        # row genuinely lacks a display_name.
        async def _insert() -> UUID:
            eng = create_async_engine(postgres_url)
            sf = async_sessionmaker(eng, expire_on_commit=False)
            try:
                async with sf() as session:
                    job = await _repo.insert_pending(
                        session, legacy_req, callback_url=None
                    )
                    await session.commit()
                    return job.job_id
            finally:
                await eng.dispose()

        asyncio.run(_insert())

        resp = app.get("/ui/jobs?client_id=ui-legacy")
        assert resp.status_code == 200
        assert "listing-legacy.pdf" in resp.text

    def test_jobs_list_header_link_from_index(
        self,
        app: TestClient,
    ) -> None:
        """The upload page header links to the jobs list."""
        resp = app.get("/ui")
        assert resp.status_code == 200
        assert 'href="/ui/jobs"' in resp.text

    def test_jobs_list_header_link_from_detail(
        self,
        app: TestClient,
        postgres_url: str,
    ) -> None:
        """The job detail page header links back to the jobs list."""
        rid = f"lp-hd-{uuid4().hex[:6]}"
        self._submit(app, "ui-hd", rid)
        row = _find_job(postgres_url, "ui-hd", rid)
        assert row is not None
        resp = app.get(f"/ui/jobs/{row.job_id}")
        assert resp.status_code == 200
        assert 'href="/ui/jobs"' in resp.text
|
||||
|
||||
|
||||
def _force_error(
    postgres_url: str,
    job_id,  # type: ignore[no-untyped-def]
) -> None:
    """Flip a pending/running job to ``error`` with a canned error body."""

    import asyncio
    from datetime import UTC, datetime

    from sqlalchemy import text

    # Raw UPDATE on its own short-lived engine: the sync TestClient owns
    # the app's engine, so we side-step it entirely.
    async def _go():  # type: ignore[no-untyped-def]
        eng = create_async_engine(postgres_url)
        try:
            async with eng.begin() as conn:
                await conn.execute(
                    text(
                        "UPDATE ix_jobs SET status='error', "
                        "response=CAST(:resp AS JSONB), finished_at=:now "
                        "WHERE job_id=:jid"
                    ),
                    {
                        "resp": json.dumps({"error": "IX_002_000: forced"}),
                        "now": datetime.now(UTC),
                        "jid": str(job_id),
                    },
                )
        finally:
            await eng.dispose()

    asyncio.run(_go())
|
||||
|
||||
|
||||
def _find_job(postgres_url: str, client_id: str, request_id: str): # type: ignore[no-untyped-def]
|
||||
"""Look up an ``ix_jobs`` row via the async engine, wrapping the coroutine
|
||||
for test convenience."""
|
||||
|
|
|
|||
Loading…
Reference in a new issue