feat: professionalize control plane and standalone delivery
This commit is contained in:
1031
tests/test_api_server.py
Normal file
1031
tests/test_api_server.py
Normal file
File diff suppressed because it is too large
Load Diff
149
tests/test_control_plane_get_dispatcher.py
Normal file
149
tests/test_control_plane_get_dispatcher.py
Normal file
@@ -0,0 +1,149 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import tempfile
|
||||
import unittest
|
||||
from http import HTTPStatus
|
||||
from pathlib import Path
|
||||
from types import SimpleNamespace
|
||||
|
||||
from biliup_next.app.control_plane_get_dispatcher import ControlPlaneGetDispatcher
|
||||
from biliup_next.core.models import ActionRecord, Task, TaskContext
|
||||
|
||||
|
||||
class FakeRepo:
    """Minimal in-memory repository double for ControlPlaneGetDispatcher tests."""

    def __init__(self, task: Task, context: TaskContext | None = None, actions: list[ActionRecord] | None = None) -> None:
        self.task = task
        self.context = context
        self.actions = actions or []

    def query_tasks(self, **kwargs):  # type: ignore[no-untyped-def]
        # Always report the single fixture task regardless of filters.
        return [self.task], 1

    def get_task(self, task_id: str) -> Task | None:
        return self.task if task_id == self.task.id else None

    def get_task_context(self, task_id: str) -> TaskContext | None:
        ctx = self.context
        if ctx and ctx.task_id == task_id:
            return ctx
        return None

    def list_task_contexts_for_task_ids(self, task_ids: list[str]) -> dict[str, TaskContext]:
        ctx = self.context
        if ctx and ctx.task_id in task_ids:
            return {ctx.task_id: ctx}
        return {}

    def list_steps_for_task_ids(self, task_ids: list[str]) -> dict[str, list[object]]:
        if self.task.id not in task_ids:
            return {}
        return {self.task.id: []}

    def list_task_contexts_by_session_key(self, session_key: str) -> list[TaskContext]:
        ctx = self.context
        return [ctx] if ctx and ctx.session_key == session_key else []

    def list_steps(self, task_id: str) -> list[object]:
        return []

    def list_artifacts(self, task_id: str) -> list[object]:
        return []

    def list_action_records(
        self,
        task_id: str | None = None,
        limit: int = 200,
        action_name: str | None = None,
        status: str | None = None,
    ) -> list[ActionRecord]:
        """Filter recorded actions by task id, action name and status, capped at *limit*."""

        def keep(item: ActionRecord) -> bool:
            if task_id is not None and item.task_id != task_id:
                return False
            if action_name is not None and item.action_name != action_name:
                return False
            return status is None or item.status == status

        return [item for item in self.actions if keep(item)][:limit]
|
||||
|
||||
|
||||
class FakeSettingsService:
    """Settings-service double returning canned redacted settings and a fixed schema."""

    def __init__(self, root) -> None:  # type: ignore[no-untyped-def]
        self.root = root

    def load_redacted(self):
        canned = {"runtime": {"control_token": "secret"}}
        return SimpleNamespace(settings=canned)

    def load(self):
        return SimpleNamespace(schema={"title": "SettingsSchema"})
|
||||
|
||||
|
||||
class ControlPlaneGetDispatcherTests(unittest.TestCase):
    """Exercises the read-only endpoints of ControlPlaneGetDispatcher with stub state."""

    _TS = "2026-01-01T00:00:00+00:00"

    def _make_task(self, status: str = "published") -> Task:
        # Every test shares the same single-task fixture; only status varies.
        return Task("task-1", "local_file", "/tmp/source.mp4", "task-title", status, self._TS, self._TS)

    def _dispatcher(self, tmpdir: str, repo: FakeRepo) -> ControlPlaneGetDispatcher:
        root = Path(tmpdir)
        state = {
            "root": root,
            "repo": repo,
            "settings": {
                "paths": {"session_dir": str(root / "session")},
                "comment": {"post_split_comment": True, "post_full_video_timeline_comment": True},
                "cleanup": {},
                "publish": {},
            },
            "registry": SimpleNamespace(list_manifests=lambda: [{"name": "publish.biliup_cli"}]),
            "manifests": [{"name": "publish.biliup_cli"}],
        }
        return ControlPlaneGetDispatcher(
            state,
            attention_state_fn=lambda payload: "running" if payload.get("status") == "running" else "stable",
            delivery_state_label_fn=lambda payload: "pending_comment" if payload.get("delivery_state", {}).get("split_comment") == "pending" else "stable",
            build_scheduler_preview_fn=lambda state, include_stage_scan=False, limit=200: {"items": [{"limit": limit}]},
            settings_service_factory=FakeSettingsService,
        )

    def test_handle_settings_schema_returns_schema(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            dispatcher = self._dispatcher(tmpdir, FakeRepo(self._make_task()))

            body, status = dispatcher.handle_settings_schema()

            self.assertEqual(status, HTTPStatus.OK)
            self.assertEqual(body["title"], "SettingsSchema")

    def test_handle_history_filters_records(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            records = [
                ActionRecord(None, "task-1", "comment", "ok", "comment ok", "{}", "2026-01-01T00:01:00+00:00"),
                ActionRecord(None, "task-1", "publish", "error", "publish failed", "{}", "2026-01-01T00:02:00+00:00"),
            ]
            dispatcher = self._dispatcher(tmpdir, FakeRepo(self._make_task(), actions=records))

            body, status = dispatcher.handle_history(limit=100, task_id="task-1", action_name="comment", status="ok")

            self.assertEqual(status, HTTPStatus.OK)
            self.assertEqual(len(body["items"]), 1)
            self.assertEqual(body["items"][0]["action_name"], "comment")

    def test_handle_session_returns_not_found_when_missing(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            dispatcher = self._dispatcher(tmpdir, FakeRepo(self._make_task()))

            body, status = dispatcher.handle_session("missing-session")

            self.assertEqual(status, HTTPStatus.NOT_FOUND)
            self.assertEqual(body["error"], "session not found")

    def test_handle_tasks_filters_attention(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            dispatcher = self._dispatcher(tmpdir, FakeRepo(self._make_task("running")))

            body, status = dispatcher.handle_tasks(
                limit=10,
                offset=0,
                status=None,
                search=None,
                sort="updated_desc",
                attention="running",
                delivery=None,
            )

            self.assertEqual(status, HTTPStatus.OK)
            self.assertEqual(body["total"], 1)
            self.assertEqual(body["items"][0]["id"], "task-1")
|
||||
111
tests/test_control_plane_post_dispatcher.py
Normal file
111
tests/test_control_plane_post_dispatcher.py
Normal file
@@ -0,0 +1,111 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import io
|
||||
import tempfile
|
||||
import unittest
|
||||
from http import HTTPStatus
|
||||
from pathlib import Path
|
||||
from types import SimpleNamespace
|
||||
|
||||
from biliup_next.app.control_plane_post_dispatcher import ControlPlanePostDispatcher
|
||||
from biliup_next.core.models import Task
|
||||
|
||||
|
||||
class FakeRepo:
    """Repository double that only records the action rows handed to it."""

    def __init__(self) -> None:
        # Chronological list of every recorded action.
        self.actions = []

    def add_action_record(self, action) -> None:  # type: ignore[no-untyped-def]
        self.actions.append(action)
|
||||
|
||||
|
||||
class ModuleError(Exception):
    """Test exception mimicking a project module error that serializes to a dict."""

    def to_dict(self) -> dict[str, object]:
        # The dispatcher is expected to surface this payload verbatim.
        return {"error": "conflict"}
|
||||
|
||||
|
||||
class ControlPlanePostDispatcherTests(unittest.TestCase):
    """Exercises the mutating endpoints of ControlPlanePostDispatcher with stub collaborators."""

    _TS = "2026-01-01T00:00:00+00:00"

    def _default_ingest(self):
        # Stub ingest service returning a fixed freshly-created task.
        def create(path, settings):  # type: ignore[no-untyped-def]
            return Task("task-1", "local_file", str(path), "task-title", "created", self._TS, self._TS)

        return SimpleNamespace(create_task_from_file=create)

    def _dispatcher(self, tmpdir: str, repo: FakeRepo, *, ingest_service: object | None = None) -> ControlPlanePostDispatcher:
        base = Path(tmpdir)
        state = {
            "repo": repo,
            "root": base,
            "settings": {
                "paths": {"stage_dir": str(base / "stage"), "session_dir": str(base / "session")},
                "ingest": {"stage_min_free_space_mb": 100},
            },
            "ingest_service": ingest_service or self._default_ingest(),
        }
        importer = SimpleNamespace(
            import_file=lambda source, dest, min_free_bytes=0: {"imported_to": str(dest / source.name)},
            import_upload=lambda filename, fileobj, dest, min_free_bytes=0: {"filename": filename, "dest": str(dest)},
        )
        return ControlPlanePostDispatcher(
            state,
            bind_full_video_action=lambda task_id, bvid: {"task_id": task_id, "full_video_bvid": bvid},
            merge_session_action=lambda session_key, task_ids: {"session_key": session_key, "task_ids": task_ids},
            receive_full_video_webhook=lambda payload: {"ok": True, **payload},
            rebind_session_full_video_action=lambda session_key, bvid: {"session_key": session_key, "full_video_bvid": bvid},
            reset_to_step_action=lambda task_id, step_name: {"task_id": task_id, "step_name": step_name},
            retry_step_action=lambda task_id, step_name: {"task_id": task_id, "step_name": step_name},
            run_task_action=lambda task_id: {"task_id": task_id},
            run_once=lambda: {"scheduler": {"scan_count": 1}, "worker": {"picked": 1}},
            stage_importer_factory=lambda: importer,
            systemd_runtime_factory=lambda: SimpleNamespace(act=lambda service, action: {"service": service, "action": action, "command_ok": True}),
        )

    def test_handle_bind_full_video_maps_missing_bvid(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            dispatcher = self._dispatcher(tmpdir, FakeRepo())

            body, status = dispatcher.handle_bind_full_video("task-1", {})

            self.assertEqual(status, HTTPStatus.BAD_REQUEST)
            self.assertEqual(body["error"], "missing full_video_bvid")

    def test_handle_worker_run_once_records_action(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            repo = FakeRepo()
            dispatcher = self._dispatcher(tmpdir, repo)

            body, status = dispatcher.handle_worker_run_once()

            self.assertEqual(status, HTTPStatus.ACCEPTED)
            self.assertEqual(body["worker"]["picked"], 1)
            self.assertEqual(repo.actions[-1].action_name, "worker_run_once")

    def test_handle_stage_upload_returns_created(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            dispatcher = self._dispatcher(tmpdir, FakeRepo())
            upload = SimpleNamespace(filename="incoming.mp4", file=io.BytesIO(b"video"))

            body, status = dispatcher.handle_stage_upload(upload)

            self.assertEqual(status, HTTPStatus.CREATED)
            self.assertEqual(body["filename"], "incoming.mp4")

    def test_handle_create_task_maps_module_error_to_conflict(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            def boom(path, settings):  # type: ignore[no-untyped-def]
                raise ModuleError()

            dispatcher = self._dispatcher(
                tmpdir,
                FakeRepo(),
                ingest_service=SimpleNamespace(create_task_from_file=boom),
            )

            body, status = dispatcher.handle_create_task({"source_path": str(Path(tmpdir) / "source.mp4")})

            self.assertEqual(status, HTTPStatus.CONFLICT)
            self.assertEqual(body["error"], "conflict")
|
||||
42
tests/test_retry_meta.py
Normal file
42
tests/test_retry_meta.py
Normal file
@@ -0,0 +1,42 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import unittest
|
||||
from types import SimpleNamespace
|
||||
|
||||
from biliup_next.app.retry_meta import retry_meta_for_step
|
||||
|
||||
|
||||
class RetryMetaTests(unittest.TestCase):
    """Validates retry metadata derived from a step's retry schedule configuration."""

    @staticmethod
    def _step(name: str, retries: int, finished_at: str):
        # Fake TaskStep carrying only the fields retry_meta_for_step reads.
        return SimpleNamespace(
            step_name=name,
            status="failed_retryable",
            retry_count=retries,
            started_at=None,
            finished_at=finished_at,
        )

    def test_retry_meta_uses_schedule_minutes(self) -> None:
        # finished_at far in the future => the retry cannot yet be due.
        step = self._step("publish", 1, "2099-01-01T00:00:00+00:00")

        payload = retry_meta_for_step(step, {"publish": {"retry_schedule_minutes": [15, 5]}})

        self.assertIsNotNone(payload)
        self.assertEqual(payload["retry_wait_seconds"], 900)
        self.assertFalse(payload["retry_due"])

    def test_retry_meta_marks_exhausted_after_schedule_is_consumed(self) -> None:
        # retry_count exceeds the two-entry schedule => exhausted, no next retry.
        step = self._step("comment", 3, "2026-01-01T00:00:00+00:00")

        payload = retry_meta_for_step(step, {"comment": {"retry_schedule_minutes": [1, 2]}})

        self.assertIsNotNone(payload)
        self.assertTrue(payload["retry_exhausted"])
        self.assertIsNone(payload["next_retry_at"])
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running this test module directly as a script.
    unittest.main()
|
||||
177
tests/test_serializers.py
Normal file
177
tests/test_serializers.py
Normal file
@@ -0,0 +1,177 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import tempfile
|
||||
import unittest
|
||||
from pathlib import Path
|
||||
|
||||
from biliup_next.app.serializers import ControlPlaneSerializer
|
||||
from biliup_next.core.models import ActionRecord, Artifact, Task, TaskContext, TaskStep
|
||||
|
||||
|
||||
class FakeSerializerRepo:
    """Single-task repository double feeding ControlPlaneSerializer."""

    def __init__(
        self,
        *,
        task: Task,
        context: TaskContext | None = None,
        steps: list[TaskStep] | None = None,
        artifacts: list[Artifact] | None = None,
        actions: list[ActionRecord] | None = None,
    ) -> None:
        self.task = task
        self.context = context
        self.steps = steps or []
        self.artifacts = artifacts or []
        self.actions = actions or []

    def _owns(self, task_id: str) -> bool:
        # Every lookup is keyed to the single fixture task.
        return task_id == self.task.id

    def get_task(self, task_id: str) -> Task | None:
        return self.task if self._owns(task_id) else None

    def get_task_context(self, task_id: str) -> TaskContext | None:
        return self.context if self._owns(task_id) else None

    def list_task_contexts_for_task_ids(self, task_ids: list[str]) -> dict[str, TaskContext]:
        ctx = self.context
        if ctx and ctx.task_id in task_ids:
            return {ctx.task_id: ctx}
        return {}

    def list_steps_for_task_ids(self, task_ids: list[str]) -> dict[str, list[TaskStep]]:
        if self.task.id not in task_ids:
            return {}
        return {self.task.id: list(self.steps)}

    def list_steps(self, task_id: str) -> list[TaskStep]:
        return list(self.steps) if self._owns(task_id) else []

    def list_task_contexts_by_session_key(self, session_key: str) -> list[TaskContext]:
        ctx = self.context
        return [ctx] if ctx and ctx.session_key == session_key else []

    def list_artifacts(self, task_id: str) -> list[Artifact]:
        return list(self.artifacts) if self._owns(task_id) else []

    def list_action_records(self, task_id: str, limit: int = 200) -> list[ActionRecord]:
        if not self._owns(task_id):
            return []
        return list(self.actions)[:limit]
|
||||
|
||||
|
||||
class SerializerTests(unittest.TestCase):
    """End-to-end checks of ControlPlaneSerializer payload assembly."""

    _T0 = "2026-01-01T00:00:00+00:00"

    @staticmethod
    def _settings(session_dir: str, publish: dict | None = None) -> dict:
        # Shared settings shape consumed by the serializer.
        return {
            "paths": {"session_dir": session_dir},
            "comment": {"post_split_comment": True, "post_full_video_timeline_comment": True},
            "cleanup": {},
            "publish": publish or {},
        }

    def _context(self, full_video_bvid: str | None = None) -> TaskContext:
        return TaskContext(
            id=None,
            task_id="task-1",
            session_key="session-1",
            streamer="streamer",
            room_id="room",
            source_title="task-title",
            segment_started_at=None,
            segment_duration_seconds=None,
            full_video_bvid=full_video_bvid,
            created_at=self._T0,
            updated_at=self._T0,
        )

    def test_task_payload_includes_context_retry_and_delivery_state(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            session_dir = Path(tmpdir) / "session" / "task-title"
            session_dir.mkdir(parents=True, exist_ok=True)
            # Persisted bvid files that the serializer should surface.
            (session_dir / "full_video_bvid.txt").write_text("BVFULL123", encoding="utf-8")
            (session_dir / "bvid.txt").write_text("BVSPLIT123", encoding="utf-8")

            task = Task("task-1", "local_file", str(session_dir / "source.mp4"), "task-title", "running", self._T0, "2026-01-01T00:01:00+00:00")
            steps = [
                TaskStep(None, "task-1", "publish", "failed_retryable", "ERR", "upload failed", 1, None, "2099-01-01T00:00:00+00:00"),
            ]
            repo = FakeSerializerRepo(task=task, context=self._context(), steps=steps)
            state = {
                "repo": repo,
                "settings": self._settings(str(Path(tmpdir) / "session"), {"retry_schedule_minutes": [10]}),
            }

            payload = ControlPlaneSerializer(state).task_payload("task-1")

            self.assertIsNotNone(payload)
            self.assertEqual(payload["session_context"]["session_key"], "session-1")
            self.assertEqual(payload["session_context"]["full_video_bvid"], "BVFULL123")
            self.assertEqual(payload["retry_state"]["step_name"], "publish")
            self.assertEqual(payload["delivery_state"]["split_comment"], "pending")

    def test_session_payload_reuses_task_payload_serialization(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            task = Task("task-1", "local_file", str(Path(tmpdir) / "session" / "task-title" / "source.mp4"), "task-title", "published", self._T0, "2026-01-01T00:01:00+00:00")
            repo = FakeSerializerRepo(task=task, context=self._context("BVFULL123"))
            state = {"repo": repo, "settings": self._settings(str(Path(tmpdir) / "session"))}

            payload = ControlPlaneSerializer(state).session_payload("session-1")

            self.assertIsNotNone(payload)
            self.assertEqual(payload["session_key"], "session-1")
            self.assertEqual(payload["task_count"], 1)
            self.assertEqual(payload["full_video_url"], "https://www.bilibili.com/video/BVFULL123")
            self.assertEqual(payload["tasks"][0]["id"], "task-1")

    def test_timeline_payload_includes_task_step_artifact_and_action_entries(self) -> None:
        task = Task("task-1", "local_file", "/tmp/source.mp4", "task-title", "published", self._T0, "2026-01-01T00:02:00+00:00")
        steps = [
            TaskStep(None, "task-1", "comment", "succeeded", None, None, 0, "2026-01-01T00:01:00+00:00", "2026-01-01T00:01:30+00:00"),
        ]
        artifacts = [
            Artifact(None, "task-1", "publish_bvid", "/tmp/bvid.txt", "{}", "2026-01-01T00:01:40+00:00"),
        ]
        actions = [
            ActionRecord(
                id=None,
                task_id="task-1",
                action_name="comment",
                status="ok",
                summary="comment succeeded",
                details_json=json.dumps({"split": {"status": "ok"}, "full": {"status": "skipped"}}),
                created_at="2026-01-01T00:01:50+00:00",
            )
        ]
        repo = FakeSerializerRepo(task=task, steps=steps, artifacts=artifacts, actions=actions)
        state = {"repo": repo, "settings": self._settings("/tmp/session")}

        payload = ControlPlaneSerializer(state).timeline_payload("task-1")

        self.assertIsNotNone(payload)
        action_item = next(item for item in payload["items"] if item["kind"] == "action")
        self.assertIn("split=ok", action_item["summary"])
        kinds = {item["kind"] for item in payload["items"]}
        self.assertTrue({"task", "step", "artifact", "action"}.issubset(kinds))
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running this test module directly as a script.
    unittest.main()
|
||||
92
tests/test_session_delivery_service.py
Normal file
92
tests/test_session_delivery_service.py
Normal file
@@ -0,0 +1,92 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import tempfile
|
||||
import unittest
|
||||
from pathlib import Path
|
||||
|
||||
from biliup_next.app.session_delivery_service import SessionDeliveryService
|
||||
from biliup_next.core.models import Task, TaskContext
|
||||
|
||||
|
||||
class FakeRepo:
    """In-memory repository double recording every mutation made by SessionDeliveryService.

    Captured state for assertions:
      * ``task_context_upserts`` -- every context passed to ``upsert_task_context``
      * ``session_binding_upserts`` -- every binding passed to ``upsert_session_binding``
      * ``action_records`` -- every record passed to ``add_action_record``
      * ``updated_session_bvid`` -- last ``(session_key, bvid, updated_at)`` triple
    """

    def __init__(self, task: Task, context: TaskContext | None = None, contexts: list[TaskContext] | None = None) -> None:
        self.task = task
        self.context = context
        # Fix: compare against None explicitly so an explicitly supplied empty
        # list is honored instead of being replaced by [context] (the previous
        # `contexts or ...` treated [] as "not provided").
        if contexts is not None:
            self.contexts = contexts
        else:
            self.contexts = [] if context is None else [context]
        self.task_context_upserts: list[TaskContext] = []
        self.session_binding_upserts = []
        self.action_records = []
        self.updated_session_bvid: tuple[str, str, str] | None = None

    def get_task(self, task_id: str) -> Task | None:
        """Return the fixture task when the id matches, else None."""
        return self.task if task_id == self.task.id else None

    def get_task_context(self, task_id: str) -> TaskContext | None:
        return self.context if task_id == self.task.id else None

    def upsert_task_context(self, context: TaskContext) -> None:
        # Mirrors real upsert semantics: latest write wins, and is recorded.
        self.context = context
        self.task_context_upserts.append(context)

    def upsert_session_binding(self, binding) -> None:  # type: ignore[no-untyped-def]
        self.session_binding_upserts.append(binding)

    def add_action_record(self, record) -> None:  # type: ignore[no-untyped-def]
        self.action_records.append(record)

    def list_task_contexts_by_session_key(self, session_key: str) -> list[TaskContext]:
        return [context for context in self.contexts if context.session_key == session_key]

    def update_session_full_video_bvid(self, session_key: str, full_video_bvid: str, updated_at: str) -> int:
        """Record the rebind request and report how many contexts it would touch."""
        self.updated_session_bvid = (session_key, full_video_bvid, updated_at)
        return len(self.list_task_contexts_by_session_key(session_key))

    def list_task_contexts_by_source_title(self, source_title: str) -> list[TaskContext]:
        return [context for context in self.contexts if context.source_title == source_title]
|
||||
|
||||
|
||||
class SessionDeliveryServiceTests(unittest.TestCase):
    """Covers webhook ingestion and session-merge validation in SessionDeliveryService."""

    _T0 = "2026-01-01T00:00:00+00:00"

    def _context(self, session_key: str) -> TaskContext:
        return TaskContext(
            id=None,
            task_id="task-1",
            session_key=session_key,
            streamer="streamer",
            room_id="room",
            source_title="task-title",
            segment_started_at=None,
            segment_duration_seconds=None,
            full_video_bvid=None,
            created_at=self._T0,
            updated_at=self._T0,
        )

    def test_receive_full_video_webhook_updates_binding_context_and_action_record(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            task = Task("task-1", "local_file", "/tmp/source.mp4", "task-title", "published", self._T0, self._T0)
            context = self._context("task:task-1")
            repo = FakeRepo(task, context=context, contexts=[context])
            state = {"repo": repo, "settings": {"paths": {"session_dir": str(Path(tmpdir) / "session")}}}

            result = SessionDeliveryService(state).receive_full_video_webhook(
                {"session_key": "session-1", "source_title": "task-title", "full_video_bvid": "BVWEBHOOK123"}
            )

            self.assertEqual(result["updated_count"], 1)
            self.assertEqual(repo.context.session_key, "session-1")
            self.assertEqual(repo.context.full_video_bvid, "BVWEBHOOK123")
            self.assertEqual(repo.session_binding_upserts[-1].full_video_bvid, "BVWEBHOOK123")
            self.assertEqual(repo.action_records[-1].action_name, "webhook_full_video_uploaded")
            persisted = Path(result["tasks"][0]["path"])
            self.assertTrue(persisted.exists())
            self.assertEqual(persisted.read_text(encoding="utf-8"), "BVWEBHOOK123")

    def test_merge_session_returns_error_when_task_ids_empty(self) -> None:
        task = Task("task-1", "local_file", "/tmp/source.mp4", "task-title", "created", self._T0, self._T0)
        state = {"repo": FakeRepo(task), "settings": {"paths": {"session_dir": "/tmp/session"}}}

        # Whitespace-only ids must be rejected as an empty set.
        result = SessionDeliveryService(state).merge_session("session-1", ["", " "])

        self.assertEqual(result["error"]["code"], "TASK_IDS_EMPTY")
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running this test module directly as a script.
    unittest.main()
|
||||
80
tests/test_settings_service.py
Normal file
80
tests/test_settings_service.py
Normal file
@@ -0,0 +1,80 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import tempfile
|
||||
import unittest
|
||||
from pathlib import Path
|
||||
|
||||
from biliup_next.core.config import SettingsService
|
||||
|
||||
|
||||
class SettingsServiceTests(unittest.TestCase):
    """Verifies SettingsService seeds its settings files from the standalone example."""

    # Schema fixture: declares defaults for every group the service resolves.
    _SCHEMA_JSON = """
    {
      "groups": {
        "runtime": {
          "database_path": {"type": "string", "default": "data/workspace/biliup_next.db"}
        },
        "paths": {
          "stage_dir": {"type": "string", "default": "data/workspace/stage"},
          "backup_dir": {"type": "string", "default": "data/workspace/backup"},
          "session_dir": {"type": "string", "default": "data/workspace/session"},
          "cookies_file": {"type": "string", "default": "runtime/cookies.json"},
          "upload_config_file": {"type": "string", "default": "runtime/upload_config.json"}
        },
        "ingest": {
          "ffprobe_bin": {"type": "string", "default": "ffprobe"}
        },
        "transcribe": {
          "ffmpeg_bin": {"type": "string", "default": "ffmpeg"}
        },
        "split": {
          "ffmpeg_bin": {"type": "string", "default": "ffmpeg"}
        },
        "song_detect": {
          "codex_cmd": {"type": "string", "default": "codex"}
        },
        "publish": {
          "biliup_path": {"type": "string", "default": "runtime/biliup"},
          "cookie_file": {"type": "string", "default": "runtime/cookies.json"}
        }
      }
    }
    """

    # Example fixture: the standalone settings the service should seed from.
    _EXAMPLE_JSON = """
    {
      "runtime": {"database_path": "data/workspace/biliup_next.db"},
      "paths": {
        "stage_dir": "data/workspace/stage",
        "backup_dir": "data/workspace/backup",
        "session_dir": "data/workspace/session",
        "cookies_file": "runtime/cookies.json",
        "upload_config_file": "runtime/upload_config.json"
      },
      "ingest": {"ffprobe_bin": "ffprobe"},
      "transcribe": {"ffmpeg_bin": "ffmpeg"},
      "split": {"ffmpeg_bin": "ffmpeg"},
      "song_detect": {"codex_cmd": "codex"},
      "publish": {"biliup_path": "runtime/biliup", "cookie_file": "runtime/cookies.json"}
    }
    """

    def test_load_seeds_settings_from_standalone_example_when_missing(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            root = Path(tmpdir)
            config_dir = root / "config"
            config_dir.mkdir(parents=True, exist_ok=True)
            (config_dir / "settings.schema.json").write_text(self._SCHEMA_JSON, encoding="utf-8")
            (config_dir / "settings.standalone.example.json").write_text(self._EXAMPLE_JSON, encoding="utf-8")

            bundle = SettingsService(root).load()

            # Both the live and staged settings files must be created, and
            # relative paths resolved against the workspace root.
            self.assertTrue((config_dir / "settings.json").exists())
            self.assertTrue((config_dir / "settings.staged.json").exists())
            self.assertEqual(bundle.settings["paths"]["cookies_file"], str((root / "runtime" / "cookies.json").resolve()))
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running this test module directly as a script.
    unittest.main()
|
||||
143
tests/test_task_actions.py
Normal file
143
tests/test_task_actions.py
Normal file
@@ -0,0 +1,143 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import tempfile
|
||||
import unittest
|
||||
from pathlib import Path
|
||||
from unittest.mock import patch
|
||||
|
||||
from biliup_next.app.task_actions import bind_full_video_action, merge_session_action, rebind_session_full_video_action
|
||||
from biliup_next.core.models import Task, TaskContext
|
||||
|
||||
|
||||
class FakeRepo:
    """In-memory repository double recording the writes made by task actions.

    Captured state for assertions:
      * ``task_context_upserts`` -- every context passed to ``upsert_task_context``
      * ``session_binding_upserts`` -- every binding passed to ``upsert_session_binding``
      * ``updated_session_bvid`` -- last ``(session_key, bvid, updated_at)`` triple
    Action records are deliberately discarded (``add_action_record`` is a no-op).
    """

    def __init__(self, task: Task, context: TaskContext | None = None, contexts: list[TaskContext] | None = None) -> None:
        self.task = task
        self.context = context
        # Fix: compare against None explicitly so an explicitly supplied empty
        # list is honored instead of being replaced by [context] (the previous
        # `contexts or ...` treated [] as "not provided").
        if contexts is not None:
            self.contexts = contexts
        else:
            self.contexts = [] if context is None else [context]
        self.task_context_upserts: list[TaskContext] = []
        self.session_binding_upserts = []
        self.updated_session_bvid: tuple[str, str, str] | None = None

    def get_task(self, task_id: str) -> Task | None:
        """Return the fixture task when the id matches, else None."""
        return self.task if task_id == self.task.id else None

    def get_task_context(self, task_id: str) -> TaskContext | None:
        return self.context if task_id == self.task.id else None

    def upsert_task_context(self, context: TaskContext) -> None:
        # Mirrors real upsert semantics: latest write wins, and is recorded.
        self.context = context
        self.task_context_upserts.append(context)

    def upsert_session_binding(self, binding) -> None:  # type: ignore[no-untyped-def]
        self.session_binding_upserts.append(binding)

    def add_action_record(self, record) -> None:  # type: ignore[no-untyped-def]
        # Action history is irrelevant to these tests.
        return None

    def list_task_contexts_by_session_key(self, session_key: str) -> list[TaskContext]:
        return [context for context in self.contexts if context.session_key == session_key]

    def update_session_full_video_bvid(self, session_key: str, full_video_bvid: str, updated_at: str) -> int:
        """Record the rebind request and report how many contexts it would touch."""
        self.updated_session_bvid = (session_key, full_video_bvid, updated_at)
        return len(self.list_task_contexts_by_session_key(session_key))

    def list_task_contexts_by_source_title(self, source_title: str) -> list[TaskContext]:
        return [context for context in self.contexts if context.source_title == source_title]
|
||||
|
||||
|
||||
class TaskActionsTests(unittest.TestCase):
    """Covers bind/rebind/merge task actions around full-video BVID handling."""

    @staticmethod
    def _task(status):
        """Build the canonical fixture task with the given lifecycle status."""
        return Task(
            "task-1",
            "local_file",
            "/tmp/source.mp4",
            "task-title",
            status,
            "2026-01-01T00:00:00+00:00",
            "2026-01-01T00:00:00+00:00",
        )

    @staticmethod
    def _context(task_id, session_key, source_title, bvid):
        """Build a TaskContext fixture differing only in identity fields."""
        return TaskContext(
            id=None,
            task_id=task_id,
            session_key=session_key,
            streamer="streamer",
            room_id="room",
            source_title=source_title,
            segment_started_at=None,
            segment_duration_seconds=None,
            full_video_bvid=bvid,
            created_at="2026-01-01T00:00:00+00:00",
            updated_at="2026-01-01T00:00:00+00:00",
        )

    @staticmethod
    def _state(repo, tmpdir):
        """Minimal app state pointing session persistence at a temp directory."""
        return {
            "repo": repo,
            "settings": {"paths": {"session_dir": str(Path(tmpdir) / "session")}},
        }

    def test_bind_full_video_action_persists_context_binding_and_file(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            repo = FakeRepo(self._task("created"))
            state = self._state(repo, tmpdir)

            with patch("biliup_next.app.task_actions.ensure_initialized", return_value=state):
                with patch("biliup_next.app.task_actions.record_task_action"):
                    result = bind_full_video_action("task-1", " BV1234567890 ")

            # The BVID is whitespace-trimmed, stored on the context, bound once,
            # and persisted to a file on disk.
            self.assertEqual(result["full_video_bvid"], "BV1234567890")
            self.assertEqual(repo.context.full_video_bvid, "BV1234567890")
            self.assertEqual(len(repo.session_binding_upserts), 1)
            persisted = Path(result["path"])
            self.assertTrue(persisted.exists())
            self.assertEqual(persisted.read_text(encoding="utf-8"), "BV1234567890")

    def test_rebind_session_full_video_action_updates_binding_and_all_task_files(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            context = self._context("task-1", "session-1", "task-title", "BVOLD")
            repo = FakeRepo(self._task("published"), context=context, contexts=[context])
            state = self._state(repo, tmpdir)

            with patch("biliup_next.app.task_actions.ensure_initialized", return_value=state):
                with patch("biliup_next.app.task_actions.record_task_action"):
                    result = rebind_session_full_video_action("session-1", "BVNEW1234567")

            # Rebinding replaces the old BVID everywhere: context, session
            # binding, and the per-task persisted file.
            self.assertEqual(result["updated_count"], 1)
            self.assertEqual(repo.context.full_video_bvid, "BVNEW1234567")
            self.assertIsNotNone(repo.updated_session_bvid)
            self.assertEqual(len(repo.session_binding_upserts), 1)
            self.assertEqual(repo.session_binding_upserts[-1].full_video_bvid, "BVNEW1234567")
            persisted_path = Path(result["tasks"][0]["path"])
            self.assertTrue(persisted_path.exists())
            self.assertEqual(persisted_path.read_text(encoding="utf-8"), "BVNEW1234567")

    def test_merge_session_action_reuses_persist_path_for_inherited_bvid(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            existing_context = self._context("existing-task", "session-1", "existing-title", "BVINHERITED123")
            repo = FakeRepo(self._task("created"), contexts=[existing_context])
            state = self._state(repo, tmpdir)

            with patch("biliup_next.app.task_actions.ensure_initialized", return_value=state):
                with patch("biliup_next.app.task_actions.record_task_action"):
                    result = merge_session_action("session-1", ["task-1"])

            # The merged task inherits the BVID already bound to the session
            # and persists it through the regular path machinery.
            self.assertEqual(result["merged_count"], 1)
            self.assertEqual(repo.context.full_video_bvid, "BVINHERITED123")
            self.assertEqual(len(repo.session_binding_upserts), 1)
            self.assertEqual(repo.session_binding_upserts[0].full_video_bvid, "BVINHERITED123")
            self.assertIn("path", result["tasks"][0])
            persisted_path = Path(result["tasks"][0]["path"])
            self.assertTrue(persisted_path.exists())
            self.assertEqual(persisted_path.read_text(encoding="utf-8"), "BVINHERITED123")
||||
|
||||
# Allow running this test module directly with `python`.
if __name__ == "__main__":
    unittest.main()
|
||||
46
tests/test_task_control_service.py
Normal file
46
tests/test_task_control_service.py
Normal file
@ -0,0 +1,46 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import unittest
|
||||
from types import SimpleNamespace
|
||||
from unittest.mock import patch
|
||||
|
||||
from biliup_next.app.task_control_service import TaskControlService
|
||||
|
||||
|
||||
class TaskControlServiceTests(unittest.TestCase):
    """TaskControlService should be a thin façade over process_task and reset."""

    @staticmethod
    def _state():
        """Fresh minimal app-state dict for each test."""
        return {"repo": object(), "settings": {"paths": {"session_dir": "/tmp/session"}}}

    def test_run_task_delegates_to_process_task(self) -> None:
        run_payload = {"processed": [{"task_id": "task-1"}]}

        with patch("biliup_next.app.task_control_service.process_task", return_value=run_payload) as process_mock:
            result = TaskControlService(self._state()).run_task("task-1")

        # run_task forwards the id untouched and returns the runner payload.
        self.assertEqual(result["processed"][0]["task_id"], "task-1")
        process_mock.assert_called_once_with("task-1")

    def test_retry_step_delegates_with_reset_step(self) -> None:
        retry_payload = {"processed": [{"step": "publish"}]}

        with patch("biliup_next.app.task_control_service.process_task", return_value=retry_payload) as process_mock:
            result = TaskControlService(self._state()).retry_step("task-1", "publish")

        # retry_step translates into process_task(..., reset_step=<step>).
        self.assertEqual(result["processed"][0]["step"], "publish")
        process_mock.assert_called_once_with("task-1", reset_step="publish")

    def test_reset_to_step_combines_reset_and_run_payloads(self) -> None:
        reset_service = SimpleNamespace(reset_to_step=lambda task_id, step_name: {"task_id": task_id, "reset_to": step_name})

        with patch("biliup_next.app.task_control_service.TaskResetService", return_value=reset_service) as reset_cls:
            with patch.object(reset_service, "reset_to_step", return_value={"task_id": "task-1", "reset_to": "split"}) as reset_mock:
                with patch("biliup_next.app.task_control_service.process_task", return_value={"processed": [{"task_id": "task-1"}]}) as process_mock:
                    result = TaskControlService(self._state()).reset_to_step("task-1", "split")

        # The combined payload exposes both the reset result and the re-run result.
        self.assertEqual(result["reset"]["reset_to"], "split")
        self.assertEqual(result["run"]["processed"][0]["task_id"], "task-1")
        reset_cls.assert_called_once()
        reset_mock.assert_called_once_with("task-1", "split")
        process_mock.assert_called_once_with("task-1")
|
||||
|
||||
# Allow running this test module directly with `python`.
if __name__ == "__main__":
    unittest.main()
|
||||
70
tests/test_task_engine.py
Normal file
70
tests/test_task_engine.py
Normal file
@ -0,0 +1,70 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import unittest
|
||||
from types import SimpleNamespace
|
||||
|
||||
from biliup_next.app.task_engine import infer_error_step_name, next_runnable_step
|
||||
from biliup_next.core.models import TaskStep
|
||||
|
||||
|
||||
class TaskEngineTests(unittest.TestCase):
    """Unit tests for step inference and scheduling helpers in task_engine."""

    @staticmethod
    def _pending_step(name):
        """A TaskStep fixture in the pristine 'pending' state."""
        return TaskStep(None, "task-1", name, "pending", None, None, 0, None, None)

    @staticmethod
    def _running_step(name):
        """A TaskStep fixture currently marked 'running'."""
        return TaskStep(None, "task-1", name, "running", None, None, 0, None, None)

    def test_infer_error_step_name_prefers_running_step(self) -> None:
        task = SimpleNamespace(status="running")
        step_map = {
            "transcribe": self._running_step("transcribe"),
            "song_detect": self._pending_step("song_detect"),
        }

        # The step currently marked running is reported as the failure site.
        self.assertEqual(infer_error_step_name(task, step_map), "transcribe")

    def test_next_runnable_step_returns_none_while_a_step_is_running(self) -> None:
        task = SimpleNamespace(id="task-1", status="running")
        step_map = {
            "transcribe": self._running_step("transcribe"),
            "song_detect": self._pending_step("song_detect"),
        }
        state = {
            "settings": {
                "comment": {"enabled": True},
                "collection": {"enabled": True},
                "paths": {},
                "publish": {},
            }
        }

        # Nothing else may start while a step is in flight.
        self.assertEqual(next_runnable_step(task, step_map, state), (None, None))

    def test_next_runnable_step_returns_wait_payload_for_retryable_publish(self) -> None:
        task = SimpleNamespace(id="task-1", status="failed_retryable")
        step_map = {
            "publish": TaskStep(
                None,
                "task-1",
                "publish",
                "failed_retryable",
                "PUBLISH_UPLOAD_FAILED",
                "upload failed",
                1,
                None,
                "2099-01-01T00:00:00+00:00",
            )
        }
        state = {
            "settings": {
                "comment": {"enabled": True},
                "collection": {"enabled": True},
                "paths": {},
                "publish": {"retry_schedule_minutes": [10]},
            }
        }

        step_name, waiting_payload = next_runnable_step(task, step_map, state)

        # With the retry window not yet elapsed there is no runnable step,
        # only a payload describing the pending retry.
        self.assertIsNone(step_name)
        self.assertIsNotNone(waiting_payload)
        self.assertTrue(waiting_payload["waiting_for_retry"])
        self.assertEqual(waiting_payload["step"], "publish")
||||
|
||||
# Allow running this test module directly with `python`.
if __name__ == "__main__":
    unittest.main()
|
||||
75
tests/test_task_policies.py
Normal file
75
tests/test_task_policies.py
Normal file
@ -0,0 +1,75 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import unittest
|
||||
from types import SimpleNamespace
|
||||
|
||||
from biliup_next.app.task_policies import apply_disabled_step_fallbacks, resolve_failure
|
||||
from biliup_next.core.errors import ModuleError
|
||||
from biliup_next.core.models import TaskStep
|
||||
|
||||
|
||||
class FakePolicyRepo:
    """In-memory stand-in for the task repository used by policy tests.

    Status mutations are recorded rather than persisted so assertions can
    inspect exactly what the policies attempted to write.
    """

    def __init__(self, task, steps: list[TaskStep]) -> None:  # type: ignore[no-untyped-def]
        self.task = task
        self.steps = steps
        # Chronological logs of every attempted mutation.
        self.step_updates: list[tuple] = []
        self.task_updates: list[tuple] = []

    def get_task(self, task_id: str):  # type: ignore[no-untyped-def]
        """Return the single fixture task when the id matches, else None."""
        if task_id == self.task.id:
            return self.task
        return None

    def list_steps(self, task_id: str) -> list[TaskStep]:
        """Return a shallow copy of the fixture steps for the known task."""
        if task_id != self.task.id:
            return []
        return list(self.steps)

    def update_step_status(self, task_id: str, step_name: str, status: str, **kwargs) -> None:  # type: ignore[no-untyped-def]
        """Log the step-status transition instead of persisting it."""
        self.step_updates.append((task_id, step_name, status, kwargs))

    def update_task_status(self, task_id: str, status: str, updated_at: str) -> None:
        """Log the task-status transition instead of persisting it."""
        self.task_updates.append((task_id, status, updated_at))
|
||||
|
||||
class TaskPoliciesTests(unittest.TestCase):
    """Tests for disabled-step fallbacks and failure-resolution policies."""

    def test_apply_disabled_step_fallbacks_marks_collection_done_when_disabled(self) -> None:
        task = SimpleNamespace(id="task-1", status="commented")
        repo = FakePolicyRepo(task, [])
        state = {
            "settings": {
                "comment": {"enabled": True},
                "collection": {"enabled": False},
                "paths": {},
                "publish": {},
            }
        }

        changed = apply_disabled_step_fallbacks(state, task, repo)

        # Both collection steps are auto-completed and the task advances
        # straight to the synced status.
        self.assertTrue(changed)
        self.assertEqual([update[1] for update in repo.step_updates], ["collection_a", "collection_b"])
        self.assertEqual(repo.task_updates[-1][1], "collection_synced")

    def test_resolve_failure_uses_publish_retry_schedule(self) -> None:
        task = SimpleNamespace(id="task-1", status="running")
        running_publish = TaskStep(None, "task-1", "publish", "running", None, None, 0, "2026-01-01T00:00:00+00:00", None)
        repo = FakePolicyRepo(task, [running_publish])
        state = {
            "settings": {
                "publish": {"retry_schedule_minutes": [15, 5]},
                "comment": {},
                "paths": {},
            }
        }
        exc = ModuleError(code="PUBLISH_UPLOAD_FAILED", message="upload failed", retryable=True)

        failure = resolve_failure(task, repo, state, exc)

        # First schedule entry applies: 15 minutes == 900 seconds.
        self.assertEqual(failure["step_name"], "publish")
        self.assertEqual(failure["payload"]["retry_status"], "failed_retryable")
        self.assertEqual(failure["payload"]["next_retry_delay_seconds"], 900)
        self.assertEqual(repo.step_updates[-1][1], "publish")
        self.assertEqual(repo.task_updates[-1][1], "failed_retryable")
|
||||
|
||||
# Allow running this test module directly with `python`.
if __name__ == "__main__":
    unittest.main()
|
||||
121
tests/test_task_repository_sqlite.py
Normal file
121
tests/test_task_repository_sqlite.py
Normal file
@ -0,0 +1,121 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import tempfile
|
||||
import unittest
|
||||
from pathlib import Path
|
||||
|
||||
from biliup_next.core.models import SessionBinding, Task, TaskContext, TaskStep
|
||||
from biliup_next.infra.db import Database
|
||||
from biliup_next.infra.task_repository import TaskRepository
|
||||
|
||||
|
||||
class TaskRepositorySqliteTests(unittest.TestCase):
    """Integration tests exercising TaskRepository against a real SQLite file."""

    def setUp(self) -> None:
        # Each test gets a fresh on-disk database inside its own temp dir.
        self.tempdir = tempfile.TemporaryDirectory()
        self.db = Database(Path(self.tempdir.name) / "test.db")
        self.db.initialize()
        self.repo = TaskRepository(self.db)

    def tearDown(self) -> None:
        self.tempdir.cleanup()

    def _seed_task(self, task_id, path, title, status, updated_at):
        """Insert one task row with the shared fixture creation timestamp."""
        self.repo.upsert_task(
            Task(task_id, "local_file", path, title, status, "2026-01-01T00:00:00+00:00", updated_at)
        )

    @staticmethod
    def _binding(session_key, source_title, streamer, room_id, bvid, stamp):
        """Build a SessionBinding whose created/updated timestamps are equal."""
        return SessionBinding(
            id=None,
            session_key=session_key,
            source_title=source_title,
            streamer=streamer,
            room_id=room_id,
            full_video_bvid=bvid,
            created_at=stamp,
            updated_at=stamp,
        )

    def test_query_tasks_filters_and_sorts_by_updated_desc(self) -> None:
        self._seed_task("task-1", "/tmp/a.mp4", "Alpha", "created", "2026-01-01T00:01:00+00:00")
        self._seed_task("task-2", "/tmp/b.mp4", "Beta", "published", "2026-01-01T00:03:00+00:00")
        self._seed_task("task-3", "/tmp/c.mp4", "Gamma", "published", "2026-01-01T00:02:00+00:00")

        items, total = self.repo.query_tasks(status="published", search="a", sort="updated_desc")

        # The status filter drops task-1; the remaining two come back newest first.
        self.assertEqual(total, 2)
        self.assertEqual([item.id for item in items], ["task-2", "task-3"])

    def test_list_task_contexts_and_steps_for_task_ids_returns_batched_maps(self) -> None:
        self._seed_task("task-1", "/tmp/a.mp4", "Alpha", "created", "2026-01-01T00:01:00+00:00")
        self._seed_task("task-2", "/tmp/b.mp4", "Beta", "created", "2026-01-01T00:02:00+00:00")
        self.repo.upsert_task_context(
            TaskContext(
                id=None,
                task_id="task-1",
                session_key="session-1",
                streamer="streamer",
                room_id="room",
                source_title="Alpha",
                segment_started_at="2026-01-01T00:00:00+00:00",
                segment_duration_seconds=60.0,
                full_video_bvid="BV123",
                created_at="2026-01-01T00:00:00+00:00",
                updated_at="2026-01-01T00:00:00+00:00",
            )
        )
        self.repo.replace_steps(
            "task-1",
            [
                TaskStep(None, "task-1", "transcribe", "pending", None, None, 0, None, None),
                TaskStep(None, "task-1", "song_detect", "pending", None, None, 0, None, None),
            ],
        )
        self.repo.replace_steps(
            "task-2",
            [
                TaskStep(None, "task-2", "transcribe", "running", None, None, 0, "2026-01-01T00:03:00+00:00", None),
            ],
        )

        contexts = self.repo.list_task_contexts_for_task_ids(["task-1", "task-2"])
        steps = self.repo.list_steps_for_task_ids(["task-1", "task-2"])

        # Only task-1 owns a context; both tasks appear in the steps map with
        # their insertion order preserved.
        self.assertEqual(set(contexts.keys()), {"task-1"})
        self.assertEqual(contexts["task-1"].full_video_bvid, "BV123")
        self.assertEqual([step.step_name for step in steps["task-1"]], ["transcribe", "song_detect"])
        self.assertEqual(steps["task-2"][0].status, "running")

    def test_session_binding_supports_upsert_and_source_title_fallback_lookup(self) -> None:
        self.repo.upsert_session_binding(
            self._binding("session-1", "Alpha", "streamer", "room", "BVOLD", "2026-01-01T00:00:00+00:00")
        )
        self.repo.upsert_session_binding(
            self._binding("session-1", "Alpha", "streamer", "room", "BVNEW", "2026-01-01T00:01:00+00:00")
        )
        self.repo.upsert_session_binding(
            self._binding(None, "Beta", "streamer-2", "room-2", "BVBETA", "2026-01-01T00:02:00+00:00")
        )

        binding_by_session = self.repo.get_session_binding(session_key="session-1")
        binding_by_title = self.repo.get_session_binding(source_title="Beta")

        # The second upsert for session-1 wins; the keyless Beta row is still
        # reachable through the source_title fallback.
        self.assertIsNotNone(binding_by_session)
        self.assertEqual(binding_by_session.full_video_bvid, "BVNEW")
        self.assertIsNotNone(binding_by_title)
        self.assertEqual(binding_by_title.full_video_bvid, "BVBETA")
|
||||
|
||||
# Allow running this test module directly with `python`.
if __name__ == "__main__":
    unittest.main()
|
||||
102
tests/test_task_runner.py
Normal file
102
tests/test_task_runner.py
Normal file
@ -0,0 +1,102 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import unittest
|
||||
from types import SimpleNamespace
|
||||
from unittest.mock import patch
|
||||
|
||||
from biliup_next.app.task_runner import process_task
|
||||
from biliup_next.core.models import TaskStep
|
||||
|
||||
|
||||
class FakeRunnerRepo:
    """Repository double that mirrors status changes onto its in-memory fixtures."""

    def __init__(self, task, steps: list[TaskStep]) -> None:  # type: ignore[no-untyped-def]
        self.task = task
        self.steps = steps
        # Chronological logs of every mutation the runner attempts.
        self.step_updates: list[tuple] = []
        self.task_updates: list[tuple] = []
        self.claims: list[tuple[str, str, str]] = []

    def get_task(self, task_id: str):  # type: ignore[no-untyped-def]
        """Return the fixture task for a matching id, else None."""
        if task_id == self.task.id:
            return self.task
        return None

    def list_steps(self, task_id: str) -> list[TaskStep]:
        """Return a shallow copy of the fixture steps for the known task."""
        if task_id != self.task.id:
            return []
        return list(self.steps)

    def _replace_step(self, task_id, step_name, build):  # type: ignore[no-untyped-def]
        """Swap the matching fixture step for the TaskStep produced by *build*."""
        for position, current in enumerate(self.steps):
            if current.task_id == task_id and current.step_name == step_name:
                self.steps[position] = build(current)

    def update_step_status(self, task_id: str, step_name: str, status: str, **kwargs) -> None:  # type: ignore[no-untyped-def]
        """Log the transition and rewrite the matching step in place.

        Fields not supplied via kwargs keep their previous values.
        """
        self.step_updates.append((task_id, step_name, status, kwargs))
        self._replace_step(
            task_id,
            step_name,
            lambda step: TaskStep(
                step.id,
                step.task_id,
                step.step_name,
                status,
                kwargs.get("error_code", step.error_code),
                kwargs.get("error_message", step.error_message),
                kwargs.get("retry_count", step.retry_count),
                kwargs.get("started_at", step.started_at),
                kwargs.get("finished_at", step.finished_at),
            ),
        )

    def update_task_status(self, task_id: str, status: str, updated_at: str) -> None:
        """Log the transition and mirror it onto a rebuilt fixture task."""
        self.task_updates.append((task_id, status, updated_at))
        if task_id == self.task.id:
            self.task = SimpleNamespace(**{**self.task.__dict__, "status": status, "updated_at": updated_at})

    def claim_step_running(self, task_id: str, step_name: str, *, started_at: str) -> bool:
        """Log the claim, flip the matching step to running, and report success."""
        self.claims.append((task_id, step_name, started_at))
        self._replace_step(
            task_id,
            step_name,
            lambda step: TaskStep(step.id, step.task_id, step.step_name, "running", None, None, step.retry_count, started_at, None),
        )
        return True
|
||||
|
||||
class TaskRunnerTests(unittest.TestCase):
    """Tests for process_task orchestration using a fully faked repository."""

    @staticmethod
    def _state(repo):
        """App state with all optional pipeline stages enabled."""
        return {
            "repo": repo,
            "settings": {"ingest": {}, "paths": {}, "comment": {"enabled": True}, "collection": {"enabled": True}, "publish": {}},
        }

    def test_process_task_reset_step_marks_task_back_to_pre_step_status(self) -> None:
        task = SimpleNamespace(id="task-1", status="failed_retryable", updated_at="2026-01-01T00:00:00+00:00")
        failed_step = TaskStep(
            None, "task-1", "transcribe", "failed_retryable", "ERR", "boom", 1,
            "2026-01-01T00:00:00+00:00", "2026-01-01T00:01:00+00:00",
        )
        repo = FakeRunnerRepo(task, [failed_step])
        state = self._state(repo)

        with patch("biliup_next.app.task_runner.ensure_initialized", return_value=state):
            with patch("biliup_next.app.task_runner.record_task_action"):
                with patch("biliup_next.app.task_runner.apply_disabled_step_fallbacks", return_value=False):
                    with patch("biliup_next.app.task_runner.next_runnable_step", return_value=(None, None)):
                        result = process_task("task-1", reset_step="transcribe")

        # The failed step returns to pending and the task rolls back to "created".
        self.assertTrue(result["processed"][0]["reset"])
        self.assertEqual(repo.step_updates[0][1], "transcribe")
        self.assertEqual(repo.step_updates[0][2], "pending")
        self.assertEqual(repo.task_updates[0][1], "created")

    def test_process_task_sets_task_running_before_execute_step(self) -> None:
        task = SimpleNamespace(id="task-1", status="created", updated_at="2026-01-01T00:00:00+00:00")
        pending_step = TaskStep(None, "task-1", "transcribe", "pending", None, None, 0, None, None)
        repo = FakeRunnerRepo(task, [pending_step])
        state = self._state(repo)

        with patch("biliup_next.app.task_runner.ensure_initialized", return_value=state):
            with patch("biliup_next.app.task_runner.record_task_action"):
                with patch("biliup_next.app.task_runner.apply_disabled_step_fallbacks", return_value=False):
                    with patch("biliup_next.app.task_runner.next_runnable_step", side_effect=[("transcribe", None), (None, None)]):
                        with patch("biliup_next.app.task_runner.execute_step", return_value={"task_id": "task-1", "step": "transcribe"}):
                            result = process_task("task-1")

        # The step is claimed, the task flips to running, then the step executes.
        self.assertEqual(repo.claims[0][1], "transcribe")
        self.assertEqual(repo.task_updates[0][1], "running")
        self.assertEqual(result["processed"][0]["step"], "transcribe")
|
||||
|
||||
# Allow running this test module directly with `python`.
if __name__ == "__main__":
    unittest.main()
|
||||
Reference in New Issue
Block a user