feat: package docker deployment and publish flow

This commit is contained in:
theshy
2026-04-22 16:20:03 +08:00
parent 055474360e
commit 2146687dc6
178 changed files with 24318 additions and 20855 deletions

File diff suppressed because it is too large Load Diff

View File

@ -88,7 +88,7 @@ class BilibiliTopCommentProviderTests(unittest.TestCase):
self.assertEqual(result["split"]["reason"], "comment_disabled")
self.assertEqual(len(api.reply_calls), 1)
self.assertIn("P1:\n1. Song A — Artist A", api.reply_calls[0]["content"])
self.assertIn("P2:\n1. Song B — Artist B", api.reply_calls[0]["content"])
self.assertIn("P2:\n2. Song B — Artist B", api.reply_calls[0]["content"])
def test_split_comment_skips_on_non_anchor_task(self) -> None:
api = _FakeBilibiliApi()
@ -212,6 +212,63 @@ class BilibiliTopCommentProviderTests(unittest.TestCase):
self.assertEqual(result["split"]["reason"], "comment_disabled")
self.assertTrue((work_dir / "comment_done.flag").exists())
def test_comment_format_can_be_configured_from_upload_config(self) -> None:
    """Split-comment header and song-line templates are read from upload_config.json."""
    api = _FakeBilibiliApi()
    provider = BilibiliTopCommentProvider(bilibili_api=api)
    with tempfile.TemporaryDirectory() as tmpdir:
        root = Path(tmpdir)
        work_dir = root / "task-1"
        work_dir.mkdir(parents=True, exist_ok=True)
        task = Task(
            id="task-1",
            source_type="local_file",
            source_path=str(work_dir / "source.mp4"),
            title="task-1",
            status="published",
            created_at=utc_now_iso(),
            updated_at=utc_now_iso(),
        )
        # Work-dir fixtures the provider reads: song metadata plus both bvid files.
        (work_dir / "songs.txt").write_text("00:00:00 Song From Text — Artist T\n", encoding="utf-8")
        (work_dir / "songs.json").write_text(
            json.dumps({"songs": [{"title": "Song A", "artist": "Artist A"}]}),
            encoding="utf-8",
        )
        (work_dir / "bvid.txt").write_text("BV1COMMENT123", encoding="utf-8")
        (work_dir / "full_video_bvid.txt").write_text("BV1FULL12345", encoding="utf-8")
        cookies_file = root / "cookies.json"
        cookies_file.write_text("{}", encoding="utf-8")
        # Custom templates: header interpolates full-video links, song lines use "#index".
        template = {
            "comment_template": {
                "split_header": "这是纯享:{current_full_video_link}\n上一场:{previous_full_video_link}",
                "split_song_line": "#{song_index} {title} / {artist}",
            }
        }
        upload_config = root / "upload_config.json"
        upload_config.write_text(json.dumps(template), encoding="utf-8")
        settings = {
            "session_dir": str(root),
            "cookies_file": str(cookies_file),
            "upload_config_file": str(upload_config),
            "post_split_comment": True,
            "post_full_video_timeline_comment": False,
        }
        result = provider.comment(task, settings)
        self.assertEqual(result["status"], "ok")
        self.assertEqual(result["split"]["reason"], "comment_disabled")
        self.assertEqual(len(api.reply_calls), 1)
        content = str(api.reply_calls[0]["content"])
        # NOTE(review): the template header has a fullwidth colon after 这是纯享, but this
        # assertion matches without it — presumably the provider drops it when rendering;
        # confirm against the provider's template logic.
        self.assertIn("这是纯享https://www.bilibili.com/video/BV1FULL12345", content)
        self.assertNotIn("上一场:", content)
        self.assertIn("#1 Song A / Artist A", content)
def test_full_comment_aggregates_session_parts_on_anchor_task(self) -> None:
api = _FakeBilibiliApi()
provider = BilibiliTopCommentProvider(bilibili_api=api)
@ -263,8 +320,8 @@ class BilibiliTopCommentProviderTests(unittest.TestCase):
self.assertEqual(result["full"]["status"], "skipped")
self.assertEqual(result["full"]["reason"], "comment_disabled")
self.assertEqual(len(api.reply_calls), 1)
self.assertIn("P1:\n00:00:01 Song A\n00:02:00 Song B", api.reply_calls[0]["content"])
self.assertIn("P2:\n00:00:03 Song C", api.reply_calls[0]["content"])
self.assertIn("P1:\n1. 00:00:01 Song A\n2. 00:02:00 Song B", api.reply_calls[0]["content"])
self.assertIn("P2:\n3. 00:00:03 Song C", api.reply_calls[0]["content"])
def test_full_comment_skips_on_non_anchor_task(self) -> None:
api = _FakeBilibiliApi()

View File

@ -269,6 +269,117 @@ class BiliupCliPublishProviderTests(unittest.TestCase):
self.assertIn("BV1RESUME1234", adapter.run_calls[0]["cmd"])
self.assertTrue((work_dir / "upload_done.flag").exists())
def test_publish_recovers_bvid_from_progress_when_bvid_file_was_removed(self) -> None:
    """publish() falls back to publish_progress.json for the bvid and resumes appending."""
    adapter = _FakeBiliupAdapter()
    provider = BiliupCliPublishProvider(adapter=adapter)
    with tempfile.TemporaryDirectory() as tmpdir:
        root = Path(tmpdir)
        work_dir = root / "task-1"
        work_dir.mkdir(parents=True, exist_ok=True)
        task = Task(
            id="task-1",
            source_type="local_file",
            source_path=str(work_dir / "source.mp4"),
            title="task-1",
            status="split_done",
            created_at=utc_now_iso(),
            updated_at=utc_now_iso(),
        )
        (work_dir / "songs.txt").write_text("00:00:00 Test Song - Tester\n", encoding="utf-8")
        (work_dir / "songs.json").write_text(json.dumps({"songs": [{"title": "Test Song"}]}), encoding="utf-8")
        # No bvid.txt on disk: only the progress checkpoint still knows the bvid.
        (work_dir / "publish_progress.json").write_text(
            json.dumps({"bvid": "BV1RESUME1234", "completed_append_batches": [2]}),
            encoding="utf-8",
        )
        upload_config = root / "upload_config.json"
        upload_config.write_text("{}", encoding="utf-8")
        clips = []
        for n in range(1, 16):
            clip_path = work_dir / f"clip-{n}.mp4"
            clip_path.write_text("fake", encoding="utf-8")
            clips.append(Artifact(None, task.id, "clip_video", str(clip_path), "{}", utc_now_iso()))
        settings = {
            "session_dir": str(root),
            "upload_config_file": str(upload_config),
            "biliup_path": "runtime/biliup",
            "cookie_file": "runtime/cookies.json",
            "retry_count": 2,
            "command_timeout_seconds": 123,
        }
        with patch("biliup_next.modules.publish.providers.biliup_cli.time.sleep", return_value=None):
            record = provider.publish(task, clips, settings)
        self.assertEqual(record.bvid, "BV1RESUME1234")
        # The recovered bvid is re-materialised to bvid.txt for later pipeline steps.
        self.assertEqual((work_dir / "bvid.txt").read_text(encoding="utf-8"), "BV1RESUME1234")
        # Only the remaining batch is uploaded, as an append against the known bvid.
        self.assertEqual(len(adapter.run_calls), 1)
        self.assertIn("append", adapter.run_calls[0]["cmd"])
        self.assertIn("BV1RESUME1234", adapter.run_calls[0]["cmd"])
def test_publish_renumbers_clip_filenames_across_aggregated_sessions(self) -> None:
    """Clips aggregated from several session dirs get one continuous NN_ prefix sequence."""
    adapter = _FakeBiliupAdapter()
    provider = BiliupCliPublishProvider(adapter=adapter)
    with tempfile.TemporaryDirectory() as tmpdir:
        root = Path(tmpdir)
        work_dir = root / "task-1"
        second_dir = root / "task-2"
        for directory in (work_dir, second_dir):
            directory.mkdir(parents=True, exist_ok=True)
        task = Task(
            id="task-1",
            source_type="local_file",
            source_path=str(work_dir / "source.mp4"),
            title="task-1",
            status="split_done",
            created_at=utc_now_iso(),
            updated_at=utc_now_iso(),
        )
        (work_dir / "songs.txt").write_text("00:00:00 Test Song - Tester\n", encoding="utf-8")
        (work_dir / "songs.json").write_text(json.dumps({"songs": [{"title": "Test Song"}]}), encoding="utf-8")
        upload_config = root / "upload_config.json"
        upload_config.write_text("{}", encoding="utf-8")

        def make_clip(directory: Path, owner: str, name: str) -> Artifact:
            # Create a placeholder clip file on disk and wrap it in an Artifact row.
            clip_path = directory / name
            clip_path.write_text("fake", encoding="utf-8")
            return Artifact(None, owner, "clip_video", str(clip_path), "{}", utc_now_iso())

        clips = [make_clip(work_dir, task.id, f"{i:02d}_first-{i}.mp4") for i in range(1, 11)]
        clips += [make_clip(second_dir, "task-2", f"{i:02d}_second-{i}.mp4") for i in range(1, 8)]
        with patch("biliup_next.modules.publish.providers.biliup_cli.time.sleep", return_value=None):
            provider.publish(
                task,
                clips,
                {
                    "session_dir": str(root),
                    "upload_config_file": str(upload_config),
                    "biliup_path": "runtime/biliup",
                    "cookie_file": "runtime/cookies.json",
                    "retry_count": 1,
                    "command_timeout_seconds": 123,
                },
            )
        # Collect every .mp4 argument across all adapter invocations, in order.
        all_uploaded = [part for call in adapter.run_calls for part in call["cmd"] if str(part).endswith(".mp4")]
        self.assertTrue(all_uploaded[0].endswith("01_first-1.mp4"))
        self.assertTrue(all_uploaded[9].endswith("10_first-10.mp4"))
        # Clips from the second session continue the numbering instead of restarting at 01.
        self.assertTrue(all_uploaded[10].endswith("11_second-1.mp4"))
        self.assertTrue(all_uploaded[16].endswith("17_second-7.mp4"))
def test_publish_creates_progress_from_existing_bvid_for_append_resume(self) -> None:
adapter = _FakeBiliupAdapter()
provider = BiliupCliPublishProvider(adapter=adapter)

View File

@ -0,0 +1,85 @@
from __future__ import annotations
import unittest
from types import SimpleNamespace
from biliup_next.core.models import Task, TaskStep, utc_now_iso
from biliup_next.modules.collection.service import CollectionService
class _FakeRegistry:
    """Provider-registry double: every lookup resolves to the single stored provider."""

    def __init__(self, provider) -> None:  # type: ignore[no-untyped-def]
        # The one provider returned for all (type, id) pairs.
        self.provider = provider

    def get(self, provider_type: str, provider_id: str):  # type: ignore[no-untyped-def]
        # Arguments are accepted only for interface compatibility; they are ignored.
        return self.provider
class _FakeProvider:
def sync(self, task, target: str, settings: dict[str, object]) -> dict[str, object]: # type: ignore[no-untyped-def]
return {"status": "skipped", "target": target}
class _FakeRepo:
    """In-memory repo double tracking one task and its two collection steps."""

    def __init__(self) -> None:
        now = utc_now_iso()
        self.task = Task("task-1", "local_file", "/tmp/source.mp4", "task-1", "running", now, now)
        # Both collection steps start out pending.
        self.steps = {
            name: TaskStep(None, "task-1", name, "pending", None, None, 0, None, None)
            for name in ("collection_a", "collection_b")
        }
        # Every update_task_status call is recorded as (task_id, status).
        self.task_status_updates: list[tuple[str, str]] = []

    def get_task(self, task_id: str):  # type: ignore[no-untyped-def]
        if task_id == self.task.id:
            return self.task
        return None

    def update_step_status(self, task_id: str, step_name: str, status: str, **kwargs) -> None:  # type: ignore[no-untyped-def]
        # Rebuild the immutable step row, overlaying only the fields supplied in kwargs.
        old = self.steps[step_name]
        self.steps[step_name] = TaskStep(
            old.id,
            old.task_id,
            old.step_name,
            status,
            kwargs.get("error_code", old.error_code),
            kwargs.get("error_message", old.error_message),
            kwargs.get("retry_count", old.retry_count),
            kwargs.get("started_at", old.started_at),
            kwargs.get("finished_at", old.finished_at),
        )

    def list_steps(self, task_id: str) -> list[TaskStep]:
        return list(self.steps.values())

    def update_task_status(self, task_id: str, status: str, updated_at: str) -> None:
        self.task_status_updates.append((task_id, status))
class CollectionServiceTests(unittest.TestCase):
    """Status transitions performed by CollectionService.run for steps a and b."""

    def test_collection_a_restores_commented_status_so_collection_b_can_run(self) -> None:
        repo = _FakeRepo()
        service = CollectionService(_FakeRegistry(_FakeProvider()), repo)  # type: ignore[arg-type]
        service.cleanup = SimpleNamespace(cleanup_task_outputs=lambda task_id, settings: {})  # type: ignore[assignment]
        result = service.run("task-1", "a", {"provider": "fake"})
        self.assertEqual(result["status"], "skipped")
        # Step a succeeds, yet the task status drops back to "commented"
        # so that collection_b remains eligible to run afterwards.
        self.assertEqual(repo.steps["collection_a"].status, "succeeded")
        self.assertEqual(repo.steps["collection_b"].status, "pending")
        self.assertEqual(repo.task_status_updates[-1], ("task-1", "commented"))

    def test_collection_b_marks_collection_synced_when_both_steps_succeeded(self) -> None:
        repo = _FakeRepo()
        # Pretend collection_a already finished successfully.
        repo.steps["collection_a"] = TaskStep(None, "task-1", "collection_a", "succeeded", None, None, 0, None, utc_now_iso())
        service = CollectionService(_FakeRegistry(_FakeProvider()), repo)  # type: ignore[arg-type]
        service.cleanup = SimpleNamespace(cleanup_task_outputs=lambda task_id, settings: {"deleted": []})  # type: ignore[assignment]
        result = service.run("task-1", "b", {"provider": "fake"})
        self.assertEqual(result["status"], "skipped")
        self.assertEqual(repo.steps["collection_b"].status, "succeeded")
        # With both steps done the task is promoted and the cleanup report is surfaced.
        self.assertEqual(repo.task_status_updates[-1], ("task-1", "collection_synced"))
        self.assertEqual(result["cleanup"], {"deleted": []})
if __name__ == "__main__":
    # Allow running this test module directly via `python path/to/file.py`.
    unittest.main()

View File

@ -1,149 +1,149 @@
from __future__ import annotations
import tempfile
import unittest
from http import HTTPStatus
from pathlib import Path
from types import SimpleNamespace
from biliup_next.app.control_plane_get_dispatcher import ControlPlaneGetDispatcher
from biliup_next.core.models import ActionRecord, Task, TaskContext
class FakeRepo:
    """Read-only repo double exposing one task plus optional context and action rows."""

    def __init__(self, task: Task, context: TaskContext | None = None, actions: list[ActionRecord] | None = None) -> None:
        self.task = task
        self.context = context
        self.actions = actions or []

    def query_tasks(self, **kwargs):  # type: ignore[no-untyped-def]
        # Always one page containing exactly the single task.
        return [self.task], 1

    def get_task(self, task_id: str) -> Task | None:
        if task_id == self.task.id:
            return self.task
        return None

    def get_task_context(self, task_id: str) -> TaskContext | None:
        if self.context and self.context.task_id == task_id:
            return self.context
        return None

    def list_task_contexts_for_task_ids(self, task_ids: list[str]) -> dict[str, TaskContext]:
        if self.context and self.context.task_id in task_ids:
            return {self.context.task_id: self.context}
        return {}

    def list_steps_for_task_ids(self, task_ids: list[str]) -> dict[str, list[object]]:
        if self.task.id in task_ids:
            return {self.task.id: []}
        return {}

    def list_task_contexts_by_session_key(self, session_key: str) -> list[TaskContext]:
        if self.context and self.context.session_key == session_key:
            return [self.context]
        return []

    def list_steps(self, task_id: str) -> list[object]:
        return []

    def list_artifacts(self, task_id: str) -> list[object]:
        return []

    def list_action_records(
        self,
        task_id: str | None = None,
        limit: int = 200,
        action_name: str | None = None,
        status: str | None = None,
    ) -> list[ActionRecord]:
        """Apply each optional filter in turn, then cap the result at *limit*."""
        matched = list(self.actions)
        if task_id is not None:
            matched = [a for a in matched if a.task_id == task_id]
        if action_name is not None:
            matched = [a for a in matched if a.action_name == action_name]
        if status is not None:
            matched = [a for a in matched if a.status == status]
        return matched[:limit]
class FakeSettingsService:
    """Settings-service double returning canned redacted settings and a fixed schema."""

    def __init__(self, root) -> None:  # type: ignore[no-untyped-def]
        self.root = root

    def load_redacted(self):
        # Mirrors the real service's redacted view: a runtime control token only.
        redacted = {"runtime": {"control_token": "secret"}}
        return SimpleNamespace(settings=redacted)

    def load(self):
        return SimpleNamespace(schema={"title": "SettingsSchema"})
class ControlPlaneGetDispatcherTests(unittest.TestCase):
    """GET-side control-plane dispatcher behaviour against fake state."""

    @staticmethod
    def _make_task(status: str) -> Task:
        """Build a minimal task row with fixed timestamps."""
        stamp = "2026-01-01T00:00:00+00:00"
        return Task("task-1", "local_file", "/tmp/source.mp4", "task-title", status, stamp, stamp)

    def _dispatcher(self, tmpdir: str, repo: FakeRepo) -> ControlPlaneGetDispatcher:
        state = {
            "root": Path(tmpdir),
            "repo": repo,
            "settings": {
                "paths": {"session_dir": str(Path(tmpdir) / "session")},
                "comment": {"post_split_comment": True, "post_full_video_timeline_comment": True},
                "cleanup": {},
                "publish": {},
            },
            "registry": SimpleNamespace(list_manifests=lambda: [{"name": "publish.biliup_cli"}]),
            "manifests": [{"name": "publish.biliup_cli"}],
        }
        return ControlPlaneGetDispatcher(
            state,
            attention_state_fn=lambda payload: "running" if payload.get("status") == "running" else "stable",
            delivery_state_label_fn=lambda payload: "pending_comment" if payload.get("delivery_state", {}).get("split_comment") == "pending" else "stable",
            build_scheduler_preview_fn=lambda state, include_stage_scan=False, limit=200: {"items": [{"limit": limit}]},
            settings_service_factory=FakeSettingsService,
        )

    def test_handle_settings_schema_returns_schema(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            dispatcher = self._dispatcher(tmpdir, FakeRepo(self._make_task("published")))
            body, status = dispatcher.handle_settings_schema()
            self.assertEqual(status, HTTPStatus.OK)
            self.assertEqual(body["title"], "SettingsSchema")

    def test_handle_history_filters_records(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            actions = [
                ActionRecord(None, "task-1", "comment", "ok", "comment ok", "{}", "2026-01-01T00:01:00+00:00"),
                ActionRecord(None, "task-1", "publish", "error", "publish failed", "{}", "2026-01-01T00:02:00+00:00"),
            ]
            dispatcher = self._dispatcher(tmpdir, FakeRepo(self._make_task("published"), actions=actions))
            body, status = dispatcher.handle_history(limit=100, task_id="task-1", action_name="comment", status="ok")
            self.assertEqual(status, HTTPStatus.OK)
            # Only the matching "comment"/"ok" record survives the filters.
            self.assertEqual(len(body["items"]), 1)
            self.assertEqual(body["items"][0]["action_name"], "comment")

    def test_handle_session_returns_not_found_when_missing(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            dispatcher = self._dispatcher(tmpdir, FakeRepo(self._make_task("published")))
            body, status = dispatcher.handle_session("missing-session")
            self.assertEqual(status, HTTPStatus.NOT_FOUND)
            self.assertEqual(body["error"], "session not found")

    def test_handle_tasks_filters_attention(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            dispatcher = self._dispatcher(tmpdir, FakeRepo(self._make_task("running")))
            body, status = dispatcher.handle_tasks(
                limit=10,
                offset=0,
                status=None,
                search=None,
                sort="updated_desc",
                attention="running",
                delivery=None,
            )
            self.assertEqual(status, HTTPStatus.OK)
            self.assertEqual(body["total"], 1)
            self.assertEqual(body["items"][0]["id"], "task-1")
from __future__ import annotations
import tempfile
import unittest
from http import HTTPStatus
from pathlib import Path
from types import SimpleNamespace
from biliup_next.app.control_plane_get_dispatcher import ControlPlaneGetDispatcher
from biliup_next.core.models import ActionRecord, Task, TaskContext
class FakeRepo:
    """Read-only repo double exposing one task plus optional context and action rows."""

    def __init__(self, task: Task, context: TaskContext | None = None, actions: list[ActionRecord] | None = None) -> None:
        self.task = task
        self.context = context
        self.actions = actions or []

    def query_tasks(self, **kwargs):  # type: ignore[no-untyped-def]
        # Always one page containing exactly the single task.
        return [self.task], 1

    def get_task(self, task_id: str) -> Task | None:
        if task_id == self.task.id:
            return self.task
        return None

    def get_task_context(self, task_id: str) -> TaskContext | None:
        if self.context and self.context.task_id == task_id:
            return self.context
        return None

    def list_task_contexts_for_task_ids(self, task_ids: list[str]) -> dict[str, TaskContext]:
        if self.context and self.context.task_id in task_ids:
            return {self.context.task_id: self.context}
        return {}

    def list_steps_for_task_ids(self, task_ids: list[str]) -> dict[str, list[object]]:
        if self.task.id in task_ids:
            return {self.task.id: []}
        return {}

    def list_task_contexts_by_session_key(self, session_key: str) -> list[TaskContext]:
        if self.context and self.context.session_key == session_key:
            return [self.context]
        return []

    def list_steps(self, task_id: str) -> list[object]:
        return []

    def list_artifacts(self, task_id: str) -> list[object]:
        return []

    def list_action_records(
        self,
        task_id: str | None = None,
        limit: int = 200,
        action_name: str | None = None,
        status: str | None = None,
    ) -> list[ActionRecord]:
        """Apply each optional filter in turn, then cap the result at *limit*."""
        matched = list(self.actions)
        if task_id is not None:
            matched = [a for a in matched if a.task_id == task_id]
        if action_name is not None:
            matched = [a for a in matched if a.action_name == action_name]
        if status is not None:
            matched = [a for a in matched if a.status == status]
        return matched[:limit]
class FakeSettingsService:
    """Settings-service double returning canned redacted settings and a fixed schema."""

    def __init__(self, root) -> None:  # type: ignore[no-untyped-def]
        self.root = root

    def load_redacted(self):
        # Mirrors the real service's redacted view: a runtime control token only.
        redacted = {"runtime": {"control_token": "secret"}}
        return SimpleNamespace(settings=redacted)

    def load(self):
        return SimpleNamespace(schema={"title": "SettingsSchema"})
class ControlPlaneGetDispatcherTests(unittest.TestCase):
    """GET-side control-plane dispatcher behaviour against fake state."""

    @staticmethod
    def _make_task(status: str) -> Task:
        """Build a minimal task row with fixed timestamps."""
        stamp = "2026-01-01T00:00:00+00:00"
        return Task("task-1", "local_file", "/tmp/source.mp4", "task-title", status, stamp, stamp)

    def _dispatcher(self, tmpdir: str, repo: FakeRepo) -> ControlPlaneGetDispatcher:
        state = {
            "root": Path(tmpdir),
            "repo": repo,
            "settings": {
                "paths": {"session_dir": str(Path(tmpdir) / "session")},
                "comment": {"post_split_comment": True, "post_full_video_timeline_comment": True},
                "cleanup": {},
                "publish": {},
            },
            "registry": SimpleNamespace(list_manifests=lambda: [{"name": "publish.biliup_cli"}]),
            "manifests": [{"name": "publish.biliup_cli"}],
        }
        return ControlPlaneGetDispatcher(
            state,
            attention_state_fn=lambda payload: "running" if payload.get("status") == "running" else "stable",
            delivery_state_label_fn=lambda payload: "pending_comment" if payload.get("delivery_state", {}).get("split_comment") == "pending" else "stable",
            build_scheduler_preview_fn=lambda state, include_stage_scan=False, limit=200: {"items": [{"limit": limit}]},
            settings_service_factory=FakeSettingsService,
        )

    def test_handle_settings_schema_returns_schema(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            dispatcher = self._dispatcher(tmpdir, FakeRepo(self._make_task("published")))
            body, status = dispatcher.handle_settings_schema()
            self.assertEqual(status, HTTPStatus.OK)
            self.assertEqual(body["title"], "SettingsSchema")

    def test_handle_history_filters_records(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            actions = [
                ActionRecord(None, "task-1", "comment", "ok", "comment ok", "{}", "2026-01-01T00:01:00+00:00"),
                ActionRecord(None, "task-1", "publish", "error", "publish failed", "{}", "2026-01-01T00:02:00+00:00"),
            ]
            dispatcher = self._dispatcher(tmpdir, FakeRepo(self._make_task("published"), actions=actions))
            body, status = dispatcher.handle_history(limit=100, task_id="task-1", action_name="comment", status="ok")
            self.assertEqual(status, HTTPStatus.OK)
            # Only the matching "comment"/"ok" record survives the filters.
            self.assertEqual(len(body["items"]), 1)
            self.assertEqual(body["items"][0]["action_name"], "comment")

    def test_handle_session_returns_not_found_when_missing(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            dispatcher = self._dispatcher(tmpdir, FakeRepo(self._make_task("published")))
            body, status = dispatcher.handle_session("missing-session")
            self.assertEqual(status, HTTPStatus.NOT_FOUND)
            self.assertEqual(body["error"], "session not found")

    def test_handle_tasks_filters_attention(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            dispatcher = self._dispatcher(tmpdir, FakeRepo(self._make_task("running")))
            body, status = dispatcher.handle_tasks(
                limit=10,
                offset=0,
                status=None,
                search=None,
                sort="updated_desc",
                attention="running",
                delivery=None,
            )
            self.assertEqual(status, HTTPStatus.OK)
            self.assertEqual(body["total"], 1)
            self.assertEqual(body["items"][0]["id"], "task-1")

View File

@ -1,111 +1,111 @@
from __future__ import annotations
import io
import tempfile
import unittest
from http import HTTPStatus
from pathlib import Path
from types import SimpleNamespace
from biliup_next.app.control_plane_post_dispatcher import ControlPlanePostDispatcher
from biliup_next.core.models import Task
class FakeRepo:
    """Repo double that records action rows appended by the dispatcher."""

    def __init__(self) -> None:
        # Every add_action_record() call is kept in arrival order for assertions.
        self.actions = []

    def add_action_record(self, action) -> None:  # type: ignore[no-untyped-def]
        self.actions.append(action)
class ModuleError(Exception):
    """Domain-error double exposing the dict payload the dispatcher serialises."""

    def to_dict(self) -> dict[str, object]:
        # The dispatcher maps this payload straight into the HTTP response body.
        return dict(error="conflict")
class ControlPlanePostDispatcherTests(unittest.TestCase):
    """POST-side control-plane dispatcher behaviour against fake collaborators."""

    def _dispatcher(self, tmpdir: str, repo: FakeRepo, *, ingest_service: object | None = None) -> ControlPlanePostDispatcher:
        stamp = "2026-01-01T00:00:00+00:00"
        # Default ingest double: turns any path into a freshly created task row.
        default_ingest = SimpleNamespace(
            create_task_from_file=lambda path, settings: Task(
                "task-1",
                "local_file",
                str(path),
                "task-title",
                "created",
                stamp,
                stamp,
            )
        )
        state = {
            "repo": repo,
            "root": Path(tmpdir),
            "settings": {
                "paths": {"stage_dir": str(Path(tmpdir) / "stage"), "session_dir": str(Path(tmpdir) / "session")},
                "ingest": {"stage_min_free_space_mb": 100},
            },
            "ingest_service": ingest_service or default_ingest,
        }
        return ControlPlanePostDispatcher(
            state,
            bind_full_video_action=lambda task_id, bvid: {"task_id": task_id, "full_video_bvid": bvid},
            merge_session_action=lambda session_key, task_ids: {"session_key": session_key, "task_ids": task_ids},
            receive_full_video_webhook=lambda payload: {"ok": True, **payload},
            rebind_session_full_video_action=lambda session_key, bvid: {"session_key": session_key, "full_video_bvid": bvid},
            reset_to_step_action=lambda task_id, step_name: {"task_id": task_id, "step_name": step_name},
            retry_step_action=lambda task_id, step_name: {"task_id": task_id, "step_name": step_name},
            run_task_action=lambda task_id: {"task_id": task_id},
            run_once=lambda: {"scheduler": {"scan_count": 1}, "worker": {"picked": 1}},
            stage_importer_factory=lambda: SimpleNamespace(
                import_file=lambda source, dest, min_free_bytes=0: {"imported_to": str(dest / source.name)},
                import_upload=lambda filename, fileobj, dest, min_free_bytes=0: {"filename": filename, "dest": str(dest)},
            ),
            systemd_runtime_factory=lambda: SimpleNamespace(act=lambda service, action: {"service": service, "action": action, "command_ok": True}),
        )

    def test_handle_bind_full_video_maps_missing_bvid(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            dispatcher = self._dispatcher(tmpdir, FakeRepo())
            body, status = dispatcher.handle_bind_full_video("task-1", {})
            self.assertEqual(status, HTTPStatus.BAD_REQUEST)
            self.assertEqual(body["error"], "missing full_video_bvid")

    def test_handle_worker_run_once_records_action(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            repo = FakeRepo()
            body, status = self._dispatcher(tmpdir, repo).handle_worker_run_once()
            self.assertEqual(status, HTTPStatus.ACCEPTED)
            self.assertEqual(body["worker"]["picked"], 1)
            # The run is persisted as an auditable action record.
            self.assertEqual(repo.actions[-1].action_name, "worker_run_once")

    def test_handle_stage_upload_returns_created(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            dispatcher = self._dispatcher(tmpdir, FakeRepo())
            file_item = SimpleNamespace(filename="incoming.mp4", file=io.BytesIO(b"video"))
            body, status = dispatcher.handle_stage_upload(file_item)
            self.assertEqual(status, HTTPStatus.CREATED)
            self.assertEqual(body["filename"], "incoming.mp4")

    def test_handle_create_task_maps_module_error_to_conflict(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            def raise_module_error(path, settings):  # type: ignore[no-untyped-def]
                raise ModuleError()

            dispatcher = self._dispatcher(
                tmpdir,
                FakeRepo(),
                ingest_service=SimpleNamespace(create_task_from_file=raise_module_error),
            )
            body, status = dispatcher.handle_create_task({"source_path": str(Path(tmpdir) / "source.mp4")})
            self.assertEqual(status, HTTPStatus.CONFLICT)
            self.assertEqual(body["error"], "conflict")
from __future__ import annotations
import io
import tempfile
import unittest
from http import HTTPStatus
from pathlib import Path
from types import SimpleNamespace
from biliup_next.app.control_plane_post_dispatcher import ControlPlanePostDispatcher
from biliup_next.core.models import Task
class FakeRepo:
    """Repo double that records action rows appended by the dispatcher."""

    def __init__(self) -> None:
        # Every add_action_record() call is kept in arrival order for assertions.
        self.actions = []

    def add_action_record(self, action) -> None:  # type: ignore[no-untyped-def]
        self.actions.append(action)
class ModuleError(Exception):
    """Domain-error double exposing the dict payload the dispatcher serialises."""

    def to_dict(self) -> dict[str, object]:
        # The dispatcher maps this payload straight into the HTTP response body.
        return dict(error="conflict")
class ControlPlanePostDispatcherTests(unittest.TestCase):
    """POST-side control-plane dispatcher behaviour against fake collaborators."""

    def _dispatcher(self, tmpdir: str, repo: FakeRepo, *, ingest_service: object | None = None) -> ControlPlanePostDispatcher:
        stamp = "2026-01-01T00:00:00+00:00"
        # Default ingest double: turns any path into a freshly created task row.
        default_ingest = SimpleNamespace(
            create_task_from_file=lambda path, settings: Task(
                "task-1",
                "local_file",
                str(path),
                "task-title",
                "created",
                stamp,
                stamp,
            )
        )
        state = {
            "repo": repo,
            "root": Path(tmpdir),
            "settings": {
                "paths": {"stage_dir": str(Path(tmpdir) / "stage"), "session_dir": str(Path(tmpdir) / "session")},
                "ingest": {"stage_min_free_space_mb": 100},
            },
            "ingest_service": ingest_service or default_ingest,
        }
        return ControlPlanePostDispatcher(
            state,
            bind_full_video_action=lambda task_id, bvid: {"task_id": task_id, "full_video_bvid": bvid},
            merge_session_action=lambda session_key, task_ids: {"session_key": session_key, "task_ids": task_ids},
            receive_full_video_webhook=lambda payload: {"ok": True, **payload},
            rebind_session_full_video_action=lambda session_key, bvid: {"session_key": session_key, "full_video_bvid": bvid},
            reset_to_step_action=lambda task_id, step_name: {"task_id": task_id, "step_name": step_name},
            retry_step_action=lambda task_id, step_name: {"task_id": task_id, "step_name": step_name},
            run_task_action=lambda task_id: {"task_id": task_id},
            run_once=lambda: {"scheduler": {"scan_count": 1}, "worker": {"picked": 1}},
            stage_importer_factory=lambda: SimpleNamespace(
                import_file=lambda source, dest, min_free_bytes=0: {"imported_to": str(dest / source.name)},
                import_upload=lambda filename, fileobj, dest, min_free_bytes=0: {"filename": filename, "dest": str(dest)},
            ),
            systemd_runtime_factory=lambda: SimpleNamespace(act=lambda service, action: {"service": service, "action": action, "command_ok": True}),
        )

    def test_handle_bind_full_video_maps_missing_bvid(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            dispatcher = self._dispatcher(tmpdir, FakeRepo())
            body, status = dispatcher.handle_bind_full_video("task-1", {})
            self.assertEqual(status, HTTPStatus.BAD_REQUEST)
            self.assertEqual(body["error"], "missing full_video_bvid")

    def test_handle_worker_run_once_records_action(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            repo = FakeRepo()
            body, status = self._dispatcher(tmpdir, repo).handle_worker_run_once()
            self.assertEqual(status, HTTPStatus.ACCEPTED)
            self.assertEqual(body["worker"]["picked"], 1)
            # The run is persisted as an auditable action record.
            self.assertEqual(repo.actions[-1].action_name, "worker_run_once")

    def test_handle_stage_upload_returns_created(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            dispatcher = self._dispatcher(tmpdir, FakeRepo())
            file_item = SimpleNamespace(filename="incoming.mp4", file=io.BytesIO(b"video"))
            body, status = dispatcher.handle_stage_upload(file_item)
            self.assertEqual(status, HTTPStatus.CREATED)
            self.assertEqual(body["filename"], "incoming.mp4")

    def test_handle_create_task_maps_module_error_to_conflict(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            def raise_module_error(path, settings):  # type: ignore[no-untyped-def]
                raise ModuleError()

            dispatcher = self._dispatcher(
                tmpdir,
                FakeRepo(),
                ingest_service=SimpleNamespace(create_task_from_file=raise_module_error),
            )
            body, status = dispatcher.handle_create_task({"source_path": str(Path(tmpdir) / "source.mp4")})
            self.assertEqual(status, HTTPStatus.CONFLICT)
            self.assertEqual(body["error"], "conflict")

View File

@ -0,0 +1,277 @@
from __future__ import annotations
import json
import tempfile
import unittest
from pathlib import Path
from types import SimpleNamespace
from unittest.mock import patch
from biliup_next.core.errors import ModuleError
from biliup_next.core.models import Artifact, Task
from biliup_next.modules.transcribe.providers.groq import GroqTranscribeProvider
class _FakeResponse:
    """Minimal stand-in for Groq's transcription response.

    Only exposes ``segments`` because that is all the provider reads.
    """

    def __init__(self, segments):
        self.segments = segments
class _FakeTranscriptions:
def __init__(self, outcomes: list[object]) -> None:
self.outcomes = list(outcomes)
self.calls: list[dict[str, object]] = []
def create(self, **kwargs): # noqa: ANN003
self.calls.append(kwargs)
outcome = self.outcomes.pop(0)
if isinstance(outcome, Exception):
raise outcome
return outcome
class _FakeGroqClient:
    """Groq client double wiring scripted outcomes into ``audio.transcriptions``."""

    def __init__(self, outcomes: list[object]) -> None:
        # Mirrors the real client's attribute path: client.audio.transcriptions.create(...)
        self.audio = SimpleNamespace(transcriptions=_FakeTranscriptions(outcomes))
class GroqTranscribeProviderTests(unittest.TestCase):
def test_transcribe_retries_timeout_and_writes_srt_atomically(self) -> None:
    """A timed-out Groq call is retried, and the SRT lands with no stale temp file."""
    provider = GroqTranscribeProvider()
    stamp = "2026-01-01T00:00:00+00:00"
    task = Task("task-1", "local_file", "/tmp/input.mp4", "demo", "created", stamp, stamp)
    with tempfile.TemporaryDirectory() as tmpdir:
        work_dir = Path(tmpdir)
        source_path = work_dir / "input.mp4"
        source_path.write_bytes(b"video")
        source_video = Artifact(None, task.id, "source_video", str(source_path), "{}", stamp)
        segment = work_dir / "temp_audio" / "part_000.mp3"

        def fake_extract_audio_segments(**kwargs):  # noqa: ANN003
            # Stand-in for ffmpeg: just materialise the single expected audio segment.
            segment.parent.mkdir(parents=True, exist_ok=True)
            segment.write_bytes(b"audio")

        # First call times out, second call returns one transcription segment.
        client = _FakeGroqClient(
            [
                RuntimeError("Request timed out."),
                _FakeResponse([{"start": 0, "end": 1.2, "text": "hello"}]),
            ]
        )
        settings = {
            "groq_api_key": "gsk_test",
            "ffmpeg_bin": "ffmpeg",
            "max_file_size_mb": 23,
            "request_timeout_seconds": 33,
            "request_max_retries": 1,
            "request_retry_backoff_seconds": 0,
            "serialize_groq_requests": False,
        }
        with patch("groq.Groq", return_value=client) as groq_ctor:
            with patch.object(provider, "_extract_audio_segments", side_effect=fake_extract_audio_segments):
                artifact = provider.transcribe(task, source_video, settings)
        # end=1.2 renders as 00:00:01,199 — millisecond truncation of the binary float.
        self.assertEqual(Path(artifact.path).read_text(encoding="utf-8"), "1\n00:00:00,000 --> 00:00:01,199\nhello\n\n")
        # Atomic write: the temp file must be gone once the SRT is in place.
        self.assertFalse((work_dir / ".demo.srt.tmp").exists())
        self.assertEqual(len(client.audio.transcriptions.calls), 2)
        self.assertEqual(client.audio.transcriptions.calls[0]["timeout"], 33)
        # A per-segment checkpoint is persisted for resume support.
        self.assertTrue((work_dir / "transcribe_segments" / "part_000.json").exists())
        groq_ctor.assert_called_once_with(api_key="gsk_test", timeout=33, max_retries=0)
def test_transcribe_reuses_completed_segment_checkpoints(self) -> None:
provider = GroqTranscribeProvider()
task = Task("task-1", "local_file", "/tmp/input.mp4", "demo", "created", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
with tempfile.TemporaryDirectory() as tmpdir:
work_dir = Path(tmpdir)
source_path = work_dir / "input.mp4"
source_path.write_bytes(b"video")
source_video = Artifact(None, task.id, "source_video", str(source_path), "{}", "2026-01-01T00:00:00+00:00")
segments = [work_dir / "temp_audio" / "part_000.mp3", work_dir / "temp_audio" / "part_001.mp3"]
checkpoint_dir = work_dir / "transcribe_segments"
checkpoint_dir.mkdir()
(checkpoint_dir / "part_000.json").write_text(
json.dumps(
{
"provider": "groq",
"model": "whisper-large-v3-turbo",
"language": "zh",
"audio_file": "part_000.mp3",
"segment_duration_seconds": 75,
"segments": [{"start": 0, "end": 1, "text": "first"}],
}
),
encoding="utf-8",
)
def fake_extract_audio_segments(**kwargs): # noqa: ANN003
for segment in segments:
segment.parent.mkdir(parents=True, exist_ok=True)
segment.write_bytes(b"audio")
client = _FakeGroqClient([_FakeResponse([{"start": 0, "end": 1.5, "text": "second"}])])
settings = {
"groq_api_key": "gsk_test",
"ffmpeg_bin": "ffmpeg",
"max_file_size_mb": 23,
"request_timeout_seconds": 33,
"request_max_retries": 1,
"request_retry_backoff_seconds": 0,
"serialize_groq_requests": False,
}
with patch("groq.Groq", return_value=client):
with patch.object(provider, "_initial_segment_duration", return_value=75):
with patch.object(provider, "_extract_audio_segments", side_effect=fake_extract_audio_segments):
artifact = provider.transcribe(task, source_video, settings)
srt = Path(artifact.path).read_text(encoding="utf-8")
self.assertIn("00:00:00,000 --> 00:00:01,000\nfirst", srt)
self.assertIn("00:01:15,000 --> 00:01:16,500\nsecond", srt)
self.assertEqual(len(client.audio.transcriptions.calls), 1)
self.assertEqual(client.audio.transcriptions.calls[0]["file"][0], "part_001.mp3")
self.assertTrue((checkpoint_dir / "part_001.json").exists())
def test_transcribe_switches_to_next_api_key_on_rate_limit(self) -> None:
provider = GroqTranscribeProvider()
task = Task("task-1", "local_file", "/tmp/input.mp4", "demo", "created", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
with tempfile.TemporaryDirectory() as tmpdir:
work_dir = Path(tmpdir)
source_path = work_dir / "input.mp4"
source_path.write_bytes(b"video")
source_video = Artifact(None, task.id, "source_video", str(source_path), "{}", "2026-01-01T00:00:00+00:00")
segment = work_dir / "temp_audio" / "part_000.mp3"
def fake_extract_audio_segments(**kwargs): # noqa: ANN003
segment.parent.mkdir(parents=True, exist_ok=True)
segment.write_bytes(b"audio")
limited_client = _FakeGroqClient([RuntimeError("Error code: 429 rate_limit")])
fallback_client = _FakeGroqClient([_FakeResponse([{"start": 0, "end": 1.2, "text": "fallback"}])])
settings = {
"groq_api_key": "",
"groq_api_keys": ["gsk_first", "gsk_second"],
"ffmpeg_bin": "ffmpeg",
"max_file_size_mb": 23,
"request_timeout_seconds": 20,
"request_max_retries": 0,
"request_retry_backoff_seconds": 0,
"serialize_groq_requests": False,
}
with patch("groq.Groq", side_effect=[limited_client, fallback_client]) as groq_ctor:
with patch.object(provider, "_extract_audio_segments", side_effect=fake_extract_audio_segments):
artifact = provider.transcribe(task, source_video, settings)
self.assertIn("fallback", Path(artifact.path).read_text(encoding="utf-8"))
self.assertEqual(len(limited_client.audio.transcriptions.calls), 1)
self.assertEqual(len(fallback_client.audio.transcriptions.calls), 1)
self.assertEqual([call.kwargs["api_key"] for call in groq_ctor.call_args_list], ["gsk_first", "gsk_second"])
def test_transcribe_waits_after_all_api_keys_are_rate_limited(self) -> None:
provider = GroqTranscribeProvider()
task = Task("task-1", "local_file", "/tmp/input.mp4", "demo", "created", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
with tempfile.TemporaryDirectory() as tmpdir:
work_dir = Path(tmpdir)
source_path = work_dir / "input.mp4"
source_path.write_bytes(b"video")
source_video = Artifact(None, task.id, "source_video", str(source_path), "{}", "2026-01-01T00:00:00+00:00")
segment = work_dir / "temp_audio" / "part_000.mp3"
def fake_extract_audio_segments(**kwargs): # noqa: ANN003
segment.parent.mkdir(parents=True, exist_ok=True)
segment.write_bytes(b"audio")
first_client = _FakeGroqClient([RuntimeError("429 rate_limit"), _FakeResponse([{"start": 0, "end": 1, "text": "retry ok"}])])
second_client = _FakeGroqClient([RuntimeError("429 rate_limit")])
settings = {
"groq_api_key": "",
"groq_api_keys": ["gsk_first", "gsk_second"],
"ffmpeg_bin": "ffmpeg",
"max_file_size_mb": 23,
"request_timeout_seconds": 20,
"request_max_retries": 1,
"request_retry_backoff_seconds": 7,
"serialize_groq_requests": False,
}
with patch("groq.Groq", side_effect=[first_client, second_client]):
with patch("time.sleep") as sleep_mock:
with patch.object(provider, "_extract_audio_segments", side_effect=fake_extract_audio_segments):
artifact = provider.transcribe(task, source_video, settings)
self.assertIn("retry ok", Path(artifact.path).read_text(encoding="utf-8"))
sleep_mock.assert_called_once_with(7)
self.assertEqual(len(first_client.audio.transcriptions.calls), 2)
self.assertEqual(len(second_client.audio.transcriptions.calls), 1)
def test_transcribe_raises_after_retry_budget_is_exhausted(self) -> None:
provider = GroqTranscribeProvider()
task = Task("task-1", "local_file", "/tmp/input.mp4", "demo", "created", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
with tempfile.TemporaryDirectory() as tmpdir:
work_dir = Path(tmpdir)
source_path = work_dir / "input.mp4"
source_path.write_bytes(b"video")
source_video = Artifact(None, task.id, "source_video", str(source_path), "{}", "2026-01-01T00:00:00+00:00")
segment = work_dir / "temp_audio" / "part_000.mp3"
def fake_extract_audio_segments(**kwargs): # noqa: ANN003
segment.parent.mkdir(parents=True, exist_ok=True)
segment.write_bytes(b"audio")
client = _FakeGroqClient([RuntimeError("Connection error."), RuntimeError("Connection error.")])
settings = {
"groq_api_key": "gsk_test",
"ffmpeg_bin": "ffmpeg",
"max_file_size_mb": 23,
"request_timeout_seconds": 20,
"request_max_retries": 1,
"request_retry_backoff_seconds": 0,
"serialize_groq_requests": False,
}
with patch("groq.Groq", return_value=client):
with patch.object(provider, "_extract_audio_segments", side_effect=fake_extract_audio_segments):
with self.assertRaises(ModuleError) as exc_info:
provider.transcribe(task, source_video, settings)
self.assertEqual(exc_info.exception.message, "Groq 转录失败: part_000.mp3")
def test_initial_segment_duration_keeps_safety_margin(self) -> None:
self.assertLess(GroqTranscribeProvider._initial_segment_duration(12), 1536)
def test_extract_audio_segments_retries_when_segment_exceeds_size_limit(self) -> None:
provider = GroqTranscribeProvider()
with tempfile.TemporaryDirectory() as tmpdir:
work_dir = Path(tmpdir)
temp_audio_dir = work_dir / "temp_audio"
temp_audio_dir.mkdir()
output_pattern = temp_audio_dir / "part_%03d.mp3"
durations: list[int] = []
def fake_extract_audio_segments(**kwargs): # noqa: ANN003
durations.append(int(kwargs["segment_duration"]))
size = 20 if len(durations) == 1 else 5
(temp_audio_dir / "part_000.mp3").write_bytes(b"x" * size)
with patch.object(provider, "_extract_audio_segments", side_effect=fake_extract_audio_segments):
result = provider._extract_audio_segments_with_size_guard(
ffmpeg_bin="ffmpeg",
source_path=work_dir / "input.mp4",
output_pattern=output_pattern,
temp_audio_dir=temp_audio_dir,
initial_segment_duration=100,
max_segment_bytes=10,
)
self.assertEqual(durations, [100, 75])
self.assertEqual(result, 75)
if __name__ == "__main__":
unittest.main()

View File

@ -1,49 +1,49 @@
from __future__ import annotations
import tempfile
import unittest
from pathlib import Path
from biliup_next.modules.ingest.providers.bilibili_url import BilibiliUrlIngestProvider
class FakeYtDlpAdapter:
    """Test double for the yt-dlp adapter: canned metadata, fake downloads."""

    def probe(self, *, yt_dlp_cmd: str, source_url: str):  # noqa: ANN001
        """Return fixed probe metadata regardless of the URL."""
        metadata = {
            "id": "BV1TEST1234",
            "title": "测试视频标题",
            "uploader": "测试主播",
            "duration": 321.0,
        }
        return metadata

    def download(self, *, yt_dlp_cmd: str, source_url: str, output_template: str, format_selector=None):  # noqa: ANN001
        """Write a placeholder mp4 at the templated path and report success."""
        target = Path(output_template.replace("%(ext)s", "mp4"))
        target.parent.mkdir(parents=True, exist_ok=True)
        target.write_bytes(b"fake-video")
        result_cls = type("Result", (), {"returncode": 0, "stdout": "ok", "stderr": ""})
        return result_cls()
class BilibiliUrlIngestProviderTests(unittest.TestCase):
    """Resolve + download flow of BilibiliUrlIngestProvider against a fake yt-dlp."""

    def test_resolve_and_download_source(self) -> None:
        """resolve_source surfaces probe metadata; download_source yields an .mp4 file."""
        provider = BilibiliUrlIngestProvider(yt_dlp=FakeYtDlpAdapter())
        settings = {"yt_dlp_cmd": "yt-dlp"}
        resolved = provider.resolve_source("https://www.bilibili.com/video/BV1TEST1234", settings)
        self.assertEqual(resolved["video_id"], "BV1TEST1234")
        self.assertEqual(resolved["title"], "测试视频标题")
        self.assertEqual(resolved["streamer"], "测试主播")
        with tempfile.TemporaryDirectory() as tmpdir:
            downloaded = provider.download_source(
                "https://www.bilibili.com/video/BV1TEST1234",
                Path(tmpdir),
                settings,
                task_id=str(resolved["task_id"]),
            )
            self.assertTrue(downloaded.exists())
            self.assertEqual(downloaded.suffix, ".mp4")


if __name__ == "__main__":
    unittest.main()
from __future__ import annotations
import tempfile
import unittest
from pathlib import Path
from biliup_next.modules.ingest.providers.bilibili_url import BilibiliUrlIngestProvider
class FakeYtDlpAdapter:
    """Offline stand-in for the yt-dlp adapter used by the ingest provider tests."""

    # Canned metadata returned by every probe() call.
    _PROBE_RESULT = {
        "id": "BV1TEST1234",
        "title": "测试视频标题",
        "uploader": "测试主播",
        "duration": 321.0,
    }

    def probe(self, *, yt_dlp_cmd: str, source_url: str):  # noqa: ANN001
        """Ignore the URL and return a fresh copy of the canned metadata."""
        return dict(self._PROBE_RESULT)

    def download(self, *, yt_dlp_cmd: str, source_url: str, output_template: str, format_selector=None):  # noqa: ANN001
        """Materialise a tiny fake mp4 at the resolved template path."""
        destination = Path(output_template.replace("%(ext)s", "mp4"))
        destination.parent.mkdir(parents=True, exist_ok=True)
        destination.write_bytes(b"fake-video")
        return type("Result", (), {"returncode": 0, "stdout": "ok", "stderr": ""})()
class BilibiliUrlIngestProviderTests(unittest.TestCase):
    """Exercises BilibiliUrlIngestProvider with the fake yt-dlp adapter (no network)."""

    def test_resolve_and_download_source(self) -> None:
        """Metadata from probe is mapped into the resolved dict; download produces an .mp4."""
        provider = BilibiliUrlIngestProvider(yt_dlp=FakeYtDlpAdapter())
        settings = {"yt_dlp_cmd": "yt-dlp"}
        resolved = provider.resolve_source("https://www.bilibili.com/video/BV1TEST1234", settings)
        self.assertEqual(resolved["video_id"], "BV1TEST1234")
        self.assertEqual(resolved["title"], "测试视频标题")
        self.assertEqual(resolved["streamer"], "测试主播")
        with tempfile.TemporaryDirectory() as tmpdir:
            downloaded = provider.download_source(
                "https://www.bilibili.com/video/BV1TEST1234",
                Path(tmpdir),
                settings,
                task_id=str(resolved["task_id"]),
            )
            self.assertTrue(downloaded.exists())
            self.assertEqual(downloaded.suffix, ".mp4")


if __name__ == "__main__":
    unittest.main()

View File

@ -0,0 +1,85 @@
from __future__ import annotations
import tempfile
import unittest
from pathlib import Path
from biliup_next.core.providers import ProviderManifest
from biliup_next.core.registry import Registry
from biliup_next.infra.db import Database
from biliup_next.infra.task_repository import TaskRepository
from biliup_next.modules.ingest.service import IngestService
class _FakeLocalFileProvider:
    """Ingest-provider double registered under the real ``local_file`` id."""

    # Manifest mirrors a real provider declaration so the Registry accepts it.
    manifest = ProviderManifest(
        id="local_file",
        name="Fake Local File Ingest",
        version="0.1.0",
        provider_type="ingest_provider",
        entrypoint="tests.test_ingest_scan_stage:_FakeLocalFileProvider",
        capabilities=["ingest"],
        enabled_by_default=True,
    )

    def validate_source(self, source_path: Path, settings: dict[str, object]) -> None:
        """Fail loudly if the service validates a path that does not exist as a file."""
        if not source_path.exists() or not source_path.is_file():
            raise AssertionError(f"unexpected source path: {source_path}")
class IngestScanStageTests(unittest.TestCase):
    """scan_stage integration test against a real Database/Registry with a fake provider."""

    def setUp(self) -> None:
        # Build stage/backup/session directories plus an initialized SQLite-backed repo.
        self.tempdir = tempfile.TemporaryDirectory()
        root = Path(self.tempdir.name)
        self.stage_dir = root / "stage"
        self.backup_dir = root / "backup"
        self.session_dir = root / "session"
        self.stage_dir.mkdir()
        self.backup_dir.mkdir()
        self.session_dir.mkdir()
        db = Database(root / "test.db")
        db.initialize()
        repo = TaskRepository(db)
        registry = Registry()
        provider = _FakeLocalFileProvider()
        registry.register("ingest_provider", "local_file", provider, provider.manifest)
        self.service = IngestService(registry=registry, repo=repo)

    def tearDown(self) -> None:
        self.tempdir.cleanup()

    def test_scan_stage_uses_moved_file_for_reference_timestamp(self) -> None:
        """An accepted stage file is moved, a task is created, and its context records a start time."""
        source_path = self.stage_dir / "王海颖唱歌录播 04月14日 17时49分.mp4"
        source_path.write_bytes(b"fake-video")
        settings = {
            "provider": "local_file",
            "stage_dir": str(self.stage_dir),
            "backup_dir": str(self.backup_dir),
            "session_dir": str(self.session_dir),
            "allowed_extensions": [".mp4"],
            "ffprobe_bin": "ffprobe",
            "min_duration_seconds": 0,
            "stability_wait_seconds": 0,
            "meta_sidecar_enabled": True,
        }
        # Avoid invoking a real ffprobe binary for duration probing.
        self.service._probe_duration_seconds = lambda *_args, **_kwargs: 120.0  # type: ignore[method-assign]
        result = self.service.scan_stage(settings)
        self.assertEqual(len(result["accepted"]), 1)
        accepted = result["accepted"][0]
        moved_path = Path(str(accepted["source_path"]))
        # The original stage file must be gone; the moved file is the source of truth.
        self.assertTrue(moved_path.exists())
        self.assertFalse(source_path.exists())
        task = self.service.repo.get_task(moved_path.stem)
        self.assertIsNotNone(task)
        context = self.service.repo.get_task_context(moved_path.stem)
        self.assertIsNotNone(context)
        self.assertIsNotNone(context.segment_started_at)


if __name__ == "__main__":
    unittest.main()

View File

@ -1,61 +1,61 @@
from __future__ import annotations
import unittest
from biliup_next.core.models import Task, TaskContext
from biliup_next.modules.ingest.service import IngestService
class _FakeRepo:
    """In-memory repository double serving pre-seeded task contexts."""

    def __init__(self, contexts: list[TaskContext]) -> None:
        self.contexts = contexts

    def find_recent_task_contexts(self, streamer: str) -> list[TaskContext]:
        """Return every seeded context whose streamer matches exactly, in order."""
        matches: list[TaskContext] = []
        for candidate in self.contexts:
            if candidate.streamer == streamer:
                matches.append(candidate)
        return matches
class IngestSessionGroupingTests(unittest.TestCase):
    """Session-key inference: later segments of the same streamer join the earlier session."""

    def test_infer_session_key_groups_same_streamer_within_three_hours_to_earliest_title(self) -> None:
        """A later segment reuses the earlier session's key and inherits its full-video bvid."""
        existing_context = TaskContext(
            id=None,
            task_id="task-1",
            session_key="王海颖唱歌录播 04月04日 21时59分 p01 王海颖唱歌录播 04月04日 21时59分",
            streamer="王海颖唱歌录播",
            room_id=None,
            source_title="王海颖唱歌录播 04月04日 21时59分 p01 王海颖唱歌录播 04月04日 21时59分",
            segment_started_at="2026-04-04T21:59:00+08:00",
            segment_duration_seconds=None,
            full_video_bvid="BVFULL123",
            created_at="2026-04-04T14:00:00+00:00",
            updated_at="2026-04-04T14:00:00+00:00",
        )
        service = IngestService(registry=None, repo=_FakeRepo([existing_context]))  # type: ignore[arg-type]
        session_key, inherited_bvid = service._infer_session_key(
            streamer="王海颖唱歌录播",
            room_id=None,
            segment_started_at="2026-04-05T00:30:00+08:00",
            source_title="王海颖唱歌录播 04月05日 00时30分 p02 王海颖唱歌录播 04月05日 00时30分",
            gap_minutes=60,
        )
        self.assertEqual(session_key, existing_context.session_key)
        self.assertEqual(inherited_bvid, "BVFULL123")

    def test_infer_session_key_uses_current_title_when_no_recent_context_matches(self) -> None:
        """Without a recent matching context, the current title becomes the session key."""
        service = IngestService(registry=None, repo=_FakeRepo([]))  # type: ignore[arg-type]
        session_key, inherited_bvid = service._infer_session_key(
            streamer="王海颖唱歌录播",
            room_id=None,
            segment_started_at="2026-04-05T00:30:00+08:00",
            source_title="王海颖唱歌录播 04月05日 00时30分 p02 王海颖唱歌录播 04月05日 00时30分",
            gap_minutes=60,
        )
        self.assertEqual(session_key, "王海颖唱歌录播 04月05日 00时30分 p02 王海颖唱歌录播 04月05日 00时30分")
        self.assertIsNone(inherited_bvid)


if __name__ == "__main__":
    unittest.main()
from __future__ import annotations
import unittest
from biliup_next.core.models import Task, TaskContext
from biliup_next.modules.ingest.service import IngestService
class _FakeRepo:
    """Repository double: returns seeded contexts filtered by streamer name."""

    def __init__(self, contexts: list[TaskContext]) -> None:
        self.contexts = contexts

    def find_recent_task_contexts(self, streamer: str) -> list[TaskContext]:
        """Filter the seeded contexts down to those for *streamer*."""
        same_streamer = filter(lambda ctx: ctx.streamer == streamer, self.contexts)
        return list(same_streamer)
class IngestSessionGroupingTests(unittest.TestCase):
    """_infer_session_key behavior: session reuse vs. starting a new session."""

    def test_infer_session_key_groups_same_streamer_within_three_hours_to_earliest_title(self) -> None:
        """A later segment of the same streamer reuses the earlier key and inherits the bvid."""
        existing_context = TaskContext(
            id=None,
            task_id="task-1",
            session_key="王海颖唱歌录播 04月04日 21时59分 p01 王海颖唱歌录播 04月04日 21时59分",
            streamer="王海颖唱歌录播",
            room_id=None,
            source_title="王海颖唱歌录播 04月04日 21时59分 p01 王海颖唱歌录播 04月04日 21时59分",
            segment_started_at="2026-04-04T21:59:00+08:00",
            segment_duration_seconds=None,
            full_video_bvid="BVFULL123",
            created_at="2026-04-04T14:00:00+00:00",
            updated_at="2026-04-04T14:00:00+00:00",
        )
        service = IngestService(registry=None, repo=_FakeRepo([existing_context]))  # type: ignore[arg-type]
        session_key, inherited_bvid = service._infer_session_key(
            streamer="王海颖唱歌录播",
            room_id=None,
            segment_started_at="2026-04-05T00:30:00+08:00",
            source_title="王海颖唱歌录播 04月05日 00时30分 p02 王海颖唱歌录播 04月05日 00时30分",
            gap_minutes=60,
        )
        self.assertEqual(session_key, existing_context.session_key)
        self.assertEqual(inherited_bvid, "BVFULL123")

    def test_infer_session_key_uses_current_title_when_no_recent_context_matches(self) -> None:
        """With no recent context for the streamer, the current title seeds a new session."""
        service = IngestService(registry=None, repo=_FakeRepo([]))  # type: ignore[arg-type]
        session_key, inherited_bvid = service._infer_session_key(
            streamer="王海颖唱歌录播",
            room_id=None,
            segment_started_at="2026-04-05T00:30:00+08:00",
            source_title="王海颖唱歌录播 04月05日 00时30分 p02 王海颖唱歌录播 04月05日 00时30分",
            gap_minutes=60,
        )
        self.assertEqual(session_key, "王海颖唱歌录播 04月05日 00时30分 p02 王海颖唱歌录播 04月05日 00时30分")
        self.assertIsNone(inherited_bvid)


if __name__ == "__main__":
    unittest.main()

View File

@ -1,162 +1,162 @@
from __future__ import annotations
import json
import tempfile
import unittest
from pathlib import Path
from biliup_next.core.models import Artifact, PublishRecord, Task, TaskContext, TaskStep
from biliup_next.modules.publish.service import PublishService
class _FakePublishProvider:
    """Publish-provider double: records every call and returns a fixed BVID."""

    def __init__(self) -> None:
        # Each entry: (task id, clip paths in call order, snapshot of settings).
        self.calls: list[tuple[str, list[str], dict[str, object]]] = []

    def publish(self, task: Task, clip_videos: list[Artifact], settings: dict[str, object]) -> PublishRecord:
        """Log the invocation and fabricate a successful publish record."""
        clip_paths: list[str] = []
        for clip in clip_videos:
            clip_paths.append(clip.path)
        self.calls.append((task.id, clip_paths, dict(settings)))
        record = PublishRecord(
            id=None,
            task_id=task.id,
            platform="bilibili",
            aid=None,
            bvid="BV1SESSION123",
            title=task.title,
            published_at="2026-01-01T00:00:00+00:00",
        )
        return record
class _FakeRegistry:
    """Registry double that hands back the same provider for any lookup."""

    def __init__(self, provider) -> None:  # noqa: ANN001
        self.provider = provider

    def get(self, provider_type: str, provider_id: str):  # noqa: ANN001
        # Type/id are ignored; tests only ever need the single wrapped provider.
        return self.provider
class _FakeRepo:
    """In-memory repository double tracking tasks, contexts, artifacts and updates."""

    def __init__(self, tasks: list[Task], contexts: list[TaskContext], artifacts: dict[str, list[Artifact]]) -> None:
        self.tasks = {}
        for task in tasks:
            self.tasks[task.id] = task
        self.contexts = {}
        for context in contexts:
            self.contexts[context.task_id] = context
        self.artifacts = artifacts
        self.publish_records: list[PublishRecord] = []
        self.step_updates: list[tuple[str, str, str]] = []
        self.task_updates: list[tuple[str, str]] = []

    def get_task(self, task_id: str) -> Task | None:
        """Look up a task by id; None when absent."""
        return self.tasks.get(task_id)

    def list_artifacts(self, task_id: str) -> list[Artifact]:
        """Return a fresh list of the artifacts recorded for the task."""
        return list(self.artifacts.get(task_id, []))

    def get_task_context(self, task_id: str) -> TaskContext | None:
        return self.contexts.get(task_id)

    def list_task_contexts_by_session_key(self, session_key: str) -> list[TaskContext]:
        """All stored contexts sharing the given session key, in insertion order."""
        found: list[TaskContext] = []
        for context in self.contexts.values():
            if context.session_key == session_key:
                found.append(context)
        return found

    def add_publish_record(self, record: PublishRecord) -> None:
        self.publish_records.append(record)

    def add_artifact(self, artifact: Artifact) -> None:
        bucket = self.artifacts.setdefault(artifact.task_id, [])
        bucket.append(artifact)

    def update_step_status(self, task_id: str, step_name: str, status: str, **kwargs) -> None:  # noqa: ANN001
        # Only the transition triple is recorded; extra kwargs are dropped.
        self.step_updates.append((task_id, step_name, status))

    def update_task_status(self, task_id: str, status: str, updated_at: str) -> None:
        """Record the transition and rebuild the stored Task with the new status."""
        self.task_updates.append((task_id, status))
        previous = self.tasks[task_id]
        self.tasks[task_id] = Task(
            previous.id,
            previous.source_type,
            previous.source_path,
            previous.title,
            status,
            previous.created_at,
            updated_at,
        )
class PublishServiceTests(unittest.TestCase):
    """Session-level publish orchestration of PublishService against in-memory fakes."""

    def test_anchor_task_publishes_aggregated_session_clips_and_marks_all_tasks_published(self) -> None:
        """The anchor task publishes all session clips once and every session task is marked published."""
        provider = _FakePublishProvider()
        with tempfile.TemporaryDirectory() as tmpdir:
            root = Path(tmpdir)
            (root / "task-1").mkdir(parents=True, exist_ok=True)
            (root / "task-2").mkdir(parents=True, exist_ok=True)
            task1 = Task("task-1", "local_file", str(root / "task-1" / "source.mp4"), "task-1", "split_done", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
            task2 = Task("task-2", "local_file", str(root / "task-2" / "source.mp4"), "task-2", "split_done", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
            # Two tasks share session-1; task-1's segment started earlier.
            ctx1 = TaskContext(None, "task-1", "session-1", "s", None, "part-1", "2026-04-04T09:23:00+08:00", None, None, task1.created_at, task1.updated_at)
            ctx2 = TaskContext(None, "task-2", "session-1", "s", None, "part-2", "2026-04-04T09:25:00+08:00", None, None, task2.created_at, task2.updated_at)
            artifacts = {
                "task-1": [Artifact(None, "task-1", "clip_video", str(root / "a1.mp4"), "{}", task1.created_at)],
                "task-2": [Artifact(None, "task-2", "clip_video", str(root / "b1.mp4"), "{}", task2.created_at)],
            }
            repo = _FakeRepo([task1, task2], [ctx1, ctx2], artifacts)
            service = PublishService(_FakeRegistry(provider), repo)
            record = service.run("task-1", {"provider": "biliup_cli", "session_dir": str(root)})
            self.assertEqual(record.bvid, "BV1SESSION123")
            # One provider call, for the anchor, carrying both tasks' clips in order.
            self.assertEqual(provider.calls[0][0], "task-1")
            self.assertEqual(provider.calls[0][1], [str(root / "a1.mp4"), str(root / "b1.mp4")])
            aggregate_settings = provider.calls[0][2]
            aggregate_txt = Path(str(aggregate_settings["publish_songs_txt_path"]))
            aggregate_json = Path(str(aggregate_settings["publish_songs_json_path"]))
            self.assertTrue(aggregate_txt.exists())
            self.assertTrue(aggregate_json.exists())
            self.assertIn(("task-1", "published"), repo.task_updates)
            self.assertIn(("task-2", "published"), repo.task_updates)
            # A publish record and a bvid.txt sidecar exist for every session task.
            self.assertEqual(len(repo.publish_records), 2)
            self.assertTrue((root / "task-1" / "bvid.txt").exists())
            self.assertTrue((root / "task-2" / "bvid.txt").exists())

    def test_non_anchor_task_reuses_existing_session_bvid_without_republishing(self) -> None:
        """A non-anchor task adopts the session bvid from disk; the provider is never called."""
        provider = _FakePublishProvider()
        with tempfile.TemporaryDirectory() as tmpdir:
            root = Path(tmpdir)
            (root / "task-1").mkdir(parents=True, exist_ok=True)
            (root / "task-1" / "bvid.txt").write_text("BV1SESSION123", encoding="utf-8")
            (root / "task-2").mkdir(parents=True, exist_ok=True)
            task1 = Task("task-1", "local_file", str(root / "task-1" / "source.mp4"), "task-1", "published", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
            task2 = Task("task-2", "local_file", str(root / "task-2" / "source.mp4"), "task-2", "split_done", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
            ctx1 = TaskContext(None, "task-1", "session-1", "s", None, "part-1", "2026-04-04T09:23:00+08:00", None, None, task1.created_at, task1.updated_at)
            ctx2 = TaskContext(None, "task-2", "session-1", "s", None, "part-2", "2026-04-04T09:25:00+08:00", None, None, task2.created_at, task2.updated_at)
            repo = _FakeRepo([task1, task2], [ctx1, ctx2], {"task-2": []})
            service = PublishService(_FakeRegistry(provider), repo)
            record = service.run("task-2", {"provider": "biliup_cli", "session_dir": str(root)})
            self.assertEqual(record.bvid, "BV1SESSION123")
            self.assertEqual(provider.calls, [])
            self.assertIn(("task-2", "published"), repo.task_updates)
            self.assertTrue((root / "task-2" / "bvid.txt").exists())

    def test_session_publish_aggregates_song_lists_for_provider_metadata(self) -> None:
        """Per-task songs.txt/songs.json files are merged into aggregate files for the provider."""
        provider = _FakePublishProvider()
        with tempfile.TemporaryDirectory() as tmpdir:
            root = Path(tmpdir)
            (root / "task-1").mkdir(parents=True, exist_ok=True)
            (root / "task-2").mkdir(parents=True, exist_ok=True)
            (root / "task-1" / "songs.txt").write_text("00:00:00 Song A — Artist A\n", encoding="utf-8")
            (root / "task-2" / "songs.txt").write_text("00:00:00 Song B — Artist B\n", encoding="utf-8")
            (root / "task-1" / "songs.json").write_text('{"songs":[{"title":"Song A"},{"title":"Song A2"}]}\n', encoding="utf-8")
            (root / "task-2" / "songs.json").write_text('{"songs":[{"title":"Song B"}]}\n', encoding="utf-8")
            task1 = Task("task-1", "local_file", str(root / "task-1" / "source.mp4"), "task-1", "split_done", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
            task2 = Task("task-2", "local_file", str(root / "task-2" / "source.mp4"), "task-2", "split_done", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
            ctx1 = TaskContext(None, "task-1", "session-1", "s", None, "part-1", "2026-04-04T09:23:00+08:00", None, None, task1.created_at, task1.updated_at)
            ctx2 = TaskContext(None, "task-2", "session-1", "s", None, "part-2", "2026-04-04T09:25:00+08:00", None, None, task2.created_at, task2.updated_at)
            artifacts = {
                "task-1": [Artifact(None, "task-1", "clip_video", str(root / "a1.mp4"), "{}", task1.created_at)],
                "task-2": [Artifact(None, "task-2", "clip_video", str(root / "b1.mp4"), "{}", task2.created_at)],
            }
            repo = _FakeRepo([task1, task2], [ctx1, ctx2], artifacts)
            service = PublishService(_FakeRegistry(provider), repo)
            service.run("task-1", {"provider": "biliup_cli", "session_dir": str(root)})
            settings = provider.calls[0][2]
            aggregate_txt = Path(str(settings["publish_songs_txt_path"])).read_text(encoding="utf-8")
            aggregate_json = Path(str(settings["publish_songs_json_path"])).read_text(encoding="utf-8")
            # The aggregate text groups songs per part (P1/P2) in session order.
            self.assertIn("P1:", aggregate_txt)
            self.assertIn("Song A — Artist A", aggregate_txt)
            self.assertIn("P2:", aggregate_txt)
            self.assertIn("Song B — Artist B", aggregate_txt)
            self.assertEqual(len(json.loads(aggregate_json)["songs"]), 3)


if __name__ == "__main__":
    unittest.main()
from __future__ import annotations
import json
import tempfile
import unittest
from pathlib import Path
from biliup_next.core.models import Artifact, PublishRecord, Task, TaskContext, TaskStep
from biliup_next.modules.publish.service import PublishService
class _FakePublishProvider:
    """Records publish invocations and answers with a canned PublishRecord."""

    def __init__(self) -> None:
        # (task id, ordered clip paths, copied settings) per publish() call.
        self.calls: list[tuple[str, list[str], dict[str, object]]] = []

    def publish(self, task: Task, clip_videos: list[Artifact], settings: dict[str, object]) -> PublishRecord:
        """Capture the call and return a fixed-BVID record for *task*."""
        paths = [artifact.path for artifact in clip_videos]
        snapshot = dict(settings)
        self.calls.append((task.id, paths, snapshot))
        return PublishRecord(
            id=None,
            task_id=task.id,
            platform="bilibili",
            aid=None,
            bvid="BV1SESSION123",
            title=task.title,
            published_at="2026-01-01T00:00:00+00:00",
        )
class _FakeRegistry:
    """Registry double: every lookup resolves to the single wrapped provider."""

    def __init__(self, provider) -> None:  # noqa: ANN001
        self.provider = provider

    def get(self, provider_type: str, provider_id: str):  # noqa: ANN001
        # Arguments are ignored on purpose — the tests use one provider only.
        return self.provider
class _FakeRepo:
    """Dict-backed repository double recording every mutation the service performs."""

    def __init__(self, tasks: list[Task], contexts: list[TaskContext], artifacts: dict[str, list[Artifact]]) -> None:
        self.tasks = {item.id: item for item in tasks}
        self.contexts = {item.task_id: item for item in contexts}
        self.artifacts = artifacts
        # Mutation logs inspected by the tests.
        self.publish_records: list[PublishRecord] = []
        self.step_updates: list[tuple[str, str, str]] = []
        self.task_updates: list[tuple[str, str]] = []

    def get_task(self, task_id: str) -> Task | None:
        return self.tasks.get(task_id)

    def list_artifacts(self, task_id: str) -> list[Artifact]:
        """Copy of the artifact list for *task_id* (empty when unknown)."""
        stored = self.artifacts.get(task_id, [])
        return list(stored)

    def get_task_context(self, task_id: str) -> TaskContext | None:
        return self.contexts.get(task_id)

    def list_task_contexts_by_session_key(self, session_key: str) -> list[TaskContext]:
        """Contexts whose session_key matches, preserving insertion order."""
        return [ctx for ctx in self.contexts.values() if ctx.session_key == session_key]

    def add_publish_record(self, record: PublishRecord) -> None:
        self.publish_records.append(record)

    def add_artifact(self, artifact: Artifact) -> None:
        self.artifacts.setdefault(artifact.task_id, []).append(artifact)

    def update_step_status(self, task_id: str, step_name: str, status: str, **kwargs) -> None:  # noqa: ANN001
        # Extra keyword details are intentionally discarded.
        self.step_updates.append((task_id, step_name, status))

    def update_task_status(self, task_id: str, status: str, updated_at: str) -> None:
        """Log the transition and replace the stored Task with an updated copy."""
        self.task_updates.append((task_id, status))
        old = self.tasks[task_id]
        replacement = Task(old.id, old.source_type, old.source_path, old.title, status, old.created_at, updated_at)
        self.tasks[task_id] = replacement
class PublishServiceTests(unittest.TestCase):
def test_anchor_task_publishes_aggregated_session_clips_and_marks_all_tasks_published(self) -> None:
provider = _FakePublishProvider()
with tempfile.TemporaryDirectory() as tmpdir:
root = Path(tmpdir)
(root / "task-1").mkdir(parents=True, exist_ok=True)
(root / "task-2").mkdir(parents=True, exist_ok=True)
task1 = Task("task-1", "local_file", str(root / "task-1" / "source.mp4"), "task-1", "split_done", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
task2 = Task("task-2", "local_file", str(root / "task-2" / "source.mp4"), "task-2", "split_done", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
ctx1 = TaskContext(None, "task-1", "session-1", "s", None, "part-1", "2026-04-04T09:23:00+08:00", None, None, task1.created_at, task1.updated_at)
ctx2 = TaskContext(None, "task-2", "session-1", "s", None, "part-2", "2026-04-04T09:25:00+08:00", None, None, task2.created_at, task2.updated_at)
artifacts = {
"task-1": [Artifact(None, "task-1", "clip_video", str(root / "a1.mp4"), "{}", task1.created_at)],
"task-2": [Artifact(None, "task-2", "clip_video", str(root / "b1.mp4"), "{}", task2.created_at)],
}
repo = _FakeRepo([task1, task2], [ctx1, ctx2], artifacts)
service = PublishService(_FakeRegistry(provider), repo)
record = service.run("task-1", {"provider": "biliup_cli", "session_dir": str(root)})
self.assertEqual(record.bvid, "BV1SESSION123")
self.assertEqual(provider.calls[0][0], "task-1")
self.assertEqual(provider.calls[0][1], [str(root / "a1.mp4"), str(root / "b1.mp4")])
aggregate_settings = provider.calls[0][2]
aggregate_txt = Path(str(aggregate_settings["publish_songs_txt_path"]))
aggregate_json = Path(str(aggregate_settings["publish_songs_json_path"]))
self.assertTrue(aggregate_txt.exists())
self.assertTrue(aggregate_json.exists())
self.assertIn(("task-1", "published"), repo.task_updates)
self.assertIn(("task-2", "published"), repo.task_updates)
self.assertEqual(len(repo.publish_records), 2)
self.assertTrue((root / "task-1" / "bvid.txt").exists())
self.assertTrue((root / "task-2" / "bvid.txt").exists())
def test_non_anchor_task_reuses_existing_session_bvid_without_republishing(self) -> None:
provider = _FakePublishProvider()
with tempfile.TemporaryDirectory() as tmpdir:
root = Path(tmpdir)
(root / "task-1").mkdir(parents=True, exist_ok=True)
(root / "task-1" / "bvid.txt").write_text("BV1SESSION123", encoding="utf-8")
(root / "task-2").mkdir(parents=True, exist_ok=True)
task1 = Task("task-1", "local_file", str(root / "task-1" / "source.mp4"), "task-1", "published", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
task2 = Task("task-2", "local_file", str(root / "task-2" / "source.mp4"), "task-2", "split_done", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
ctx1 = TaskContext(None, "task-1", "session-1", "s", None, "part-1", "2026-04-04T09:23:00+08:00", None, None, task1.created_at, task1.updated_at)
ctx2 = TaskContext(None, "task-2", "session-1", "s", None, "part-2", "2026-04-04T09:25:00+08:00", None, None, task2.created_at, task2.updated_at)
repo = _FakeRepo([task1, task2], [ctx1, ctx2], {"task-2": []})
service = PublishService(_FakeRegistry(provider), repo)
record = service.run("task-2", {"provider": "biliup_cli", "session_dir": str(root)})
self.assertEqual(record.bvid, "BV1SESSION123")
self.assertEqual(provider.calls, [])
self.assertIn(("task-2", "published"), repo.task_updates)
self.assertTrue((root / "task-2" / "bvid.txt").exists())
# Publishing a session anchor should merge every member task's songs.txt /
# songs.json into per-part ("P1:", "P2:") aggregate files and hand their
# paths to the publish provider via its settings.
def test_session_publish_aggregates_song_lists_for_provider_metadata(self) -> None:
provider = _FakePublishProvider()
with tempfile.TemporaryDirectory() as tmpdir:
root = Path(tmpdir)
(root / "task-1").mkdir(parents=True, exist_ok=True)
(root / "task-2").mkdir(parents=True, exist_ok=True)
# Per-task song lists: task-1 contributes 2 JSON entries, task-2 one.
(root / "task-1" / "songs.txt").write_text("00:00:00 Song A — Artist A\n", encoding="utf-8")
(root / "task-2" / "songs.txt").write_text("00:00:00 Song B — Artist B\n", encoding="utf-8")
(root / "task-1" / "songs.json").write_text('{"songs":[{"title":"Song A"},{"title":"Song A2"}]}\n', encoding="utf-8")
(root / "task-2" / "songs.json").write_text('{"songs":[{"title":"Song B"}]}\n', encoding="utf-8")
task1 = Task("task-1", "local_file", str(root / "task-1" / "source.mp4"), "task-1", "split_done", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
task2 = Task("task-2", "local_file", str(root / "task-2" / "source.mp4"), "task-2", "split_done", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
ctx1 = TaskContext(None, "task-1", "session-1", "s", None, "part-1", "2026-04-04T09:23:00+08:00", None, None, task1.created_at, task1.updated_at)
ctx2 = TaskContext(None, "task-2", "session-1", "s", None, "part-2", "2026-04-04T09:25:00+08:00", None, None, task2.created_at, task2.updated_at)
artifacts = {
"task-1": [Artifact(None, "task-1", "clip_video", str(root / "a1.mp4"), "{}", task1.created_at)],
"task-2": [Artifact(None, "task-2", "clip_video", str(root / "b1.mp4"), "{}", task2.created_at)],
}
repo = _FakeRepo([task1, task2], [ctx1, ctx2], artifacts)
service = PublishService(_FakeRegistry(provider), repo)
service.run("task-1", {"provider": "biliup_cli", "session_dir": str(root)})
# provider.calls[0][2] is the settings dict passed to the provider.
settings = provider.calls[0][2]
aggregate_txt = Path(str(settings["publish_songs_txt_path"])).read_text(encoding="utf-8")
aggregate_json = Path(str(settings["publish_songs_json_path"])).read_text(encoding="utf-8")
self.assertIn("P1:", aggregate_txt)
self.assertIn("Song A — Artist A", aggregate_txt)
self.assertIn("P2:", aggregate_txt)
self.assertIn("Song B — Artist B", aggregate_txt)
# 2 songs from task-1 + 1 from task-2 merged into one JSON list.
self.assertEqual(len(json.loads(aggregate_json)["songs"]), 3)
# Allow running this test module directly with `python <file>`.
if __name__ == "__main__":
unittest.main()

View File

@ -1,42 +1,42 @@
from __future__ import annotations
import unittest
from types import SimpleNamespace
from biliup_next.app.retry_meta import retry_meta_for_step
# Tests for retry_meta_for_step: turning a failed step plus a per-step
# retry schedule into wait/exhaustion metadata for the control plane.
class RetryMetaTests(unittest.TestCase):
def test_retry_meta_uses_schedule_minutes(self) -> None:
# retry_count=1 with schedule [15, 5] yields a 900s (15 min) wait;
# the far-future finished_at presumably keeps retry_due False — verify.
step = SimpleNamespace(
step_name="publish",
status="failed_retryable",
retry_count=1,
started_at=None,
finished_at="2099-01-01T00:00:00+00:00",
)
payload = retry_meta_for_step(step, {"publish": {"retry_schedule_minutes": [15, 5]}})
self.assertIsNotNone(payload)
self.assertEqual(payload["retry_wait_seconds"], 900)
self.assertFalse(payload["retry_due"])
def test_retry_meta_marks_exhausted_after_schedule_is_consumed(self) -> None:
# retry_count=3 exceeds the 2-entry schedule: no next retry is planned.
step = SimpleNamespace(
step_name="comment",
status="failed_retryable",
retry_count=3,
started_at=None,
finished_at="2026-01-01T00:00:00+00:00",
)
payload = retry_meta_for_step(step, {"comment": {"retry_schedule_minutes": [1, 2]}})
self.assertIsNotNone(payload)
self.assertTrue(payload["retry_exhausted"])
self.assertIsNone(payload["next_retry_at"])
if __name__ == "__main__":
unittest.main()
from __future__ import annotations
import unittest
from types import SimpleNamespace
from biliup_next.app.retry_meta import retry_meta_for_step
class RetryMetaTests(unittest.TestCase):
    """Behavioural checks for retry_meta_for_step schedule handling."""

    @staticmethod
    def _failed_step(name: str, retries: int, finished_at: str) -> SimpleNamespace:
        # Minimal stand-in for a persisted step record in failed_retryable state.
        return SimpleNamespace(
            step_name=name,
            status="failed_retryable",
            retry_count=retries,
            started_at=None,
            finished_at=finished_at,
        )

    def test_retry_meta_uses_schedule_minutes(self) -> None:
        """A first retry with schedule [15, 5] waits 900s and is not yet due."""
        meta = retry_meta_for_step(
            self._failed_step("publish", 1, "2099-01-01T00:00:00+00:00"),
            {"publish": {"retry_schedule_minutes": [15, 5]}},
        )
        self.assertIsNotNone(meta)
        self.assertEqual(meta["retry_wait_seconds"], 900)
        self.assertFalse(meta["retry_due"])

    def test_retry_meta_marks_exhausted_after_schedule_is_consumed(self) -> None:
        """retry_count beyond the schedule length marks the step exhausted."""
        meta = retry_meta_for_step(
            self._failed_step("comment", 3, "2026-01-01T00:00:00+00:00"),
            {"comment": {"retry_schedule_minutes": [1, 2]}},
        )
        self.assertIsNotNone(meta)
        self.assertTrue(meta["retry_exhausted"])
        self.assertIsNone(meta["next_retry_at"])


if __name__ == "__main__":
    unittest.main()

View File

@ -1,177 +1,177 @@
from __future__ import annotations
import json
import tempfile
import unittest
from pathlib import Path
from biliup_next.app.serializers import ControlPlaneSerializer
from biliup_next.core.models import ActionRecord, Artifact, Task, TaskContext, TaskStep
# In-memory repository double scoped to a single task; exposes exactly the
# read methods ControlPlaneSerializer calls, returning copies of the stored
# lists so tests cannot be affected by caller mutation.
class FakeSerializerRepo:
def __init__(
self,
*,
task: Task,
context: TaskContext | None = None,
steps: list[TaskStep] | None = None,
artifacts: list[Artifact] | None = None,
actions: list[ActionRecord] | None = None,
) -> None:
self.task = task
self.context = context
self.steps = steps or []
self.artifacts = artifacts or []
self.actions = actions or []
def get_task(self, task_id: str) -> Task | None:
return self.task if task_id == self.task.id else None
def get_task_context(self, task_id: str) -> TaskContext | None:
return self.context if task_id == self.task.id else None
def list_task_contexts_for_task_ids(self, task_ids: list[str]) -> dict[str, TaskContext]:
if self.context and self.context.task_id in task_ids:
return {self.context.task_id: self.context}
return {}
def list_steps_for_task_ids(self, task_ids: list[str]) -> dict[str, list[TaskStep]]:
if self.task.id in task_ids:
return {self.task.id: list(self.steps)}
return {}
def list_steps(self, task_id: str) -> list[TaskStep]:
return list(self.steps) if task_id == self.task.id else []
def list_task_contexts_by_session_key(self, session_key: str) -> list[TaskContext]:
if self.context and self.context.session_key == session_key:
return [self.context]
return []
def list_artifacts(self, task_id: str) -> list[Artifact]:
return list(self.artifacts) if task_id == self.task.id else []
# limit mirrors the production repo signature; actions are pre-truncated.
def list_action_records(self, task_id: str, limit: int = 200) -> list[ActionRecord]:
return list(self.actions)[:limit] if task_id == self.task.id else []
# Tests for ControlPlaneSerializer driven through a faked {"repo", "settings"}
# state dict and real temp directories for on-disk bvid files.
class SerializerTests(unittest.TestCase):
# task_payload should surface session context (bvids read from the
# session directory), retry state from failed steps, and the pending
# comment-delivery state implied by the comment settings.
def test_task_payload_includes_context_retry_and_delivery_state(self) -> None:
with tempfile.TemporaryDirectory() as tmpdir:
task = Task("task-1", "local_file", str(Path(tmpdir) / "session" / "task-title" / "source.mp4"), "task-title", "running", "2026-01-01T00:00:00+00:00", "2026-01-01T00:01:00+00:00")
session_dir = Path(tmpdir) / "session" / "task-title"
session_dir.mkdir(parents=True, exist_ok=True)
# bvids live on disk, not in the context (full_video_bvid=None below).
(session_dir / "full_video_bvid.txt").write_text("BVFULL123", encoding="utf-8")
(session_dir / "bvid.txt").write_text("BVSPLIT123", encoding="utf-8")
steps = [
TaskStep(None, "task-1", "publish", "failed_retryable", "ERR", "upload failed", 1, None, "2099-01-01T00:00:00+00:00"),
]
context = TaskContext(
id=None,
task_id="task-1",
session_key="session-1",
streamer="streamer",
room_id="room",
source_title="task-title",
segment_started_at=None,
segment_duration_seconds=None,
full_video_bvid=None,
created_at="2026-01-01T00:00:00+00:00",
updated_at="2026-01-01T00:00:00+00:00",
)
repo = FakeSerializerRepo(task=task, context=context, steps=steps)
state = {
"repo": repo,
"settings": {
"paths": {"session_dir": str(Path(tmpdir) / "session")},
"comment": {"post_split_comment": True, "post_full_video_timeline_comment": True},
"cleanup": {},
"publish": {"retry_schedule_minutes": [10]},
},
}
payload = ControlPlaneSerializer(state).task_payload("task-1")
self.assertIsNotNone(payload)
self.assertEqual(payload["session_context"]["session_key"], "session-1")
self.assertEqual(payload["session_context"]["full_video_bvid"], "BVFULL123")
self.assertEqual(payload["retry_state"]["step_name"], "publish")
self.assertEqual(payload["delivery_state"]["split_comment"], "pending")
# session_payload should embed the same per-task payloads and derive the
# full-video URL from the context's full_video_bvid.
def test_session_payload_reuses_task_payload_serialization(self) -> None:
with tempfile.TemporaryDirectory() as tmpdir:
task = Task("task-1", "local_file", str(Path(tmpdir) / "session" / "task-title" / "source.mp4"), "task-title", "published", "2026-01-01T00:00:00+00:00", "2026-01-01T00:01:00+00:00")
context = TaskContext(
id=None,
task_id="task-1",
session_key="session-1",
streamer="streamer",
room_id="room",
source_title="task-title",
segment_started_at=None,
segment_duration_seconds=None,
full_video_bvid="BVFULL123",
created_at="2026-01-01T00:00:00+00:00",
updated_at="2026-01-01T00:00:00+00:00",
)
repo = FakeSerializerRepo(task=task, context=context)
state = {
"repo": repo,
"settings": {
"paths": {"session_dir": str(Path(tmpdir) / "session")},
"comment": {"post_split_comment": True, "post_full_video_timeline_comment": True},
"cleanup": {},
"publish": {},
},
}
payload = ControlPlaneSerializer(state).session_payload("session-1")
self.assertIsNotNone(payload)
self.assertEqual(payload["session_key"], "session-1")
self.assertEqual(payload["task_count"], 1)
self.assertEqual(payload["full_video_url"], "https://www.bilibili.com/video/BVFULL123")
self.assertEqual(payload["tasks"][0]["id"], "task-1")
# timeline_payload should interleave task/step/artifact/action entries and
# summarize each action's details_json (e.g. "split=ok").
def test_timeline_payload_includes_task_step_artifact_and_action_entries(self) -> None:
task = Task("task-1", "local_file", "/tmp/source.mp4", "task-title", "published", "2026-01-01T00:00:00+00:00", "2026-01-01T00:02:00+00:00")
steps = [
TaskStep(None, "task-1", "comment", "succeeded", None, None, 0, "2026-01-01T00:01:00+00:00", "2026-01-01T00:01:30+00:00"),
]
artifacts = [
Artifact(None, "task-1", "publish_bvid", "/tmp/bvid.txt", "{}", "2026-01-01T00:01:40+00:00"),
]
actions = [
ActionRecord(
id=None,
task_id="task-1",
action_name="comment",
status="ok",
summary="comment succeeded",
details_json=json.dumps({"split": {"status": "ok"}, "full": {"status": "skipped"}}),
created_at="2026-01-01T00:01:50+00:00",
)
]
repo = FakeSerializerRepo(task=task, steps=steps, artifacts=artifacts, actions=actions)
state = {
"repo": repo,
"settings": {
"paths": {"session_dir": "/tmp/session"},
"comment": {"post_split_comment": True, "post_full_video_timeline_comment": True},
"cleanup": {},
"publish": {},
},
}
payload = ControlPlaneSerializer(state).timeline_payload("task-1")
self.assertIsNotNone(payload)
action_item = next(item for item in payload["items"] if item["kind"] == "action")
self.assertIn("split=ok", action_item["summary"])
kinds = {item["kind"] for item in payload["items"]}
self.assertTrue({"task", "step", "artifact", "action"}.issubset(kinds))
if __name__ == "__main__":
unittest.main()
from __future__ import annotations
import json
import tempfile
import unittest
from pathlib import Path
from biliup_next.app.serializers import ControlPlaneSerializer
from biliup_next.core.models import ActionRecord, Artifact, Task, TaskContext, TaskStep
class FakeSerializerRepo:
    """In-memory, single-task repository double for ControlPlaneSerializer.

    Implements only the read API the serializer consumes; list-returning
    methods hand back shallow copies so tests stay isolated from mutation.
    """

    def __init__(
        self,
        *,
        task: Task,
        context: TaskContext | None = None,
        steps: list[TaskStep] | None = None,
        artifacts: list[Artifact] | None = None,
        actions: list[ActionRecord] | None = None,
    ) -> None:
        self.task = task
        self.context = context
        self.steps = steps if steps else []
        self.artifacts = artifacts if artifacts else []
        self.actions = actions if actions else []

    def get_task(self, task_id: str) -> Task | None:
        if task_id != self.task.id:
            return None
        return self.task

    def get_task_context(self, task_id: str) -> TaskContext | None:
        if task_id != self.task.id:
            return None
        return self.context

    def list_task_contexts_for_task_ids(self, task_ids: list[str]) -> dict[str, TaskContext]:
        ctx = self.context
        if ctx and ctx.task_id in task_ids:
            return {ctx.task_id: ctx}
        return {}

    def list_steps_for_task_ids(self, task_ids: list[str]) -> dict[str, list[TaskStep]]:
        if self.task.id not in task_ids:
            return {}
        return {self.task.id: list(self.steps)}

    def list_steps(self, task_id: str) -> list[TaskStep]:
        if task_id != self.task.id:
            return []
        return list(self.steps)

    def list_task_contexts_by_session_key(self, session_key: str) -> list[TaskContext]:
        ctx = self.context
        if ctx and ctx.session_key == session_key:
            return [ctx]
        return []

    def list_artifacts(self, task_id: str) -> list[Artifact]:
        if task_id != self.task.id:
            return []
        return list(self.artifacts)

    def list_action_records(self, task_id: str, limit: int = 200) -> list[ActionRecord]:
        # limit mirrors the production repo signature.
        if task_id != self.task.id:
            return []
        return list(self.actions)[:limit]
# Tests for ControlPlaneSerializer driven through a faked {"repo", "settings"}
# state dict and real temp directories for on-disk bvid files.
class SerializerTests(unittest.TestCase):
# task_payload should surface session context (bvids read from the
# session directory), retry state from failed steps, and the pending
# comment-delivery state implied by the comment settings.
def test_task_payload_includes_context_retry_and_delivery_state(self) -> None:
with tempfile.TemporaryDirectory() as tmpdir:
task = Task("task-1", "local_file", str(Path(tmpdir) / "session" / "task-title" / "source.mp4"), "task-title", "running", "2026-01-01T00:00:00+00:00", "2026-01-01T00:01:00+00:00")
session_dir = Path(tmpdir) / "session" / "task-title"
session_dir.mkdir(parents=True, exist_ok=True)
# bvids live on disk, not in the context (full_video_bvid=None below).
(session_dir / "full_video_bvid.txt").write_text("BVFULL123", encoding="utf-8")
(session_dir / "bvid.txt").write_text("BVSPLIT123", encoding="utf-8")
steps = [
TaskStep(None, "task-1", "publish", "failed_retryable", "ERR", "upload failed", 1, None, "2099-01-01T00:00:00+00:00"),
]
context = TaskContext(
id=None,
task_id="task-1",
session_key="session-1",
streamer="streamer",
room_id="room",
source_title="task-title",
segment_started_at=None,
segment_duration_seconds=None,
full_video_bvid=None,
created_at="2026-01-01T00:00:00+00:00",
updated_at="2026-01-01T00:00:00+00:00",
)
repo = FakeSerializerRepo(task=task, context=context, steps=steps)
state = {
"repo": repo,
"settings": {
"paths": {"session_dir": str(Path(tmpdir) / "session")},
"comment": {"post_split_comment": True, "post_full_video_timeline_comment": True},
"cleanup": {},
"publish": {"retry_schedule_minutes": [10]},
},
}
payload = ControlPlaneSerializer(state).task_payload("task-1")
self.assertIsNotNone(payload)
self.assertEqual(payload["session_context"]["session_key"], "session-1")
self.assertEqual(payload["session_context"]["full_video_bvid"], "BVFULL123")
self.assertEqual(payload["retry_state"]["step_name"], "publish")
self.assertEqual(payload["delivery_state"]["split_comment"], "pending")
# session_payload should embed the same per-task payloads and derive the
# full-video URL from the context's full_video_bvid.
def test_session_payload_reuses_task_payload_serialization(self) -> None:
with tempfile.TemporaryDirectory() as tmpdir:
task = Task("task-1", "local_file", str(Path(tmpdir) / "session" / "task-title" / "source.mp4"), "task-title", "published", "2026-01-01T00:00:00+00:00", "2026-01-01T00:01:00+00:00")
context = TaskContext(
id=None,
task_id="task-1",
session_key="session-1",
streamer="streamer",
room_id="room",
source_title="task-title",
segment_started_at=None,
segment_duration_seconds=None,
full_video_bvid="BVFULL123",
created_at="2026-01-01T00:00:00+00:00",
updated_at="2026-01-01T00:00:00+00:00",
)
repo = FakeSerializerRepo(task=task, context=context)
state = {
"repo": repo,
"settings": {
"paths": {"session_dir": str(Path(tmpdir) / "session")},
"comment": {"post_split_comment": True, "post_full_video_timeline_comment": True},
"cleanup": {},
"publish": {},
},
}
payload = ControlPlaneSerializer(state).session_payload("session-1")
self.assertIsNotNone(payload)
self.assertEqual(payload["session_key"], "session-1")
self.assertEqual(payload["task_count"], 1)
self.assertEqual(payload["full_video_url"], "https://www.bilibili.com/video/BVFULL123")
self.assertEqual(payload["tasks"][0]["id"], "task-1")
# timeline_payload should interleave task/step/artifact/action entries and
# summarize each action's details_json (e.g. "split=ok").
def test_timeline_payload_includes_task_step_artifact_and_action_entries(self) -> None:
task = Task("task-1", "local_file", "/tmp/source.mp4", "task-title", "published", "2026-01-01T00:00:00+00:00", "2026-01-01T00:02:00+00:00")
steps = [
TaskStep(None, "task-1", "comment", "succeeded", None, None, 0, "2026-01-01T00:01:00+00:00", "2026-01-01T00:01:30+00:00"),
]
artifacts = [
Artifact(None, "task-1", "publish_bvid", "/tmp/bvid.txt", "{}", "2026-01-01T00:01:40+00:00"),
]
actions = [
ActionRecord(
id=None,
task_id="task-1",
action_name="comment",
status="ok",
summary="comment succeeded",
details_json=json.dumps({"split": {"status": "ok"}, "full": {"status": "skipped"}}),
created_at="2026-01-01T00:01:50+00:00",
)
]
repo = FakeSerializerRepo(task=task, steps=steps, artifacts=artifacts, actions=actions)
state = {
"repo": repo,
"settings": {
"paths": {"session_dir": "/tmp/session"},
"comment": {"post_split_comment": True, "post_full_video_timeline_comment": True},
"cleanup": {},
"publish": {},
},
}
payload = ControlPlaneSerializer(state).timeline_payload("task-1")
self.assertIsNotNone(payload)
action_item = next(item for item in payload["items"] if item["kind"] == "action")
self.assertIn("split=ok", action_item["summary"])
kinds = {item["kind"] for item in payload["items"]}
self.assertTrue({"task", "step", "artifact", "action"}.issubset(kinds))
if __name__ == "__main__":
unittest.main()

View File

@ -1,143 +1,143 @@
from __future__ import annotations
import tempfile
import unittest
from pathlib import Path
from biliup_next.app.session_delivery_service import SessionDeliveryService
from biliup_next.core.models import Task, TaskContext
# In-memory repository double for SessionDeliveryService tests. Records
# every write (context upserts, session bindings, action records, bulk
# bvid updates) so assertions can inspect them afterwards.
class FakeRepo:
def __init__(self, task: Task, context: TaskContext | None = None, contexts: list[TaskContext] | None = None) -> None:
self.task = task
self.tasks = {task.id: task}
self.context = context
# contexts defaults to [context] when a single context is supplied.
self.contexts = contexts or ([] if context is None else [context])
self.task_context_upserts: list[TaskContext] = []
self.session_binding_upserts = []
self.action_records = []
self.updated_session_bvid: tuple[str, str, str] | None = None
def get_task(self, task_id: str) -> Task | None:
return self.tasks.get(task_id)
def get_task_context(self, task_id: str) -> TaskContext | None:
return self.context if task_id == self.task.id else None
def upsert_task_context(self, context: TaskContext) -> None:
self.context = context
self.task_context_upserts.append(context)
def upsert_session_binding(self, binding) -> None:  # type: ignore[no-untyped-def]
self.session_binding_upserts.append(binding)
def add_action_record(self, record) -> None:  # type: ignore[no-untyped-def]
self.action_records.append(record)
def list_task_contexts_by_session_key(self, session_key: str) -> list[TaskContext]:
return [context for context in self.contexts if context.session_key == session_key]
# Returns the number of contexts matched, mirroring the production repo.
def update_session_full_video_bvid(self, session_key: str, full_video_bvid: str, updated_at: str) -> int:
self.updated_session_bvid = (session_key, full_video_bvid, updated_at)
return len(self.list_task_contexts_by_session_key(session_key))
def list_task_contexts_by_source_title(self, source_title: str) -> list[TaskContext]:
return [context for context in self.contexts if context.source_title == source_title]
# Tests for SessionDeliveryService: full-video webhook ingestion and
# session merge validation.
class SessionDeliveryServiceTests(unittest.TestCase):
# A webhook carrying session_key + bvid should rebind the context to the
# new session, persist the bvid to disk, and record an action.
def test_receive_full_video_webhook_updates_binding_context_and_action_record(self) -> None:
with tempfile.TemporaryDirectory() as tmpdir:
task = Task("task-1", "local_file", "/tmp/source.mp4", "task-title", "published", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
context = TaskContext(
id=None,
task_id="task-1",
session_key="task:task-1",
streamer="streamer",
room_id="room",
source_title="task-title",
segment_started_at=None,
segment_duration_seconds=None,
full_video_bvid=None,
created_at="2026-01-01T00:00:00+00:00",
updated_at="2026-01-01T00:00:00+00:00",
)
repo = FakeRepo(task, context=context, contexts=[context])
state = {"repo": repo, "settings": {"paths": {"session_dir": str(Path(tmpdir) / "session")}}}
result = SessionDeliveryService(state).receive_full_video_webhook(
{"session_key": "session-1", "source_title": "task-title", "full_video_bvid": "BVWEBHOOK123"}
)
self.assertEqual(result["updated_count"], 1)
# Context migrated from the placeholder "task:task-1" key to session-1.
self.assertEqual(repo.context.session_key, "session-1")
self.assertEqual(repo.context.full_video_bvid, "BVWEBHOOK123")
self.assertEqual(repo.session_binding_upserts[-1].full_video_bvid, "BVWEBHOOK123")
self.assertEqual(repo.action_records[-1].action_name, "webhook_full_video_uploaded")
# The bvid is also persisted to a per-task file on disk.
persisted_path = Path(result["tasks"][0]["path"])
self.assertTrue(persisted_path.exists())
self.assertEqual(persisted_path.read_text(encoding="utf-8"), "BVWEBHOOK123")
# Without a session_key, the webhook resolves the session via source_title
# and fans the bvid out to every sibling context in that session.
def test_receive_full_video_webhook_uses_source_title_to_expand_to_session(self) -> None:
with tempfile.TemporaryDirectory() as tmpdir:
task = Task("task-1", "local_file", "/tmp/source.mp4", "task-title", "published", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
anchor = TaskContext(
id=None,
task_id="task-1",
session_key="session-anchor",
streamer="streamer",
room_id="room",
source_title="anchor-title",
segment_started_at=None,
segment_duration_seconds=None,
full_video_bvid=None,
created_at="2026-01-01T00:00:00+00:00",
updated_at="2026-01-01T00:00:00+00:00",
)
sibling = TaskContext(
id=None,
task_id="task-2",
session_key="session-anchor",
streamer="streamer",
room_id="room",
source_title="sibling-title",
segment_started_at=None,
segment_duration_seconds=None,
full_video_bvid=None,
created_at="2026-01-01T00:00:00+00:00",
updated_at="2026-01-01T00:00:00+00:00",
)
repo = FakeRepo(task, context=anchor, contexts=[anchor, sibling])
repo.tasks["task-2"] = Task(
"task-2",
"local_file",
"/tmp/source-2.mp4",
"task-title-2",
"published",
"2026-01-01T00:00:00+00:00",
"2026-01-01T00:00:00+00:00",
)
state = {"repo": repo, "settings": {"paths": {"session_dir": str(Path(tmpdir) / "session")}}}
result = SessionDeliveryService(state).receive_full_video_webhook(
{"source_title": "anchor-title", "full_video_bvid": "BVWEBHOOK123"}
)
self.assertEqual(result["session_key"], "session-anchor")
# Both the anchor and its sibling context are updated.
self.assertEqual(result["updated_count"], 2)
self.assertTrue(any(binding.session_key == "session-anchor" for binding in repo.session_binding_upserts))
self.assertTrue(any(binding.source_title == "anchor-title" for binding in repo.session_binding_upserts))
# merge_session rejects a request whose task id list is empty/blank.
def test_merge_session_returns_error_when_task_ids_empty(self) -> None:
task = Task("task-1", "local_file", "/tmp/source.mp4", "task-title", "created", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
repo = FakeRepo(task)
state = {"repo": repo, "settings": {"paths": {"session_dir": "/tmp/session"}}}
result = SessionDeliveryService(state).merge_session("session-1", ["", " "])
self.assertEqual(result["error"]["code"], "TASK_IDS_EMPTY")
if __name__ == "__main__":
unittest.main()
from __future__ import annotations
import tempfile
import unittest
from pathlib import Path
from biliup_next.app.session_delivery_service import SessionDeliveryService
from biliup_next.core.models import Task, TaskContext
class FakeRepo:
    """In-memory repository double that records every write it receives.

    SessionDeliveryService reads tasks/contexts from it and the tests then
    assert against the recorded upserts, action records, and bvid updates.
    """

    def __init__(self, task: Task, context: TaskContext | None = None, contexts: list[TaskContext] | None = None) -> None:
        self.task = task
        self.tasks = {task.id: task}
        self.context = context
        # A single supplied context doubles as the full context list.
        if contexts:
            self.contexts = contexts
        else:
            self.contexts = [] if context is None else [context]
        self.task_context_upserts: list[TaskContext] = []
        self.session_binding_upserts = []
        self.action_records = []
        self.updated_session_bvid: tuple[str, str, str] | None = None

    def get_task(self, task_id: str) -> Task | None:
        return self.tasks.get(task_id)

    def get_task_context(self, task_id: str) -> TaskContext | None:
        if task_id != self.task.id:
            return None
        return self.context

    def upsert_task_context(self, context: TaskContext) -> None:
        self.context = context
        self.task_context_upserts.append(context)

    def upsert_session_binding(self, binding) -> None:  # type: ignore[no-untyped-def]
        self.session_binding_upserts.append(binding)

    def add_action_record(self, record) -> None:  # type: ignore[no-untyped-def]
        self.action_records.append(record)

    def list_task_contexts_by_session_key(self, session_key: str) -> list[TaskContext]:
        return [ctx for ctx in self.contexts if ctx.session_key == session_key]

    def update_session_full_video_bvid(self, session_key: str, full_video_bvid: str, updated_at: str) -> int:
        # Remember the last bulk update; report how many contexts matched.
        self.updated_session_bvid = (session_key, full_video_bvid, updated_at)
        matched = self.list_task_contexts_by_session_key(session_key)
        return len(matched)

    def list_task_contexts_by_source_title(self, source_title: str) -> list[TaskContext]:
        return [ctx for ctx in self.contexts if ctx.source_title == source_title]
# Tests for SessionDeliveryService: full-video webhook ingestion and
# session merge validation.
class SessionDeliveryServiceTests(unittest.TestCase):
# A webhook carrying session_key + bvid should rebind the context to the
# new session, persist the bvid to disk, and record an action.
def test_receive_full_video_webhook_updates_binding_context_and_action_record(self) -> None:
with tempfile.TemporaryDirectory() as tmpdir:
task = Task("task-1", "local_file", "/tmp/source.mp4", "task-title", "published", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
context = TaskContext(
id=None,
task_id="task-1",
session_key="task:task-1",
streamer="streamer",
room_id="room",
source_title="task-title",
segment_started_at=None,
segment_duration_seconds=None,
full_video_bvid=None,
created_at="2026-01-01T00:00:00+00:00",
updated_at="2026-01-01T00:00:00+00:00",
)
repo = FakeRepo(task, context=context, contexts=[context])
state = {"repo": repo, "settings": {"paths": {"session_dir": str(Path(tmpdir) / "session")}}}
result = SessionDeliveryService(state).receive_full_video_webhook(
{"session_key": "session-1", "source_title": "task-title", "full_video_bvid": "BVWEBHOOK123"}
)
self.assertEqual(result["updated_count"], 1)
# Context migrated from the placeholder "task:task-1" key to session-1.
self.assertEqual(repo.context.session_key, "session-1")
self.assertEqual(repo.context.full_video_bvid, "BVWEBHOOK123")
self.assertEqual(repo.session_binding_upserts[-1].full_video_bvid, "BVWEBHOOK123")
self.assertEqual(repo.action_records[-1].action_name, "webhook_full_video_uploaded")
# The bvid is also persisted to a per-task file on disk.
persisted_path = Path(result["tasks"][0]["path"])
self.assertTrue(persisted_path.exists())
self.assertEqual(persisted_path.read_text(encoding="utf-8"), "BVWEBHOOK123")
# Without a session_key, the webhook resolves the session via source_title
# and fans the bvid out to every sibling context in that session.
def test_receive_full_video_webhook_uses_source_title_to_expand_to_session(self) -> None:
with tempfile.TemporaryDirectory() as tmpdir:
task = Task("task-1", "local_file", "/tmp/source.mp4", "task-title", "published", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
anchor = TaskContext(
id=None,
task_id="task-1",
session_key="session-anchor",
streamer="streamer",
room_id="room",
source_title="anchor-title",
segment_started_at=None,
segment_duration_seconds=None,
full_video_bvid=None,
created_at="2026-01-01T00:00:00+00:00",
updated_at="2026-01-01T00:00:00+00:00",
)
sibling = TaskContext(
id=None,
task_id="task-2",
session_key="session-anchor",
streamer="streamer",
room_id="room",
source_title="sibling-title",
segment_started_at=None,
segment_duration_seconds=None,
full_video_bvid=None,
created_at="2026-01-01T00:00:00+00:00",
updated_at="2026-01-01T00:00:00+00:00",
)
repo = FakeRepo(task, context=anchor, contexts=[anchor, sibling])
repo.tasks["task-2"] = Task(
"task-2",
"local_file",
"/tmp/source-2.mp4",
"task-title-2",
"published",
"2026-01-01T00:00:00+00:00",
"2026-01-01T00:00:00+00:00",
)
state = {"repo": repo, "settings": {"paths": {"session_dir": str(Path(tmpdir) / "session")}}}
result = SessionDeliveryService(state).receive_full_video_webhook(
{"source_title": "anchor-title", "full_video_bvid": "BVWEBHOOK123"}
)
self.assertEqual(result["session_key"], "session-anchor")
# Both the anchor and its sibling context are updated.
self.assertEqual(result["updated_count"], 2)
self.assertTrue(any(binding.session_key == "session-anchor" for binding in repo.session_binding_upserts))
self.assertTrue(any(binding.source_title == "anchor-title" for binding in repo.session_binding_upserts))
# merge_session rejects a request whose task id list is empty/blank.
def test_merge_session_returns_error_when_task_ids_empty(self) -> None:
task = Task("task-1", "local_file", "/tmp/source.mp4", "task-title", "created", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
repo = FakeRepo(task)
state = {"repo": repo, "settings": {"paths": {"session_dir": "/tmp/session"}}}
result = SessionDeliveryService(state).merge_session("session-1", ["", " "])
self.assertEqual(result["error"]["code"], "TASK_IDS_EMPTY")
if __name__ == "__main__":
unittest.main()

View File

@ -2,6 +2,7 @@ from __future__ import annotations
import tempfile
import unittest
from unittest.mock import patch
from pathlib import Path
from biliup_next.core.config import SettingsService
@ -78,6 +79,146 @@ class SettingsServiceTests(unittest.TestCase):
self.assertTrue((config_dir / "settings.staged.json").exists())
self.assertEqual(bundle.settings["paths"]["cookies_file"], str((root / "runtime" / "cookies.json").resolve()))
# SettingsService.load must apply environment overrides (both legacy
# aliases like GROQ_API_KEY and the namespaced BILIUP_NEXT__GROUP__KEY
# form) before resolving relative paths against the project root.
def test_load_applies_environment_overrides_before_path_normalization(self) -> None:
with tempfile.TemporaryDirectory() as tmpdir:
root = Path(tmpdir)
config_dir = root / "config"
config_dir.mkdir(parents=True, exist_ok=True)
# Minimal schema covering every settings group the loader expects.
(config_dir / "settings.schema.json").write_text(
"""
{
"groups": {
"runtime": {
"database_path": {"type": "string", "default": "data/workspace/biliup_next.db"}
},
"paths": {
"stage_dir": {"type": "string", "default": "data/workspace/stage"},
"backup_dir": {"type": "string", "default": "data/workspace/backup"},
"session_dir": {"type": "string", "default": "data/workspace/session"},
"cookies_file": {"type": "string", "default": "runtime/cookies.json"},
"upload_config_file": {"type": "string", "default": "runtime/upload_config.json"}
},
"ingest": {
"ffprobe_bin": {"type": "string", "default": "ffprobe"},
"yt_dlp_cmd": {"type": "string", "default": "yt-dlp"},
"yt_dlp_format": {"type": "string", "default": ""}
},
"transcribe": {
"groq_api_key": {"type": "string", "default": "", "sensitive": true},
"ffmpeg_bin": {"type": "string", "default": "ffmpeg"}
},
"split": {
"ffmpeg_bin": {"type": "string", "default": "ffmpeg"}
},
"song_detect": {
"codex_cmd": {"type": "string", "default": "codex"},
"qwen_cmd": {"type": "string", "default": "qwen"}
},
"publish": {
"biliup_path": {"type": "string", "default": "runtime/biliup"},
"cookie_file": {"type": "string", "default": "runtime/cookies.json"}
},
"collection": {
"season_id_a": {"type": "integer", "default": 0},
"season_id_b": {"type": "integer", "default": 0}
}
}
}
""",
encoding="utf-8",
)
(config_dir / "settings.standalone.example.json").write_text(
"""
{
"runtime": {"database_path": "data/workspace/biliup_next.db"},
"paths": {
"stage_dir": "data/workspace/stage",
"backup_dir": "data/workspace/backup",
"session_dir": "data/workspace/session",
"cookies_file": "runtime/cookies.json",
"upload_config_file": "runtime/upload_config.json"
},
"ingest": {"ffprobe_bin": "ffprobe", "yt_dlp_cmd": "yt-dlp", "yt_dlp_format": ""},
"transcribe": {"groq_api_key": "", "ffmpeg_bin": "ffmpeg"},
"split": {"ffmpeg_bin": "ffmpeg"},
"song_detect": {"codex_cmd": "codex", "qwen_cmd": "qwen"},
"publish": {"biliup_path": "runtime/biliup", "cookie_file": "runtime/cookies.json"},
"collection": {"season_id_a": 0, "season_id_b": 0}
}
""",
encoding="utf-8",
)
# clear=True isolates the test from the real process environment.
with patch.dict(
"os.environ",
{
"GROQ_API_KEY": "gsk_test",
"COLLECTION_SEASON_ID_A": "7196643",
"BILIUP_NEXT__COLLECTION__SEASON_ID_B": "7196624",
"BILIUP_NEXT__PATHS__STAGE_DIR": "data/custom-stage",
},
clear=True,
):
bundle = SettingsService(root).load()
# String env values are coerced per schema ("integer" -> int).
self.assertEqual(bundle.settings["transcribe"]["groq_api_key"], "gsk_test")
self.assertEqual(bundle.settings["collection"]["season_id_a"], 7196643)
self.assertEqual(bundle.settings["collection"]["season_id_b"], 7196624)
# The overridden relative stage_dir is still resolved under root,
# proving the override ran before path normalization.
self.assertEqual(bundle.settings["paths"]["stage_dir"], str((root / "data" / "custom-stage").resolve()))
def test_empty_environment_values_do_not_override_settings(self) -> None:
    """An environment variable that is present but empty must not override file values.

    The example settings file provides ``transcribe.groq_api_key = "from-file"``;
    ``GROQ_API_KEY`` is then set to ``""``.  After loading, the file value must
    survive — an empty env value is treated as "not provided".
    """
    with tempfile.TemporaryDirectory() as tmpdir:
        root = Path(tmpdir)
        config_dir = root / "config"
        config_dir.mkdir(parents=True, exist_ok=True)
        # Minimal schema covering every settings group the loader validates.
        (config_dir / "settings.schema.json").write_text(
            """
            {
                "groups": {
                    "runtime": {"database_path": {"type": "string", "default": "data/workspace/biliup_next.db"}},
                    "paths": {
                        "stage_dir": {"type": "string", "default": "data/workspace/stage"},
                        "backup_dir": {"type": "string", "default": "data/workspace/backup"},
                        "session_dir": {"type": "string", "default": "data/workspace/session"},
                        "cookies_file": {"type": "string", "default": "runtime/cookies.json"},
                        "upload_config_file": {"type": "string", "default": "runtime/upload_config.json"}
                    },
                    "ingest": {"ffprobe_bin": {"type": "string", "default": "ffprobe"}, "yt_dlp_cmd": {"type": "string", "default": "yt-dlp"}},
                    "transcribe": {"groq_api_key": {"type": "string", "default": ""}, "ffmpeg_bin": {"type": "string", "default": "ffmpeg"}},
                    "split": {"ffmpeg_bin": {"type": "string", "default": "ffmpeg"}},
                    "song_detect": {"codex_cmd": {"type": "string", "default": "codex"}, "qwen_cmd": {"type": "string", "default": "qwen"}},
                    "publish": {"biliup_path": {"type": "string", "default": "runtime/biliup"}, "cookie_file": {"type": "string", "default": "runtime/cookies.json"}}
                }
            }
            """,
            encoding="utf-8",
        )
        # Example settings: groq_api_key carries a concrete, non-empty file value.
        (config_dir / "settings.standalone.example.json").write_text(
            """
            {
                "runtime": {"database_path": "data/workspace/biliup_next.db"},
                "paths": {
                    "stage_dir": "data/workspace/stage",
                    "backup_dir": "data/workspace/backup",
                    "session_dir": "data/workspace/session",
                    "cookies_file": "runtime/cookies.json",
                    "upload_config_file": "runtime/upload_config.json"
                },
                "ingest": {"ffprobe_bin": "ffprobe", "yt_dlp_cmd": "yt-dlp"},
                "transcribe": {"groq_api_key": "from-file", "ffmpeg_bin": "ffmpeg"},
                "split": {"ffmpeg_bin": "ffmpeg"},
                "song_detect": {"codex_cmd": "codex", "qwen_cmd": "qwen"},
                "publish": {"biliup_path": "runtime/biliup", "cookie_file": "runtime/cookies.json"}
            }
            """,
            encoding="utf-8",
        )
        # clear=True wipes the real environment so only the empty key is visible.
        with patch.dict("os.environ", {"GROQ_API_KEY": ""}, clear=True):
            bundle = SettingsService(root).load()
        # The empty env value must not clobber the file-provided key.
        self.assertEqual(bundle.settings["transcribe"]["groq_api_key"], "from-file")
if __name__ == "__main__":
unittest.main()

View File

@ -1,21 +1,52 @@
from __future__ import annotations
from __future__ import annotations
import json
import os
import tempfile
import unittest
from pathlib import Path
from unittest.mock import patch
from biliup_next.core.models import Artifact, Task, utc_now_iso
from biliup_next.infra.adapters.codex_cli import CodexCliAdapter
from biliup_next.modules.song_detect.providers.codex import CodexSongDetector
from biliup_next.modules.song_detect.providers.qwen_cli import QwenCliSongDetector
class FakeQwenCliAdapter:
    """Test double for the qwen CLI adapter.

    Records the ``qwen_cmd`` it was invoked with and drops a canned
    ``songs.json`` into the work directory, then reports a configurable
    return code via an anonymous result object.
    """

    def __init__(self, returncode: int = 0) -> None:
        # Exit code the fake "CLI run" should report.
        self.returncode = returncode
        # Last qwen_cmd value the provider passed in, captured for assertions.
        self.last_qwen_cmd: str | None = None

    def run_song_detect(self, *, qwen_cmd: str, work_dir: Path, prompt: str):  # noqa: ANN001
        self.last_qwen_cmd = qwen_cmd
        # Canned detection payload mimicking real CLI output.
        payload = {
            "songs": [
                {
                    "start": "00:01:23,000",
                    "end": "00:03:45,000",
                    "title": "测试歌曲",
                    "artist": "测试歌手",
                    "confidence": 0.93,
                    "evidence": "歌词命中",
                }
            ]
        }
        target = work_dir / "songs.json"
        target.write_text(json.dumps(payload, ensure_ascii=False), encoding="utf-8")
        # Anonymous object shaped like a subprocess result.
        return type("Result", (), {"returncode": self.returncode, "stdout": "ok", "stderr": ""})()
class FakeCodexCliAdapter:
def __init__(self, returncode: int = 0) -> None:
self.returncode = returncode
self.last_qwen_cmd: str | None = None
def run_song_detect(self, *, qwen_cmd: str, work_dir: Path, prompt: str): # noqa: ANN001
self.last_qwen_cmd = qwen_cmd
def run_song_detect(self, *, codex_cmd: str, work_dir: Path, prompt: str): # noqa: ANN001
songs_json_path = work_dir / "songs.json"
songs_json_path.write_text(
json.dumps(
@ -35,16 +66,49 @@ class FakeQwenCliAdapter:
),
encoding="utf-8",
)
return type("Result", (), {"returncode": self.returncode, "stdout": "ok", "stderr": ""})()
return type("Result", (), {"returncode": self.returncode, "stdout": "codex stdout", "stderr": "codex stderr"})()
class SongDetectProviderTests(unittest.TestCase):
    """Tests for the qwen-CLI song-detect provider's artifact generation."""

    def test_qwen_cli_provider_generates_json_and_txt_artifacts(self) -> None:
        """The provider must emit songs.json and songs.txt artifacts tagged qwen_cli."""
        with tempfile.TemporaryDirectory() as tmpdir:
            work_dir = Path(tmpdir)
            subtitle_path = work_dir / "subtitle.srt"
            # Minimal single-cue SRT input for the detector.
            subtitle_path.write_text("1\n00:00:00,000 --> 00:00:03,000\n测试字幕\n", encoding="utf-8")
            provider = QwenCliSongDetector(adapter=FakeQwenCliAdapter())
            task = Task(
                id="task-1",
                source_type="local_file",
                source_path=str(work_dir / "video.mp4"),
                title="task-1",
                status="transcribed",
                created_at=utc_now_iso(),
                updated_at=utc_now_iso(),
            )
            subtitle = Artifact(
                id=None,
                task_id=task.id,
                artifact_type="subtitle_srt",
                path=str(subtitle_path),
                metadata_json=None,
                created_at=utc_now_iso(),
            )
            songs_json, songs_txt = provider.detect(task, subtitle, {"qwen_cmd": "qwen"})
            # Both artifacts are attributed to the qwen_cli provider in metadata.
            self.assertEqual(json.loads(songs_json.metadata_json)["provider"], "qwen_cli")
            self.assertEqual(json.loads(songs_txt.metadata_json)["provider"], "qwen_cli")
            # Both artifact files are persisted on disk.
            self.assertTrue(Path(songs_json.path).exists())
            self.assertTrue(Path(songs_txt.path).exists())
            # The txt rendering contains the detected song title from the fake adapter.
            self.assertIn("测试歌曲", Path(songs_txt.path).read_text(encoding="utf-8"))
class SongDetectProviderTests(unittest.TestCase):
def test_qwen_cli_provider_generates_json_and_txt_artifacts(self) -> None:
def test_codex_provider_writes_execution_output_to_session_log(self) -> None:
with tempfile.TemporaryDirectory() as tmpdir:
work_dir = Path(tmpdir)
subtitle_path = work_dir / "subtitle.srt"
subtitle_path.write_text("1\n00:00:00,000 --> 00:00:03,000\n测试字幕\n", encoding="utf-8")
provider = QwenCliSongDetector(adapter=FakeQwenCliAdapter())
provider = CodexSongDetector(adapter=FakeCodexCliAdapter())
task = Task(
id="task-1",
@ -64,14 +128,42 @@ class SongDetectProviderTests(unittest.TestCase):
created_at=utc_now_iso(),
)
songs_json, songs_txt = provider.detect(task, subtitle, {"qwen_cmd": "qwen"})
songs_json, songs_txt = provider.detect(task, subtitle, {"codex_cmd": "codex"})
self.assertEqual(json.loads(songs_json.metadata_json)["provider"], "qwen_cli")
self.assertEqual(json.loads(songs_txt.metadata_json)["provider"], "qwen_cli")
self.assertTrue(Path(songs_json.path).exists())
self.assertTrue(Path(songs_txt.path).exists())
self.assertIn("测试歌曲", Path(songs_txt.path).read_text(encoding="utf-8"))
json_metadata = json.loads(songs_json.metadata_json)
txt_metadata = json.loads(songs_txt.metadata_json)
self.assertEqual(json_metadata["provider"], "codex")
self.assertEqual(txt_metadata["provider"], "codex")
self.assertNotIn("execution", json_metadata)
codex_log = work_dir / "codex.log"
self.assertTrue(codex_log.exists())
log_text = codex_log.read_text(encoding="utf-8")
self.assertIn("returncode: 0", log_text)
self.assertIn("codex stdout", log_text)
self.assertIn("codex stderr", log_text)
def test_codex_cli_adapter_disables_inner_sandbox_and_normalizes_proxy_env(self) -> None:
with tempfile.TemporaryDirectory() as tmpdir:
calls = []
if __name__ == "__main__":
unittest.main()
def fake_run(cmd, **kwargs): # noqa: ANN001
calls.append((cmd, kwargs))
return type("Result", (), {"returncode": 0, "stdout": "", "stderr": ""})()
with patch.dict(os.environ, {"HTTPS_PROXY": "192.168.1.100:7897"}, clear=True):
with patch("subprocess.run", side_effect=fake_run):
CodexCliAdapter().run_song_detect(
codex_cmd="codex",
work_dir=Path(tmpdir),
prompt="detect songs",
)
cmd, kwargs = calls[0]
self.assertIn("--dangerously-bypass-approvals-and-sandbox", cmd)
self.assertNotIn("--full-auto", cmd)
self.assertNotIn("workspace-write", cmd)
self.assertEqual(kwargs["env"]["HTTPS_PROXY"], "http://192.168.1.100:7897")
if __name__ == "__main__":
unittest.main()

View File

@ -0,0 +1,103 @@
from __future__ import annotations
import unittest
from types import SimpleNamespace
from biliup_next.app.retry_meta import retry_meta_for_step
from biliup_next.app.task_engine import next_runnable_step
from biliup_next.app.task_policies import resolve_failure
from biliup_next.core.errors import ModuleError
from biliup_next.core.models import TaskStep
from biliup_next.modules.song_detect.providers.qwen_cli import QwenCliSongDetector
class _Repo:
def __init__(self) -> None:
self.steps = [TaskStep(None, "task-1", "song_detect", "running", None, None, 0, None, None)]
self.step_updates: list[tuple] = []
self.task_updates: list[tuple] = []
def list_steps(self, task_id: str): # noqa: ANN001
return list(self.steps)
def get_task(self, task_id: str): # noqa: ANN001
return SimpleNamespace(id=task_id, status="running")
def update_step_status(self, task_id: str, step_name: str, status: str, **kwargs) -> None: # noqa: ANN001
self.step_updates.append((task_id, step_name, status, kwargs))
self.steps = [
TaskStep(
None,
task_id,
step_name,
status,
kwargs.get("error_code"),
kwargs.get("error_message"),
kwargs.get("retry_count", 0),
kwargs.get("started_at"),
kwargs.get("finished_at"),
)
]
def update_task_status(self, task_id: str, status: str, updated_at: str) -> None:
self.task_updates.append((task_id, status, updated_at))
class SongDetectRetryPolicyTests(unittest.TestCase):
    """Retry-policy behaviour for the song_detect step."""

    def test_retry_meta_reports_wait_window_for_song_detect(self) -> None:
        """A retryable failure that finished in 2099 is not yet due; wait is 10 min."""
        # finished_at far in the future guarantees the retry window has not elapsed.
        step = TaskStep(None, "task-1", "song_detect", "failed_retryable", "ERR", "boom", 1, None, "2099-01-01T00:00:00+00:00")
        payload = retry_meta_for_step(step, {"song_detect": {"retry_schedule_minutes": [10]}})
        self.assertIsNotNone(payload)
        self.assertFalse(payload["retry_due"])
        # 10 minutes expressed in seconds.
        self.assertEqual(payload["retry_wait_seconds"], 600)

    def test_next_runnable_step_waits_for_retryable_song_detect(self) -> None:
        """While song_detect awaits its retry window, no step runs and a wait payload is returned."""
        task = SimpleNamespace(id="task-1", status="failed_retryable")
        steps = {
            "song_detect": TaskStep(None, "task-1", "song_detect", "failed_retryable", "ERR", "boom", 1, None, "2099-01-01T00:00:00+00:00"),
        }
        state = {
            "settings": {
                "transcribe": {},
                "song_detect": {"retry_schedule_minutes": [10]},
                "comment": {"enabled": True},
                "collection": {"enabled": True},
                "paths": {},
                "publish": {},
            }
        }
        step_name, waiting_payload = next_runnable_step(task, steps, state)
        self.assertIsNone(step_name)
        self.assertIsNotNone(waiting_payload)
        self.assertEqual(waiting_payload["step"], "song_detect")

    def test_resolve_failure_adds_song_detect_retry_delay(self) -> None:
        """The first retryable failure picks the first schedule entry (5 min = 300 s)."""
        repo = _Repo()
        task = SimpleNamespace(id="task-1", status="running")
        state = {
            "settings": {
                "transcribe": {},
                "song_detect": {"retry_schedule_minutes": [5, 10]},
                "publish": {},
                "comment": {},
                "paths": {},
                "collection": {"enabled": True},
            }
        }
        result = resolve_failure(task, repo, state, ModuleError(code="SONG_DETECT_FAILED", message="boom", retryable=True))
        self.assertEqual(result["payload"]["retry_status"], "failed_retryable")
        self.assertEqual(result["payload"]["next_retry_delay_seconds"], 300)

    def test_qwen_auth_errors_are_not_retryable(self) -> None:
        """401/token-expired CLI output is classified as an auth error; generic failures are not."""
        self.assertTrue(QwenCliSongDetector._is_auth_error("[API Error: 401 invalid access token or token expired]"))
        self.assertFalse(QwenCliSongDetector._is_auth_error("temporary network failure"))


if __name__ == "__main__":
    unittest.main()

View File

@ -1,143 +1,143 @@
from __future__ import annotations
import tempfile
import unittest
from pathlib import Path
from unittest.mock import patch
from biliup_next.app.task_actions import bind_full_video_action, merge_session_action, rebind_session_full_video_action
from biliup_next.core.models import Task, TaskContext
class FakeRepo:
    """Repository stub that tracks context upserts, session bindings, and bvid updates."""

    def __init__(self, task: Task, context: TaskContext | None = None, contexts: list[TaskContext] | None = None) -> None:
        self.task = task
        self.context = context
        # Explicit fallback chain: use the given list, else wrap the single
        # context, else start empty (matches `contexts or (...)` semantics).
        if contexts:
            self.contexts = contexts
        elif context is None:
            self.contexts = []
        else:
            self.contexts = [context]
        self.task_context_upserts: list[TaskContext] = []
        self.session_binding_upserts = []
        # (session_key, full_video_bvid, updated_at) of the last bulk update.
        self.updated_session_bvid: tuple[str, str, str] | None = None

    def get_task(self, task_id: str) -> Task | None:
        if task_id == self.task.id:
            return self.task
        return None

    def get_task_context(self, task_id: str) -> TaskContext | None:
        if task_id == self.task.id:
            return self.context
        return None

    def upsert_task_context(self, context: TaskContext) -> None:
        self.context = context
        self.task_context_upserts.append(context)

    def upsert_session_binding(self, binding) -> None:  # type: ignore[no-untyped-def]
        self.session_binding_upserts.append(binding)

    def add_action_record(self, record) -> None:  # type: ignore[no-untyped-def]
        return None

    def list_task_contexts_by_session_key(self, session_key: str) -> list[TaskContext]:
        return [ctx for ctx in self.contexts if ctx.session_key == session_key]

    def update_session_full_video_bvid(self, session_key: str, full_video_bvid: str, updated_at: str) -> int:
        self.updated_session_bvid = (session_key, full_video_bvid, updated_at)
        return len(self.list_task_contexts_by_session_key(session_key))

    def list_task_contexts_by_source_title(self, source_title: str) -> list[TaskContext]:
        return [ctx for ctx in self.contexts if ctx.source_title == source_title]
class TaskActionsTests(unittest.TestCase):
    """Tests for bvid binding, session-wide rebinding, and session merge actions."""

    def test_bind_full_video_action_persists_context_binding_and_file(self) -> None:
        """Binding trims the bvid, updates context and binding, and writes a file."""
        with tempfile.TemporaryDirectory() as tmpdir:
            task = Task("task-1", "local_file", "/tmp/source.mp4", "task-title", "created", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
            repo = FakeRepo(task)
            state = {
                "repo": repo,
                "settings": {"paths": {"session_dir": str(Path(tmpdir) / "session")}},
            }
            with patch("biliup_next.app.task_actions.ensure_initialized", return_value=state), patch(
                "biliup_next.app.task_actions.record_task_action"
            ):
                # Input is deliberately padded with whitespace to verify trimming.
                result = bind_full_video_action("task-1", " BV1234567890 ")
            self.assertEqual(result["full_video_bvid"], "BV1234567890")
            self.assertEqual(repo.context.full_video_bvid, "BV1234567890")
            self.assertEqual(len(repo.session_binding_upserts), 1)
            # The bvid is also persisted to a file whose path is returned.
            self.assertTrue(Path(result["path"]).exists())
            self.assertEqual(Path(result["path"]).read_text(encoding="utf-8"), "BV1234567890")

    def test_rebind_session_full_video_action_updates_binding_and_all_task_files(self) -> None:
        """Rebinding a session replaces the bvid everywhere: context, binding, and file."""
        with tempfile.TemporaryDirectory() as tmpdir:
            task = Task("task-1", "local_file", "/tmp/source.mp4", "task-title", "published", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
            # Context starts bound to the stale bvid "BVOLD".
            context = TaskContext(
                id=None,
                task_id="task-1",
                session_key="session-1",
                streamer="streamer",
                room_id="room",
                source_title="task-title",
                segment_started_at=None,
                segment_duration_seconds=None,
                full_video_bvid="BVOLD",
                created_at="2026-01-01T00:00:00+00:00",
                updated_at="2026-01-01T00:00:00+00:00",
            )
            repo = FakeRepo(task, context=context, contexts=[context])
            state = {
                "repo": repo,
                "settings": {"paths": {"session_dir": str(Path(tmpdir) / "session")}},
            }
            with patch("biliup_next.app.task_actions.ensure_initialized", return_value=state), patch(
                "biliup_next.app.task_actions.record_task_action"
            ):
                result = rebind_session_full_video_action("session-1", "BVNEW1234567")
            self.assertEqual(result["updated_count"], 1)
            self.assertEqual(repo.context.full_video_bvid, "BVNEW1234567")
            # The bulk session update was invoked and a new binding was upserted.
            self.assertIsNotNone(repo.updated_session_bvid)
            self.assertEqual(len(repo.session_binding_upserts), 1)
            self.assertEqual(repo.session_binding_upserts[-1].full_video_bvid, "BVNEW1234567")
            # Every task file in the session now carries the new bvid.
            persisted_path = Path(result["tasks"][0]["path"])
            self.assertTrue(persisted_path.exists())
            self.assertEqual(persisted_path.read_text(encoding="utf-8"), "BVNEW1234567")

    def test_merge_session_action_reuses_persist_path_for_inherited_bvid(self) -> None:
        """Merging into a session inherits that session's existing bvid and persists it."""
        with tempfile.TemporaryDirectory() as tmpdir:
            task = Task("task-1", "local_file", "/tmp/source.mp4", "task-title", "created", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
            # A different task in the target session already carries a bvid.
            existing_context = TaskContext(
                id=None,
                task_id="existing-task",
                session_key="session-1",
                streamer="streamer",
                room_id="room",
                source_title="existing-title",
                segment_started_at=None,
                segment_duration_seconds=None,
                full_video_bvid="BVINHERITED123",
                created_at="2026-01-01T00:00:00+00:00",
                updated_at="2026-01-01T00:00:00+00:00",
            )
            repo = FakeRepo(task, contexts=[existing_context])
            state = {
                "repo": repo,
                "settings": {"paths": {"session_dir": str(Path(tmpdir) / "session")}},
            }
            with patch("biliup_next.app.task_actions.ensure_initialized", return_value=state), patch(
                "biliup_next.app.task_actions.record_task_action"
            ):
                result = merge_session_action("session-1", ["task-1"])
            self.assertEqual(result["merged_count"], 1)
            # The merged task inherits the session's bvid and gets a binding.
            self.assertEqual(repo.context.full_video_bvid, "BVINHERITED123")
            self.assertEqual(len(repo.session_binding_upserts), 1)
            self.assertEqual(repo.session_binding_upserts[0].full_video_bvid, "BVINHERITED123")
            # The inherited bvid is persisted to the task's file as well.
            self.assertIn("path", result["tasks"][0])
            persisted_path = Path(result["tasks"][0]["path"])
            self.assertTrue(persisted_path.exists())
            self.assertEqual(persisted_path.read_text(encoding="utf-8"), "BVINHERITED123")
if __name__ == "__main__":
unittest.main()
from __future__ import annotations
import tempfile
import unittest
from pathlib import Path
from unittest.mock import patch
from biliup_next.app.task_actions import bind_full_video_action, merge_session_action, rebind_session_full_video_action
from biliup_next.core.models import Task, TaskContext
class FakeRepo:
def __init__(self, task: Task, context: TaskContext | None = None, contexts: list[TaskContext] | None = None) -> None:
self.task = task
self.context = context
self.contexts = contexts or ([] if context is None else [context])
self.task_context_upserts: list[TaskContext] = []
self.session_binding_upserts = []
self.updated_session_bvid: tuple[str, str, str] | None = None
def get_task(self, task_id: str) -> Task | None:
return self.task if task_id == self.task.id else None
def get_task_context(self, task_id: str) -> TaskContext | None:
return self.context if task_id == self.task.id else None
def upsert_task_context(self, context: TaskContext) -> None:
self.context = context
self.task_context_upserts.append(context)
def upsert_session_binding(self, binding) -> None: # type: ignore[no-untyped-def]
self.session_binding_upserts.append(binding)
def add_action_record(self, record) -> None: # type: ignore[no-untyped-def]
return None
def list_task_contexts_by_session_key(self, session_key: str) -> list[TaskContext]:
return [context for context in self.contexts if context.session_key == session_key]
def update_session_full_video_bvid(self, session_key: str, full_video_bvid: str, updated_at: str) -> int:
self.updated_session_bvid = (session_key, full_video_bvid, updated_at)
return len(self.list_task_contexts_by_session_key(session_key))
def list_task_contexts_by_source_title(self, source_title: str) -> list[TaskContext]:
return [context for context in self.contexts if context.source_title == source_title]
class TaskActionsTests(unittest.TestCase):
def test_bind_full_video_action_persists_context_binding_and_file(self) -> None:
with tempfile.TemporaryDirectory() as tmpdir:
task = Task("task-1", "local_file", "/tmp/source.mp4", "task-title", "created", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
repo = FakeRepo(task)
state = {
"repo": repo,
"settings": {"paths": {"session_dir": str(Path(tmpdir) / "session")}},
}
with patch("biliup_next.app.task_actions.ensure_initialized", return_value=state), patch(
"biliup_next.app.task_actions.record_task_action"
):
result = bind_full_video_action("task-1", " BV1234567890 ")
self.assertEqual(result["full_video_bvid"], "BV1234567890")
self.assertEqual(repo.context.full_video_bvid, "BV1234567890")
self.assertEqual(len(repo.session_binding_upserts), 1)
self.assertTrue(Path(result["path"]).exists())
self.assertEqual(Path(result["path"]).read_text(encoding="utf-8"), "BV1234567890")
def test_rebind_session_full_video_action_updates_binding_and_all_task_files(self) -> None:
with tempfile.TemporaryDirectory() as tmpdir:
task = Task("task-1", "local_file", "/tmp/source.mp4", "task-title", "published", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
context = TaskContext(
id=None,
task_id="task-1",
session_key="session-1",
streamer="streamer",
room_id="room",
source_title="task-title",
segment_started_at=None,
segment_duration_seconds=None,
full_video_bvid="BVOLD",
created_at="2026-01-01T00:00:00+00:00",
updated_at="2026-01-01T00:00:00+00:00",
)
repo = FakeRepo(task, context=context, contexts=[context])
state = {
"repo": repo,
"settings": {"paths": {"session_dir": str(Path(tmpdir) / "session")}},
}
with patch("biliup_next.app.task_actions.ensure_initialized", return_value=state), patch(
"biliup_next.app.task_actions.record_task_action"
):
result = rebind_session_full_video_action("session-1", "BVNEW1234567")
self.assertEqual(result["updated_count"], 1)
self.assertEqual(repo.context.full_video_bvid, "BVNEW1234567")
self.assertIsNotNone(repo.updated_session_bvid)
self.assertEqual(len(repo.session_binding_upserts), 1)
self.assertEqual(repo.session_binding_upserts[-1].full_video_bvid, "BVNEW1234567")
persisted_path = Path(result["tasks"][0]["path"])
self.assertTrue(persisted_path.exists())
self.assertEqual(persisted_path.read_text(encoding="utf-8"), "BVNEW1234567")
def test_merge_session_action_reuses_persist_path_for_inherited_bvid(self) -> None:
with tempfile.TemporaryDirectory() as tmpdir:
task = Task("task-1", "local_file", "/tmp/source.mp4", "task-title", "created", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
existing_context = TaskContext(
id=None,
task_id="existing-task",
session_key="session-1",
streamer="streamer",
room_id="room",
source_title="existing-title",
segment_started_at=None,
segment_duration_seconds=None,
full_video_bvid="BVINHERITED123",
created_at="2026-01-01T00:00:00+00:00",
updated_at="2026-01-01T00:00:00+00:00",
)
repo = FakeRepo(task, contexts=[existing_context])
state = {
"repo": repo,
"settings": {"paths": {"session_dir": str(Path(tmpdir) / "session")}},
}
with patch("biliup_next.app.task_actions.ensure_initialized", return_value=state), patch(
"biliup_next.app.task_actions.record_task_action"
):
result = merge_session_action("session-1", ["task-1"])
self.assertEqual(result["merged_count"], 1)
self.assertEqual(repo.context.full_video_bvid, "BVINHERITED123")
self.assertEqual(len(repo.session_binding_upserts), 1)
self.assertEqual(repo.session_binding_upserts[0].full_video_bvid, "BVINHERITED123")
self.assertIn("path", result["tasks"][0])
persisted_path = Path(result["tasks"][0]["path"])
self.assertTrue(persisted_path.exists())
self.assertEqual(persisted_path.read_text(encoding="utf-8"), "BVINHERITED123")
if __name__ == "__main__":
unittest.main()

View File

@ -1,46 +1,46 @@
from __future__ import annotations
import unittest
from types import SimpleNamespace
from unittest.mock import patch
from biliup_next.app.task_control_service import TaskControlService
class TaskControlServiceTests(unittest.TestCase):
    """Tests that TaskControlService delegates to process_task / TaskResetService."""

    def test_run_task_delegates_to_process_task(self) -> None:
        """run_task forwards the task id to process_task and returns its payload."""
        state = {"repo": object(), "settings": {"paths": {"session_dir": "/tmp/session"}}}
        with patch("biliup_next.app.task_control_service.process_task", return_value={"processed": [{"task_id": "task-1"}]}) as process_mock:
            result = TaskControlService(state).run_task("task-1")
        self.assertEqual(result["processed"][0]["task_id"], "task-1")
        process_mock.assert_called_once_with("task-1")

    def test_retry_step_delegates_with_reset_step(self) -> None:
        """retry_step forwards the step name via the reset_step keyword."""
        state = {"repo": object(), "settings": {"paths": {"session_dir": "/tmp/session"}}}
        with patch("biliup_next.app.task_control_service.process_task", return_value={"processed": [{"step": "publish"}]}) as process_mock:
            result = TaskControlService(state).retry_step("task-1", "publish")
        self.assertEqual(result["processed"][0]["step"], "publish")
        process_mock.assert_called_once_with("task-1", reset_step="publish")

    def test_reset_to_step_combines_reset_and_run_payloads(self) -> None:
        """reset_to_step returns both the reset payload and the subsequent run payload."""
        state = {"repo": object(), "settings": {"paths": {"session_dir": "/tmp/session"}}}
        reset_service = SimpleNamespace(reset_to_step=lambda task_id, step_name: {"task_id": task_id, "reset_to": step_name})
        with patch("biliup_next.app.task_control_service.TaskResetService", return_value=reset_service) as reset_cls:
            with patch.object(reset_service, "reset_to_step", return_value={"task_id": "task-1", "reset_to": "split"}) as reset_mock:
                with patch("biliup_next.app.task_control_service.process_task", return_value={"processed": [{"task_id": "task-1"}]}) as process_mock:
                    result = TaskControlService(state).reset_to_step("task-1", "split")
        # The combined payload exposes both phases under "reset" and "run".
        self.assertEqual(result["reset"]["reset_to"], "split")
        self.assertEqual(result["run"]["processed"][0]["task_id"], "task-1")
        reset_cls.assert_called_once()
        reset_mock.assert_called_once_with("task-1", "split")
        process_mock.assert_called_once_with("task-1")
if __name__ == "__main__":
unittest.main()
from __future__ import annotations
import unittest
from types import SimpleNamespace
from unittest.mock import patch
from biliup_next.app.task_control_service import TaskControlService
class TaskControlServiceTests(unittest.TestCase):
def test_run_task_delegates_to_process_task(self) -> None:
state = {"repo": object(), "settings": {"paths": {"session_dir": "/tmp/session"}}}
with patch("biliup_next.app.task_control_service.process_task", return_value={"processed": [{"task_id": "task-1"}]}) as process_mock:
result = TaskControlService(state).run_task("task-1")
self.assertEqual(result["processed"][0]["task_id"], "task-1")
process_mock.assert_called_once_with("task-1")
def test_retry_step_delegates_with_reset_step(self) -> None:
state = {"repo": object(), "settings": {"paths": {"session_dir": "/tmp/session"}}}
with patch("biliup_next.app.task_control_service.process_task", return_value={"processed": [{"step": "publish"}]}) as process_mock:
result = TaskControlService(state).retry_step("task-1", "publish")
self.assertEqual(result["processed"][0]["step"], "publish")
process_mock.assert_called_once_with("task-1", reset_step="publish")
def test_reset_to_step_combines_reset_and_run_payloads(self) -> None:
state = {"repo": object(), "settings": {"paths": {"session_dir": "/tmp/session"}}}
reset_service = SimpleNamespace(reset_to_step=lambda task_id, step_name: {"task_id": task_id, "reset_to": step_name})
with patch("biliup_next.app.task_control_service.TaskResetService", return_value=reset_service) as reset_cls:
with patch.object(reset_service, "reset_to_step", return_value={"task_id": "task-1", "reset_to": "split"}) as reset_mock:
with patch("biliup_next.app.task_control_service.process_task", return_value={"processed": [{"task_id": "task-1"}]}) as process_mock:
result = TaskControlService(state).reset_to_step("task-1", "split")
self.assertEqual(result["reset"]["reset_to"], "split")
self.assertEqual(result["run"]["processed"][0]["task_id"], "task-1")
reset_cls.assert_called_once()
reset_mock.assert_called_once_with("task-1", "split")
process_mock.assert_called_once_with("task-1")
if __name__ == "__main__":
unittest.main()

View File

@ -1,133 +1,134 @@
from __future__ import annotations
from __future__ import annotations
import unittest
from types import SimpleNamespace
from biliup_next.app.task_engine import infer_error_step_name, next_runnable_step
from biliup_next.core.models import TaskStep
class TaskEngineTests(unittest.TestCase):
def test_infer_error_step_name_prefers_running_step(self) -> None:
task = SimpleNamespace(status="running")
steps = {
"transcribe": TaskStep(None, "task-1", "transcribe", "running", None, None, 0, None, None),
"song_detect": TaskStep(None, "task-1", "song_detect", "pending", None, None, 0, None, None),
}
self.assertEqual(infer_error_step_name(task, steps), "transcribe")
def test_infer_error_step_name_prefers_running_step(self) -> None:
task = SimpleNamespace(status="running")
steps = {
"transcribe": TaskStep(None, "task-1", "transcribe", "running", None, None, 0, None, None),
"song_detect": TaskStep(None, "task-1", "song_detect", "pending", None, None, 0, None, None),
}
self.assertEqual(infer_error_step_name(task, steps), "transcribe")
def test_next_runnable_step_returns_none_while_a_step_is_running(self) -> None:
task = SimpleNamespace(id="task-1", status="running")
steps = {
"transcribe": TaskStep(None, "task-1", "transcribe", "running", None, None, 0, None, None),
"song_detect": TaskStep(None, "task-1", "song_detect", "pending", None, None, 0, None, None),
}
state = {
"settings": {
"comment": {"enabled": True},
"collection": {"enabled": True},
"paths": {},
"publish": {},
}
}
task = SimpleNamespace(id="task-1", status="running")
steps = {
"transcribe": TaskStep(None, "task-1", "transcribe", "running", None, None, 0, None, None),
"song_detect": TaskStep(None, "task-1", "song_detect", "pending", None, None, 0, None, None),
}
state = {
"settings": {
"comment": {"enabled": True},
"collection": {"enabled": True},
"paths": {},
"publish": {},
}
}
self.assertEqual(next_runnable_step(task, steps, state), (None, None))
def test_next_runnable_step_returns_wait_payload_for_retryable_publish(self) -> None:
task = SimpleNamespace(id="task-1", status="failed_retryable")
steps = {
"publish": TaskStep(
None,
"task-1",
"publish",
"failed_retryable",
"PUBLISH_UPLOAD_FAILED",
"upload failed",
1,
None,
"2099-01-01T00:00:00+00:00",
)
}
def test_next_runnable_step_returns_wait_payload_for_retryable_publish(self) -> None:
task = SimpleNamespace(id="task-1", status="failed_retryable")
steps = {
"publish": TaskStep(
None,
"task-1",
"publish",
"failed_retryable",
"PUBLISH_UPLOAD_FAILED",
"upload failed",
1,
None,
"2099-01-01T00:00:00+00:00",
)
}
state = {
"settings": {
"transcribe": {},
"comment": {"enabled": True},
"collection": {"enabled": True},
"paths": {},
"publish": {"retry_schedule_minutes": [10]},
}
}
step_name, waiting_payload = next_runnable_step(task, steps, state)
self.assertIsNone(step_name)
self.assertIsNotNone(waiting_payload)
self.assertTrue(waiting_payload["waiting_for_retry"])
self.assertEqual(waiting_payload["step"], "publish")
def test_next_runnable_step_blocks_non_anchor_session_publish_until_anchor_runs(self) -> None:
    """A non-anchor session part (task-2) must not publish before the anchor part."""
    task = SimpleNamespace(id="task-2", status="split_done")
    steps = {
        "publish": TaskStep(None, "task-2", "publish", "pending", None, None, 0, None, None),
    }

    class _Repo:
        # Both tasks belong to session-1; task-1 started earlier,
        # so it is presumably the session anchor — see next_runnable_step.
        def get_task_context(self, task_id):  # noqa: ANN001
            return SimpleNamespace(task_id=task_id, session_key="session-1")

        def list_task_contexts_by_session_key(self, session_key):  # noqa: ANN001
            return [
                SimpleNamespace(task_id="task-1", segment_started_at="2026-04-04T09:23:00+08:00", source_title="part-1"),
                SimpleNamespace(task_id="task-2", segment_started_at="2026-04-04T09:25:00+08:00", source_title="part-2"),
            ]

        def get_task(self, task_id):  # noqa: ANN001
            status = "split_done"
            return SimpleNamespace(id=task_id, status=status)

    state = {
        "repo": _Repo(),
        "settings": {
            "comment": {"enabled": True},
            "collection": {"enabled": True},
            "paths": {},
            "publish": {},
        },
    }
    # Publish is withheld for the non-anchor task: no runnable step, no wait payload.
    self.assertEqual(next_runnable_step(task, steps, state), (None, None))
def test_next_runnable_step_allows_anchor_session_publish_when_all_parts_split_done(self) -> None:
    """The anchor part (task-1) may publish once every session part is split_done."""
    task = SimpleNamespace(id="task-1", status="split_done")
    steps = {
        "publish": TaskStep(None, "task-1", "publish", "pending", None, None, 0, None, None),
    }

    class _Repo:
        # Session-1 has two parts; task-1 started first (the anchor).
        def get_task_context(self, task_id):  # noqa: ANN001
            return SimpleNamespace(task_id=task_id, session_key="session-1")

        def list_task_contexts_by_session_key(self, session_key):  # noqa: ANN001
            return [
                SimpleNamespace(task_id="task-1", segment_started_at="2026-04-04T09:23:00+08:00", source_title="part-1"),
                SimpleNamespace(task_id="task-2", segment_started_at="2026-04-04T09:25:00+08:00", source_title="part-2"),
            ]

        def get_task(self, task_id):  # noqa: ANN001
            # Every part reports split_done, so the anchor is free to publish.
            return SimpleNamespace(id=task_id, status="split_done")

    state = {
        "repo": _Repo(),
        "settings": {
            "comment": {"enabled": True},
            "collection": {"enabled": True},
            "paths": {},
            "publish": {},
        },
    }
    self.assertEqual(next_runnable_step(task, steps, state), ("publish", None))
if __name__ == "__main__":
unittest.main()
}
step_name, waiting_payload = next_runnable_step(task, steps, state)
self.assertIsNone(step_name)
self.assertIsNotNone(waiting_payload)
self.assertTrue(waiting_payload["waiting_for_retry"])
self.assertEqual(waiting_payload["step"], "publish")
def test_next_runnable_step_blocks_non_anchor_session_publish_until_anchor_runs(self) -> None:
task = SimpleNamespace(id="task-2", status="split_done")
steps = {
"publish": TaskStep(None, "task-2", "publish", "pending", None, None, 0, None, None),
}
class _Repo:
def get_task_context(self, task_id): # noqa: ANN001
return SimpleNamespace(task_id=task_id, session_key="session-1")
def list_task_contexts_by_session_key(self, session_key): # noqa: ANN001
return [
SimpleNamespace(task_id="task-1", segment_started_at="2026-04-04T09:23:00+08:00", source_title="part-1"),
SimpleNamespace(task_id="task-2", segment_started_at="2026-04-04T09:25:00+08:00", source_title="part-2"),
]
def get_task(self, task_id): # noqa: ANN001
status = "split_done"
return SimpleNamespace(id=task_id, status=status)
state = {
"repo": _Repo(),
"settings": {
"comment": {"enabled": True},
"collection": {"enabled": True},
"paths": {},
"publish": {},
},
}
self.assertEqual(next_runnable_step(task, steps, state), (None, None))
def test_next_runnable_step_allows_anchor_session_publish_when_all_parts_split_done(self) -> None:
task = SimpleNamespace(id="task-1", status="split_done")
steps = {
"publish": TaskStep(None, "task-1", "publish", "pending", None, None, 0, None, None),
}
class _Repo:
def get_task_context(self, task_id): # noqa: ANN001
return SimpleNamespace(task_id=task_id, session_key="session-1")
def list_task_contexts_by_session_key(self, session_key): # noqa: ANN001
return [
SimpleNamespace(task_id="task-1", segment_started_at="2026-04-04T09:23:00+08:00", source_title="part-1"),
SimpleNamespace(task_id="task-2", segment_started_at="2026-04-04T09:25:00+08:00", source_title="part-2"),
]
def get_task(self, task_id): # noqa: ANN001
return SimpleNamespace(id=task_id, status="split_done")
state = {
"repo": _Repo(),
"settings": {
"comment": {"enabled": True},
"collection": {"enabled": True},
"paths": {},
"publish": {},
},
}
self.assertEqual(next_runnable_step(task, steps, state), ("publish", None))
if __name__ == "__main__":
unittest.main()

View File

@ -1,95 +1,95 @@
from __future__ import annotations
import unittest
from types import SimpleNamespace
from biliup_next.app.task_policies import apply_disabled_step_fallbacks, resolve_failure
from biliup_next.core.errors import ModuleError
from biliup_next.core.models import TaskStep
class FakePolicyRepo:
    """In-memory stand-in for the task repository used by policy tests.

    Records every status mutation in ``step_updates`` / ``task_updates`` so
    assertions can inspect the exact call sequence.
    """
    def __init__(self, task, steps: list[TaskStep]) -> None:  # type: ignore[no-untyped-def]
        self.task = task
        self.steps = steps
        # Chronological logs: (task_id, step_name, status, kwargs) and (task_id, status, updated_at).
        self.step_updates: list[tuple] = []
        self.task_updates: list[tuple] = []
    def get_task(self, task_id: str):  # type: ignore[no-untyped-def]
        # Only the single seeded task is known; any other id is a miss.
        if task_id != self.task.id:
            return None
        return self.task
    def list_steps(self, task_id: str) -> list[TaskStep]:
        # Hand back a shallow copy so callers cannot mutate the fixture list.
        if task_id != self.task.id:
            return []
        return list(self.steps)
    def update_step_status(self, task_id: str, step_name: str, status: str, **kwargs) -> None:  # type: ignore[no-untyped-def]
        entry = (task_id, step_name, status, kwargs)
        self.step_updates.append(entry)
    def update_task_status(self, task_id: str, status: str, updated_at: str) -> None:
        entry = (task_id, status, updated_at)
        self.task_updates.append(entry)
class TaskPoliciesTests(unittest.TestCase):
    """Behavioral tests for apply_disabled_step_fallbacks and resolve_failure."""
    def test_apply_disabled_step_fallbacks_marks_collection_done_when_disabled(self) -> None:
        """When the collection feature is disabled, both collection steps are auto-completed."""
        task = SimpleNamespace(id="task-1", status="commented")
        repo = FakePolicyRepo(task, [])
        state = {
            "settings": {
                "comment": {"enabled": True},
                "collection": {"enabled": False},
                "paths": {},
                "publish": {},
            }
        }
        changed = apply_disabled_step_fallbacks(state, task, repo)
        self.assertTrue(changed)
        # Both collection sub-steps are skipped and the task jumps straight to collection_synced.
        self.assertEqual([update[1] for update in repo.step_updates], ["collection_a", "collection_b"])
        self.assertEqual(repo.task_updates[-1][1], "collection_synced")
    def test_resolve_failure_uses_publish_retry_schedule(self) -> None:
        """A retryable publish failure takes the first delay from retry_schedule_minutes."""
        task = SimpleNamespace(id="task-1", status="running")
        steps = [
            TaskStep(None, "task-1", "publish", "running", None, None, 0, "2026-01-01T00:00:00+00:00", None),
        ]
        repo = FakePolicyRepo(task, steps)
        state = {
            "settings": {
                "publish": {"retry_schedule_minutes": [15, 5]},
                "comment": {},
                "paths": {},
            }
        }
        exc = ModuleError(code="PUBLISH_UPLOAD_FAILED", message="upload failed", retryable=True)
        failure = resolve_failure(task, repo, state, exc)
        self.assertEqual(failure["step_name"], "publish")
        self.assertEqual(failure["payload"]["retry_status"], "failed_retryable")
        # First schedule entry: 15 minutes -> 900 seconds.
        self.assertEqual(failure["payload"]["next_retry_delay_seconds"], 900)
        self.assertEqual(repo.step_updates[-1][1], "publish")
        self.assertEqual(repo.task_updates[-1][1], "failed_retryable")
    def test_resolve_failure_uses_rate_limit_schedule_for_publish_601(self) -> None:
        """PUBLISH_RATE_LIMITED uses the dedicated rate-limit schedule, not the default one."""
        task = SimpleNamespace(id="task-1", status="running")
        steps = [
            TaskStep(None, "task-1", "publish", "running", None, None, 0, "2026-01-01T00:00:00+00:00", None),
        ]
        repo = FakePolicyRepo(task, steps)
        state = {
            "settings": {
                "publish": {"retry_schedule_minutes": [15, 5], "rate_limit_retry_schedule_minutes": [30, 60]},
                "comment": {},
                "paths": {},
            }
        }
        exc = ModuleError(code="PUBLISH_RATE_LIMITED", message="rate limited", retryable=True)
        failure = resolve_failure(task, repo, state, exc)
        # First rate-limit entry: 30 minutes -> 1800 seconds.
        self.assertEqual(failure["payload"]["next_retry_delay_seconds"], 1800)
        self.assertEqual(repo.task_updates[-1][1], "failed_retryable")
# Allow running this test module directly (outside a test runner).
if __name__ == "__main__":
    unittest.main()
from __future__ import annotations
import unittest
from types import SimpleNamespace
from biliup_next.app.task_policies import apply_disabled_step_fallbacks, resolve_failure
from biliup_next.core.errors import ModuleError
from biliup_next.core.models import TaskStep
class FakePolicyRepo:
def __init__(self, task, steps: list[TaskStep]) -> None: # type: ignore[no-untyped-def]
self.task = task
self.steps = steps
self.step_updates: list[tuple] = []
self.task_updates: list[tuple] = []
def get_task(self, task_id: str): # type: ignore[no-untyped-def]
return self.task if task_id == self.task.id else None
def list_steps(self, task_id: str) -> list[TaskStep]:
return list(self.steps) if task_id == self.task.id else []
def update_step_status(self, task_id: str, step_name: str, status: str, **kwargs) -> None: # type: ignore[no-untyped-def]
self.step_updates.append((task_id, step_name, status, kwargs))
def update_task_status(self, task_id: str, status: str, updated_at: str) -> None:
self.task_updates.append((task_id, status, updated_at))
class TaskPoliciesTests(unittest.TestCase):
def test_apply_disabled_step_fallbacks_marks_collection_done_when_disabled(self) -> None:
task = SimpleNamespace(id="task-1", status="commented")
repo = FakePolicyRepo(task, [])
state = {
"settings": {
"comment": {"enabled": True},
"collection": {"enabled": False},
"paths": {},
"publish": {},
}
}
changed = apply_disabled_step_fallbacks(state, task, repo)
self.assertTrue(changed)
self.assertEqual([update[1] for update in repo.step_updates], ["collection_a", "collection_b"])
self.assertEqual(repo.task_updates[-1][1], "collection_synced")
def test_resolve_failure_uses_publish_retry_schedule(self) -> None:
task = SimpleNamespace(id="task-1", status="running")
steps = [
TaskStep(None, "task-1", "publish", "running", None, None, 0, "2026-01-01T00:00:00+00:00", None),
]
repo = FakePolicyRepo(task, steps)
state = {
"settings": {
"publish": {"retry_schedule_minutes": [15, 5]},
"comment": {},
"paths": {},
}
}
exc = ModuleError(code="PUBLISH_UPLOAD_FAILED", message="upload failed", retryable=True)
failure = resolve_failure(task, repo, state, exc)
self.assertEqual(failure["step_name"], "publish")
self.assertEqual(failure["payload"]["retry_status"], "failed_retryable")
self.assertEqual(failure["payload"]["next_retry_delay_seconds"], 900)
self.assertEqual(repo.step_updates[-1][1], "publish")
self.assertEqual(repo.task_updates[-1][1], "failed_retryable")
def test_resolve_failure_uses_rate_limit_schedule_for_publish_601(self) -> None:
task = SimpleNamespace(id="task-1", status="running")
steps = [
TaskStep(None, "task-1", "publish", "running", None, None, 0, "2026-01-01T00:00:00+00:00", None),
]
repo = FakePolicyRepo(task, steps)
state = {
"settings": {
"publish": {"retry_schedule_minutes": [15, 5], "rate_limit_retry_schedule_minutes": [30, 60]},
"comment": {},
"paths": {},
}
}
exc = ModuleError(code="PUBLISH_RATE_LIMITED", message="rate limited", retryable=True)
failure = resolve_failure(task, repo, state, exc)
self.assertEqual(failure["payload"]["next_retry_delay_seconds"], 1800)
self.assertEqual(repo.task_updates[-1][1], "failed_retryable")
if __name__ == "__main__":
unittest.main()

View File

@ -1,121 +1,121 @@
from __future__ import annotations
import tempfile
import unittest
from pathlib import Path
from biliup_next.core.models import SessionBinding, Task, TaskContext, TaskStep
from biliup_next.infra.db import Database
from biliup_next.infra.task_repository import TaskRepository
class TaskRepositorySqliteTests(unittest.TestCase):
    """Integration tests for TaskRepository against a throwaway on-disk SQLite database."""
    def setUp(self) -> None:
        # Fresh schema-initialized database file per test; deleted in tearDown.
        self.tempdir = tempfile.TemporaryDirectory()
        db_path = Path(self.tempdir.name) / "test.db"
        self.db = Database(db_path)
        self.db.initialize()
        self.repo = TaskRepository(self.db)
    def tearDown(self) -> None:
        self.tempdir.cleanup()
    def test_query_tasks_filters_and_sorts_by_updated_desc(self) -> None:
        """status + case-insensitive search filters combine; results order by updated_at descending."""
        self.repo.upsert_task(Task("task-1", "local_file", "/tmp/a.mp4", "Alpha", "created", "2026-01-01T00:00:00+00:00", "2026-01-01T00:01:00+00:00"))
        self.repo.upsert_task(Task("task-2", "local_file", "/tmp/b.mp4", "Beta", "published", "2026-01-01T00:00:00+00:00", "2026-01-01T00:03:00+00:00"))
        self.repo.upsert_task(Task("task-3", "local_file", "/tmp/c.mp4", "Gamma", "published", "2026-01-01T00:00:00+00:00", "2026-01-01T00:02:00+00:00"))
        # "a" matches Beta and Gamma among the published tasks; task-1 is excluded by status.
        items, total = self.repo.query_tasks(status="published", search="a", sort="updated_desc")
        self.assertEqual(total, 2)
        self.assertEqual([item.id for item in items], ["task-2", "task-3"])
    def test_list_task_contexts_and_steps_for_task_ids_returns_batched_maps(self) -> None:
        """Batched lookups return per-task maps; tasks without contexts are simply absent."""
        self.repo.upsert_task(Task("task-1", "local_file", "/tmp/a.mp4", "Alpha", "created", "2026-01-01T00:00:00+00:00", "2026-01-01T00:01:00+00:00"))
        self.repo.upsert_task(Task("task-2", "local_file", "/tmp/b.mp4", "Beta", "created", "2026-01-01T00:00:00+00:00", "2026-01-01T00:02:00+00:00"))
        self.repo.upsert_task_context(
            TaskContext(
                id=None,
                task_id="task-1",
                session_key="session-1",
                streamer="streamer",
                room_id="room",
                source_title="Alpha",
                segment_started_at="2026-01-01T00:00:00+00:00",
                segment_duration_seconds=60.0,
                full_video_bvid="BV123",
                created_at="2026-01-01T00:00:00+00:00",
                updated_at="2026-01-01T00:00:00+00:00",
            )
        )
        self.repo.replace_steps(
            "task-1",
            [
                TaskStep(None, "task-1", "transcribe", "pending", None, None, 0, None, None),
                TaskStep(None, "task-1", "song_detect", "pending", None, None, 0, None, None),
            ],
        )
        self.repo.replace_steps(
            "task-2",
            [
                TaskStep(None, "task-2", "transcribe", "running", None, None, 0, "2026-01-01T00:03:00+00:00", None),
            ],
        )
        contexts = self.repo.list_task_contexts_for_task_ids(["task-1", "task-2"])
        steps = self.repo.list_steps_for_task_ids(["task-1", "task-2"])
        # task-2 has steps but no context, so it only appears in the steps map.
        self.assertEqual(set(contexts.keys()), {"task-1"})
        self.assertEqual(contexts["task-1"].full_video_bvid, "BV123")
        self.assertEqual([step.step_name for step in steps["task-1"]], ["transcribe", "song_detect"])
        self.assertEqual(steps["task-2"][0].status, "running")
    def test_session_binding_supports_upsert_and_source_title_fallback_lookup(self) -> None:
        """Re-upserting the same session_key overwrites; lookups fall back to source_title."""
        self.repo.upsert_session_binding(
            SessionBinding(
                id=None,
                session_key="session-1",
                source_title="Alpha",
                streamer="streamer",
                room_id="room",
                full_video_bvid="BVOLD",
                created_at="2026-01-01T00:00:00+00:00",
                updated_at="2026-01-01T00:00:00+00:00",
            )
        )
        # Same session_key again: should replace BVOLD with BVNEW, not create a duplicate.
        self.repo.upsert_session_binding(
            SessionBinding(
                id=None,
                session_key="session-1",
                source_title="Alpha",
                streamer="streamer",
                room_id="room",
                full_video_bvid="BVNEW",
                created_at="2026-01-01T00:01:00+00:00",
                updated_at="2026-01-01T00:01:00+00:00",
            )
        )
        # Binding without a session_key is only reachable via its source_title.
        self.repo.upsert_session_binding(
            SessionBinding(
                id=None,
                session_key=None,
                source_title="Beta",
                streamer="streamer-2",
                room_id="room-2",
                full_video_bvid="BVBETA",
                created_at="2026-01-01T00:02:00+00:00",
                updated_at="2026-01-01T00:02:00+00:00",
            )
        )
        binding_by_session = self.repo.get_session_binding(session_key="session-1")
        binding_by_title = self.repo.get_session_binding(source_title="Beta")
        self.assertIsNotNone(binding_by_session)
        self.assertEqual(binding_by_session.full_video_bvid, "BVNEW")
        self.assertIsNotNone(binding_by_title)
        self.assertEqual(binding_by_title.full_video_bvid, "BVBETA")
# Allow running this test module directly (outside a test runner).
if __name__ == "__main__":
    unittest.main()
from __future__ import annotations
import tempfile
import unittest
from pathlib import Path
from biliup_next.core.models import SessionBinding, Task, TaskContext, TaskStep
from biliup_next.infra.db import Database
from biliup_next.infra.task_repository import TaskRepository
class TaskRepositorySqliteTests(unittest.TestCase):
def setUp(self) -> None:
self.tempdir = tempfile.TemporaryDirectory()
db_path = Path(self.tempdir.name) / "test.db"
self.db = Database(db_path)
self.db.initialize()
self.repo = TaskRepository(self.db)
def tearDown(self) -> None:
self.tempdir.cleanup()
def test_query_tasks_filters_and_sorts_by_updated_desc(self) -> None:
self.repo.upsert_task(Task("task-1", "local_file", "/tmp/a.mp4", "Alpha", "created", "2026-01-01T00:00:00+00:00", "2026-01-01T00:01:00+00:00"))
self.repo.upsert_task(Task("task-2", "local_file", "/tmp/b.mp4", "Beta", "published", "2026-01-01T00:00:00+00:00", "2026-01-01T00:03:00+00:00"))
self.repo.upsert_task(Task("task-3", "local_file", "/tmp/c.mp4", "Gamma", "published", "2026-01-01T00:00:00+00:00", "2026-01-01T00:02:00+00:00"))
items, total = self.repo.query_tasks(status="published", search="a", sort="updated_desc")
self.assertEqual(total, 2)
self.assertEqual([item.id for item in items], ["task-2", "task-3"])
def test_list_task_contexts_and_steps_for_task_ids_returns_batched_maps(self) -> None:
self.repo.upsert_task(Task("task-1", "local_file", "/tmp/a.mp4", "Alpha", "created", "2026-01-01T00:00:00+00:00", "2026-01-01T00:01:00+00:00"))
self.repo.upsert_task(Task("task-2", "local_file", "/tmp/b.mp4", "Beta", "created", "2026-01-01T00:00:00+00:00", "2026-01-01T00:02:00+00:00"))
self.repo.upsert_task_context(
TaskContext(
id=None,
task_id="task-1",
session_key="session-1",
streamer="streamer",
room_id="room",
source_title="Alpha",
segment_started_at="2026-01-01T00:00:00+00:00",
segment_duration_seconds=60.0,
full_video_bvid="BV123",
created_at="2026-01-01T00:00:00+00:00",
updated_at="2026-01-01T00:00:00+00:00",
)
)
self.repo.replace_steps(
"task-1",
[
TaskStep(None, "task-1", "transcribe", "pending", None, None, 0, None, None),
TaskStep(None, "task-1", "song_detect", "pending", None, None, 0, None, None),
],
)
self.repo.replace_steps(
"task-2",
[
TaskStep(None, "task-2", "transcribe", "running", None, None, 0, "2026-01-01T00:03:00+00:00", None),
],
)
contexts = self.repo.list_task_contexts_for_task_ids(["task-1", "task-2"])
steps = self.repo.list_steps_for_task_ids(["task-1", "task-2"])
self.assertEqual(set(contexts.keys()), {"task-1"})
self.assertEqual(contexts["task-1"].full_video_bvid, "BV123")
self.assertEqual([step.step_name for step in steps["task-1"]], ["transcribe", "song_detect"])
self.assertEqual(steps["task-2"][0].status, "running")
def test_session_binding_supports_upsert_and_source_title_fallback_lookup(self) -> None:
self.repo.upsert_session_binding(
SessionBinding(
id=None,
session_key="session-1",
source_title="Alpha",
streamer="streamer",
room_id="room",
full_video_bvid="BVOLD",
created_at="2026-01-01T00:00:00+00:00",
updated_at="2026-01-01T00:00:00+00:00",
)
)
self.repo.upsert_session_binding(
SessionBinding(
id=None,
session_key="session-1",
source_title="Alpha",
streamer="streamer",
room_id="room",
full_video_bvid="BVNEW",
created_at="2026-01-01T00:01:00+00:00",
updated_at="2026-01-01T00:01:00+00:00",
)
)
self.repo.upsert_session_binding(
SessionBinding(
id=None,
session_key=None,
source_title="Beta",
streamer="streamer-2",
room_id="room-2",
full_video_bvid="BVBETA",
created_at="2026-01-01T00:02:00+00:00",
updated_at="2026-01-01T00:02:00+00:00",
)
)
binding_by_session = self.repo.get_session_binding(session_key="session-1")
binding_by_title = self.repo.get_session_binding(source_title="Beta")
self.assertIsNotNone(binding_by_session)
self.assertEqual(binding_by_session.full_video_bvid, "BVNEW")
self.assertIsNotNone(binding_by_title)
self.assertEqual(binding_by_title.full_video_bvid, "BVBETA")
if __name__ == "__main__":
unittest.main()

View File

@ -1,136 +1,136 @@
from __future__ import annotations
import unittest
from types import SimpleNamespace
from unittest.mock import patch
from biliup_next.core.errors import ModuleError
from biliup_next.app.task_runner import process_task
from biliup_next.core.models import TaskStep
class FakeRunnerRepo:
    """In-memory repository double for task-runner tests.

    Unlike a pure recorder it also applies updates to the held task/steps,
    so process_task observes state transitions between iterations.
    """
    def __init__(self, task, steps: list[TaskStep]) -> None:  # type: ignore[no-untyped-def]
        self.task = task
        self.steps = steps
        # Call logs inspected by the test assertions.
        self.step_updates: list[tuple] = []
        self.task_updates: list[tuple] = []
        self.claims: list[tuple[str, str, str]] = []
    def get_task(self, task_id: str):  # type: ignore[no-untyped-def]
        return self.task if task_id == self.task.id else None
    def list_steps(self, task_id: str) -> list[TaskStep]:
        return list(self.steps) if task_id == self.task.id else []
    def update_step_status(self, task_id: str, step_name: str, status: str, **kwargs) -> None:  # type: ignore[no-untyped-def]
        self.step_updates.append((task_id, step_name, status, kwargs))
        # Rebuild the matching TaskStep, keeping prior fields unless overridden by kwargs.
        for index, step in enumerate(self.steps):
            if step.task_id == task_id and step.step_name == step_name:
                self.steps[index] = TaskStep(
                    step.id,
                    step.task_id,
                    step.step_name,
                    status,
                    kwargs.get("error_code", step.error_code),
                    kwargs.get("error_message", step.error_message),
                    kwargs.get("retry_count", step.retry_count),
                    kwargs.get("started_at", step.started_at),
                    kwargs.get("finished_at", step.finished_at),
                )
    def update_task_status(self, task_id: str, status: str, updated_at: str) -> None:
        self.task_updates.append((task_id, status, updated_at))
        if task_id == self.task.id:
            # Replace the namespace so subsequent get_task calls see the new status.
            self.task = SimpleNamespace(**{**self.task.__dict__, "status": status, "updated_at": updated_at})
    def claim_step_running(self, task_id: str, step_name: str, *, started_at: str) -> bool:
        # Always grants the claim, mimicking a successful compare-and-set in the real repo.
        self.claims.append((task_id, step_name, started_at))
        for index, step in enumerate(self.steps):
            if step.task_id == task_id and step.step_name == step_name:
                self.steps[index] = TaskStep(step.id, step.task_id, step.step_name, "running", None, None, step.retry_count, started_at, None)
        return True
class TaskRunnerTests(unittest.TestCase):
    """Tests for process_task orchestration: reset, claiming, and failure handling."""
    def test_process_task_reset_step_marks_task_back_to_pre_step_status(self) -> None:
        """reset_step flips the step back to pending and the task to its pre-step status."""
        task = SimpleNamespace(id="task-1", status="failed_retryable", updated_at="2026-01-01T00:00:00+00:00")
        steps = [
            TaskStep(None, "task-1", "transcribe", "failed_retryable", "ERR", "boom", 1, "2026-01-01T00:00:00+00:00", "2026-01-01T00:01:00+00:00"),
        ]
        repo = FakeRunnerRepo(task, steps)
        state = {
            "repo": repo,
            "settings": {"ingest": {}, "paths": {}, "comment": {"enabled": True}, "collection": {"enabled": True}, "publish": {}},
        }
        # next_runnable_step returns (None, None) so only the reset itself is exercised.
        with patch("biliup_next.app.task_runner.ensure_initialized", return_value=state), patch(
            "biliup_next.app.task_runner.record_task_action"
        ), patch("biliup_next.app.task_runner.apply_disabled_step_fallbacks", return_value=False), patch(
            "biliup_next.app.task_runner.next_runnable_step", return_value=(None, None)
        ):
            result = process_task("task-1", reset_step="transcribe")
        self.assertTrue(result["processed"][0]["reset"])
        self.assertEqual(repo.step_updates[0][1], "transcribe")
        self.assertEqual(repo.step_updates[0][2], "pending")
        self.assertEqual(repo.task_updates[0][1], "created")
    def test_process_task_sets_task_running_before_execute_step(self) -> None:
        """A claimed step transitions the task to running before execute_step is invoked."""
        task = SimpleNamespace(id="task-1", status="created", updated_at="2026-01-01T00:00:00+00:00")
        steps = [
            TaskStep(None, "task-1", "transcribe", "pending", None, None, 0, None, None),
        ]
        repo = FakeRunnerRepo(task, steps)
        state = {
            "repo": repo,
            "settings": {"ingest": {}, "paths": {}, "comment": {"enabled": True}, "collection": {"enabled": True}, "publish": {}},
        }
        # side_effect: first call yields transcribe, second ends the loop.
        with patch("biliup_next.app.task_runner.ensure_initialized", return_value=state), patch(
            "biliup_next.app.task_runner.record_task_action"
        ), patch("biliup_next.app.task_runner.apply_disabled_step_fallbacks", return_value=False), patch(
            "biliup_next.app.task_runner.next_runnable_step", side_effect=[("transcribe", None), (None, None)]
        ), patch("biliup_next.app.task_runner.execute_step", return_value={"task_id": "task-1", "step": "transcribe"}):
            result = process_task("task-1")
        self.assertEqual(repo.claims[0][1], "transcribe")
        self.assertEqual(repo.task_updates[0][1], "running")
        self.assertEqual(result["processed"][0]["step"], "transcribe")
    def test_process_task_marks_publish_failed_retryable_on_module_error(self) -> None:
        """A rate-limited ModuleError marks publish failed_retryable with the rate-limit delay."""
        task = SimpleNamespace(id="task-1", status="split_done", updated_at="2026-01-01T00:00:00+00:00")
        steps = [
            TaskStep(None, "task-1", "publish", "pending", None, None, 0, None, None),
        ]
        repo = FakeRunnerRepo(task, steps)
        state = {
            "repo": repo,
            "settings": {
                "ingest": {},
                "paths": {},
                "comment": {"enabled": True},
                "collection": {"enabled": True},
                "publish": {"retry_schedule_minutes": [15], "rate_limit_retry_schedule_minutes": [30]},
            },
        }
        with patch("biliup_next.app.task_runner.ensure_initialized", return_value=state), patch(
            "biliup_next.app.task_runner.record_task_action"
        ), patch("biliup_next.app.task_runner.apply_disabled_step_fallbacks", return_value=False), patch(
            "biliup_next.app.task_runner.next_runnable_step", return_value=("publish", None)
        ), patch(
            "biliup_next.app.task_runner.execute_step",
            side_effect=ModuleError(code="PUBLISH_RATE_LIMITED", message="rate limited", retryable=True),
        ):
            result = process_task("task-1")
        self.assertEqual(result["processed"][-1]["retry_status"], "failed_retryable")
        # Rate-limit schedule entry: 30 minutes -> 1800 seconds.
        self.assertEqual(result["processed"][-1]["next_retry_delay_seconds"], 1800)
        self.assertEqual(repo.step_updates[-1][1], "publish")
        self.assertEqual(repo.step_updates[-1][2], "failed_retryable")
        self.assertEqual(repo.task_updates[-1][1], "failed_retryable")
# Allow running this test module directly (outside a test runner).
if __name__ == "__main__":
    unittest.main()
from __future__ import annotations
import unittest
from types import SimpleNamespace
from unittest.mock import patch
from biliup_next.core.errors import ModuleError
from biliup_next.app.task_runner import process_task
from biliup_next.core.models import TaskStep
class FakeRunnerRepo:
def __init__(self, task, steps: list[TaskStep]) -> None: # type: ignore[no-untyped-def]
self.task = task
self.steps = steps
self.step_updates: list[tuple] = []
self.task_updates: list[tuple] = []
self.claims: list[tuple[str, str, str]] = []
def get_task(self, task_id: str): # type: ignore[no-untyped-def]
return self.task if task_id == self.task.id else None
def list_steps(self, task_id: str) -> list[TaskStep]:
return list(self.steps) if task_id == self.task.id else []
def update_step_status(self, task_id: str, step_name: str, status: str, **kwargs) -> None: # type: ignore[no-untyped-def]
self.step_updates.append((task_id, step_name, status, kwargs))
for index, step in enumerate(self.steps):
if step.task_id == task_id and step.step_name == step_name:
self.steps[index] = TaskStep(
step.id,
step.task_id,
step.step_name,
status,
kwargs.get("error_code", step.error_code),
kwargs.get("error_message", step.error_message),
kwargs.get("retry_count", step.retry_count),
kwargs.get("started_at", step.started_at),
kwargs.get("finished_at", step.finished_at),
)
def update_task_status(self, task_id: str, status: str, updated_at: str) -> None:
self.task_updates.append((task_id, status, updated_at))
if task_id == self.task.id:
self.task = SimpleNamespace(**{**self.task.__dict__, "status": status, "updated_at": updated_at})
def claim_step_running(self, task_id: str, step_name: str, *, started_at: str) -> bool:
self.claims.append((task_id, step_name, started_at))
for index, step in enumerate(self.steps):
if step.task_id == task_id and step.step_name == step_name:
self.steps[index] = TaskStep(step.id, step.task_id, step.step_name, "running", None, None, step.retry_count, started_at, None)
return True
class TaskRunnerTests(unittest.TestCase):
def test_process_task_reset_step_marks_task_back_to_pre_step_status(self) -> None:
task = SimpleNamespace(id="task-1", status="failed_retryable", updated_at="2026-01-01T00:00:00+00:00")
steps = [
TaskStep(None, "task-1", "transcribe", "failed_retryable", "ERR", "boom", 1, "2026-01-01T00:00:00+00:00", "2026-01-01T00:01:00+00:00"),
]
repo = FakeRunnerRepo(task, steps)
state = {
"repo": repo,
"settings": {"ingest": {}, "paths": {}, "comment": {"enabled": True}, "collection": {"enabled": True}, "publish": {}},
}
with patch("biliup_next.app.task_runner.ensure_initialized", return_value=state), patch(
"biliup_next.app.task_runner.record_task_action"
), patch("biliup_next.app.task_runner.apply_disabled_step_fallbacks", return_value=False), patch(
"biliup_next.app.task_runner.next_runnable_step", return_value=(None, None)
):
result = process_task("task-1", reset_step="transcribe")
self.assertTrue(result["processed"][0]["reset"])
self.assertEqual(repo.step_updates[0][1], "transcribe")
self.assertEqual(repo.step_updates[0][2], "pending")
self.assertEqual(repo.task_updates[0][1], "created")
def test_process_task_sets_task_running_before_execute_step(self) -> None:
task = SimpleNamespace(id="task-1", status="created", updated_at="2026-01-01T00:00:00+00:00")
steps = [
TaskStep(None, "task-1", "transcribe", "pending", None, None, 0, None, None),
]
repo = FakeRunnerRepo(task, steps)
state = {
"repo": repo,
"settings": {"ingest": {}, "paths": {}, "comment": {"enabled": True}, "collection": {"enabled": True}, "publish": {}},
}
with patch("biliup_next.app.task_runner.ensure_initialized", return_value=state), patch(
"biliup_next.app.task_runner.record_task_action"
), patch("biliup_next.app.task_runner.apply_disabled_step_fallbacks", return_value=False), patch(
"biliup_next.app.task_runner.next_runnable_step", side_effect=[("transcribe", None), (None, None)]
), patch("biliup_next.app.task_runner.execute_step", return_value={"task_id": "task-1", "step": "transcribe"}):
result = process_task("task-1")
self.assertEqual(repo.claims[0][1], "transcribe")
self.assertEqual(repo.task_updates[0][1], "running")
self.assertEqual(result["processed"][0]["step"], "transcribe")
def test_process_task_marks_publish_failed_retryable_on_module_error(self) -> None:
task = SimpleNamespace(id="task-1", status="split_done", updated_at="2026-01-01T00:00:00+00:00")
steps = [
TaskStep(None, "task-1", "publish", "pending", None, None, 0, None, None),
]
repo = FakeRunnerRepo(task, steps)
state = {
"repo": repo,
"settings": {
"ingest": {},
"paths": {},
"comment": {"enabled": True},
"collection": {"enabled": True},
"publish": {"retry_schedule_minutes": [15], "rate_limit_retry_schedule_minutes": [30]},
},
}
with patch("biliup_next.app.task_runner.ensure_initialized", return_value=state), patch(
"biliup_next.app.task_runner.record_task_action"
), patch("biliup_next.app.task_runner.apply_disabled_step_fallbacks", return_value=False), patch(
"biliup_next.app.task_runner.next_runnable_step", return_value=("publish", None)
), patch(
"biliup_next.app.task_runner.execute_step",
side_effect=ModuleError(code="PUBLISH_RATE_LIMITED", message="rate limited", retryable=True),
):
result = process_task("task-1")
self.assertEqual(result["processed"][-1]["retry_status"], "failed_retryable")
self.assertEqual(result["processed"][-1]["next_retry_delay_seconds"], 1800)
self.assertEqual(repo.step_updates[-1][1], "publish")
self.assertEqual(repo.step_updates[-1][2], "failed_retryable")
self.assertEqual(repo.task_updates[-1][1], "failed_retryable")
if __name__ == "__main__":
unittest.main()

View File

@ -0,0 +1,84 @@
from __future__ import annotations
import unittest
from types import SimpleNamespace
from biliup_next.app.retry_meta import retry_meta_for_step
from biliup_next.app.task_engine import next_runnable_step
from biliup_next.app.task_policies import resolve_failure
from biliup_next.core.errors import ModuleError
from biliup_next.core.models import TaskStep
class _Repo:
    """Single-step in-memory repo; update_step_status replaces the whole step list."""
    def __init__(self) -> None:
        self.steps = [TaskStep(None, "task-1", "transcribe", "running", None, None, 0, None, None)]
        # Chronological logs of step/task status mutations.
        self.step_updates: list[tuple] = []
        self.task_updates: list[tuple] = []
    def list_steps(self, task_id: str):  # noqa: ANN001
        return list(self.steps)
    def get_task(self, task_id: str):  # noqa: ANN001
        return SimpleNamespace(id=task_id, status="running")
    def update_step_status(self, task_id: str, step_name: str, status: str, **kwargs) -> None:  # noqa: ANN001
        self.step_updates.append((task_id, step_name, status, kwargs))
        # Collapse to a single rebuilt step reflecting only the latest update.
        self.steps = [TaskStep(None, task_id, step_name, status, kwargs.get("error_code"), kwargs.get("error_message"), kwargs.get("retry_count", 0), kwargs.get("started_at"), kwargs.get("finished_at"))]
    def update_task_status(self, task_id: str, status: str, updated_at: str) -> None:
        self.task_updates.append((task_id, status, updated_at))
class TranscribeRetryPolicyTests(unittest.TestCase):
    """Retry metadata and scheduling behavior for the transcribe step."""
    def test_retry_meta_reports_wait_window_for_transcribe(self) -> None:
        """A finished_at in the future keeps the retry pending and reports the full wait window."""
        # finished_at in 2099 means the retry window cannot have elapsed yet.
        step = TaskStep(None, "task-1", "transcribe", "failed_retryable", "ERR", "boom", 1, None, "2099-01-01T00:00:00+00:00")
        payload = retry_meta_for_step(step, {"transcribe": {"retry_schedule_minutes": [10]}})
        self.assertIsNotNone(payload)
        self.assertFalse(payload["retry_due"])
        # Schedule entry: 10 minutes -> 600 seconds.
        self.assertEqual(payload["retry_wait_seconds"], 600)
    def test_next_runnable_step_waits_for_retryable_transcribe(self) -> None:
        """While the retry window is open, no step runs and a waiting payload is returned."""
        task = SimpleNamespace(id="task-1", status="failed_retryable")
        steps = {
            "transcribe": TaskStep(None, "task-1", "transcribe", "failed_retryable", "ERR", "boom", 1, None, "2099-01-01T00:00:00+00:00"),
        }
        state = {
            "settings": {
                "transcribe": {"retry_schedule_minutes": [10]},
                "comment": {"enabled": True},
                "collection": {"enabled": True},
                "paths": {},
                "publish": {},
            }
        }
        step_name, waiting_payload = next_runnable_step(task, steps, state)
        self.assertIsNone(step_name)
        self.assertIsNotNone(waiting_payload)
        self.assertEqual(waiting_payload["step"], "transcribe")
    def test_resolve_failure_adds_transcribe_retry_delay(self) -> None:
        """A retryable transcribe failure picks the first delay from its own schedule."""
        repo = _Repo()
        task = SimpleNamespace(id="task-1", status="running")
        state = {
            "settings": {
                "transcribe": {"retry_schedule_minutes": [5, 10]},
                "publish": {},
                "comment": {},
                "paths": {},
                "collection": {"enabled": True},
            }
        }
        result = resolve_failure(task, repo, state, ModuleError(code="GROQ_TRANSCRIBE_FAILED", message="boom", retryable=True))
        self.assertEqual(result["payload"]["retry_status"], "failed_retryable")
        # First schedule entry: 5 minutes -> 300 seconds.
        self.assertEqual(result["payload"]["next_retry_delay_seconds"], 300)
# Allow running this test module directly (outside a test runner).
if __name__ == "__main__":
    unittest.main()

170
tests/test_video_links.py Normal file
View File

@ -0,0 +1,170 @@
from __future__ import annotations
import tempfile
import unittest
from pathlib import Path
from types import SimpleNamespace
from unittest.mock import patch
import subprocess
from biliup_next.infra.adapters.full_video_locator import fetch_biliup_list
from biliup_next.infra.video_links import link_context_for_task
class VideoLinksTests(unittest.TestCase):
    def test_fetch_biliup_list_keeps_pubing_videos(self) -> None:
        """Parsing biliup CLI output keeps public and in-review uploads but drops private ones."""
        # Simulated `biliup list` stdout: a log line plus tab-separated bvid/title/state rows.
        output = (
            "2026-04-22 15:56:43 INFO biliup_cli::uploader: user: test\n"
            "BVREVIEW\t王海颖唱歌录播 04月22日 15时56分\t审核中\n"
            "BVPUB\t王海颖唱歌录播 04月20日 22时08分\t开放浏览\n"
            "BVPRIVATE\t私密视频\t仅自己可见\n"
        )
        with patch(
            "biliup_next.infra.adapters.full_video_locator.subprocess.run",
            return_value=subprocess.CompletedProcess(["biliup"], 0, stdout=output, stderr=""),
        ):
            videos = fetch_biliup_list({"biliup_path": "biliup", "cookie_file": "cookies.json"}, max_pages=1)
        # The private-only (仅自己可见) row is filtered out; order of the rest is preserved.
        self.assertEqual(
            videos,
            [
                {"bvid": "BVREVIEW", "title": "王海颖唱歌录播 04月22日 15时56分"},
                {"bvid": "BVPUB", "title": "王海颖唱歌录播 04月20日 22时08分"},
            ],
        )
def test_previous_live_falls_back_to_biliup_list(self) -> None:
with tempfile.TemporaryDirectory() as tmpdir:
source_path = Path(tmpdir) / "source.mp4"
source_path.write_bytes(b"")
task = SimpleNamespace(
id="task-current",
title="王海颖唱歌录播 04月19日 22时10分",
source_path=str(source_path),
)
repo = SimpleNamespace(get_task_context=lambda task_id: None)
settings = {"biliup_path": "biliup", "cookie_file": "cookies.json"}
with patch(
"biliup_next.infra.video_links.fetch_biliup_list",
return_value=[
{"bvid": "BVPURE", "title": "【王海颖 (歌曲纯享版)】 04月18日 22时06分 共10首歌"},
{"bvid": "BVNEWER", "title": "王海颖唱歌录播 04月20日 22时00分"},
{"bvid": "BVPREV", "title": "王海颖唱歌录播 04月18日 22时06分"},
{"bvid": "BVOLDER", "title": "王海颖唱歌录播 04月17日 22时00分"},
],
):
context = link_context_for_task(task, repo, settings)
self.assertEqual(context["previous_full_video_bvid"], "BVPREV")
self.assertEqual(context["previous_full_video_link"], "https://www.bilibili.com/video/BVPREV")
self.assertEqual(context["previous_pure_video_bvid"], "BVPURE")
self.assertEqual(context["previous_pure_video_link"], "https://www.bilibili.com/video/BVPURE")
def test_previous_live_merges_repo_and_biliup_list_links(self) -> None:
with tempfile.TemporaryDirectory() as tmpdir:
root = Path(tmpdir)
current_path = root / "current" / "source.mp4"
previous_path = root / "previous" / "source.mp4"
current_path.parent.mkdir()
previous_path.parent.mkdir()
current_path.write_bytes(b"")
previous_path.write_bytes(b"")
(previous_path.parent / "full_video_bvid.txt").write_text("BVLOCALFULL", encoding="utf-8")
task = SimpleNamespace(
id="task-current",
title="王海颖唱歌录播 04月19日 22时10分",
source_path=str(current_path),
)
previous_task = SimpleNamespace(
id="task-previous",
title="王海颖唱歌录播 04月18日 22时06分",
source_path=str(previous_path),
)
current_context = SimpleNamespace(
task_id=task.id,
streamer="王海颖",
session_key="王海颖-0419",
segment_started_at="2026-04-19T22:10:00",
)
previous_context = SimpleNamespace(
task_id=previous_task.id,
streamer="王海颖",
session_key="王海颖-0418",
segment_started_at="2026-04-18T22:06:00",
full_video_bvid="BVLOCALFULL",
)
tasks = {task.id: task, previous_task.id: previous_task}
contexts = {task.id: current_context, previous_task.id: previous_context}
repo = SimpleNamespace(
get_task_context=lambda task_id: contexts.get(task_id),
get_task=lambda task_id: tasks.get(task_id),
find_recent_task_contexts=lambda streamer, limit=50: [current_context, previous_context],
)
settings = {"biliup_path": "biliup", "cookie_file": "cookies.json"}
with patch(
"biliup_next.infra.video_links.fetch_biliup_list",
return_value=[
{"bvid": "BVPURE", "title": "【王海颖(歌曲纯享版)】04月18日 22时06分 共18首歌"},
],
):
context = link_context_for_task(task, repo, settings)
self.assertEqual(context["previous_full_video_bvid"], "BVLOCALFULL")
self.assertEqual(context["previous_full_video_link"], "https://www.bilibili.com/video/BVLOCALFULL")
self.assertEqual(context["previous_pure_video_bvid"], "BVPURE")
self.assertEqual(context["previous_pure_video_link"], "https://www.bilibili.com/video/BVPURE")
def test_previous_live_biliup_list_handles_year_boundary(self) -> None:
with tempfile.TemporaryDirectory() as tmpdir:
source_path = Path(tmpdir) / "source.mp4"
source_path.write_bytes(b"")
task = SimpleNamespace(
id="task-current",
title="王海颖唱歌录播 01月01日 22时10分",
source_path=str(source_path),
)
repo = SimpleNamespace(get_task_context=lambda task_id: None)
settings = {"biliup_path": "biliup", "cookie_file": "cookies.json"}
with patch(
"biliup_next.infra.video_links.fetch_biliup_list",
return_value=[
{"bvid": "BVPREV", "title": "王海颖唱歌录播 12月31日 22时06分"},
],
):
context = link_context_for_task(task, repo, settings)
self.assertEqual(context["previous_full_video_bvid"], "BVPREV")
def test_current_full_video_falls_back_to_biliup_list(self) -> None:
with tempfile.TemporaryDirectory() as tmpdir:
source_path = Path(tmpdir) / "source.mp4"
source_path.write_bytes(b"")
task = SimpleNamespace(
id="task-current",
title="王海颖唱歌录播 04月22日 15时56分",
source_path=str(source_path),
)
repo = SimpleNamespace(get_task_context=lambda task_id: None)
settings = {"biliup_path": "biliup", "cookie_file": "cookies.json"}
with patch(
"biliup_next.infra.adapters.full_video_locator.fetch_biliup_list",
return_value=[
{"bvid": "BVFULL", "title": "王海颖唱歌录播 04月22日 15时56分"},
{"bvid": "BVPURE", "title": "【王海颖 (歌曲纯享版)】 04月22日 15时56分 共20首歌"},
],
):
context = link_context_for_task(task, repo, settings)
self.assertEqual(context["current_full_video_bvid"], "BVFULL")
self.assertEqual(context["current_full_video_link"], "https://www.bilibili.com/video/BVFULL")
self.assertEqual((source_path.parent / "full_video_bvid.txt").read_text(encoding="utf-8"), "BVFULL")
# Allow running this test module directly (outside a test runner).
if __name__ == "__main__":
    unittest.main()

View File

@ -0,0 +1,116 @@
from __future__ import annotations
import tempfile
import unittest
from pathlib import Path
from types import SimpleNamespace
from biliup_next.core.models import Task, utc_now_iso
from biliup_next.infra.workspace_cleanup import WorkspaceCleanupService
class _FakeRepo:
def __init__(self, tasks: list[Task], session_key: str | None = None) -> None:
self.tasks = {task.id: task for task in tasks}
self.session_key = session_key
self.deleted_artifacts: list[tuple[str, str]] = []
self.deleted_artifact_paths: list[tuple[str, str]] = []
def get_task(self, task_id: str) -> Task | None:
return self.tasks.get(task_id)
def get_task_context(self, task_id: str): # noqa: ANN201
if self.session_key is None or task_id not in self.tasks:
return None
return SimpleNamespace(task_id=task_id, session_key=self.session_key)
def list_task_contexts_by_session_key(self, session_key: str): # noqa: ANN201
if session_key != self.session_key:
return []
return [SimpleNamespace(task_id=task_id, session_key=session_key) for task_id in self.tasks]
def delete_artifacts(self, task_id: str, artifact_type: str) -> None:
self.deleted_artifacts.append((task_id, artifact_type))
def delete_artifact_by_path(self, task_id: str, path: str) -> None:
self.deleted_artifact_paths.append((task_id, path))
def _make_task(task_id: str, root: Path) -> Task:
    """Create a collection_synced Task whose workspace holds a source video plus clips."""
    timestamp = utc_now_iso()
    work_dir = root / task_id
    work_dir.mkdir(parents=True)
    source_file = work_dir / "source.mp4"
    source_file.write_bytes(b"source")
    # One dummy clip inside each directory the cleanup service targets.
    for clip_dirname in ("split_video", "publish_video"):
        clip_dir = work_dir / clip_dirname
        clip_dir.mkdir()
        (clip_dir / "01_song.mp4").write_bytes(b"clip")
    return Task(
        id=task_id,
        source_type="local_file",
        source_path=str(source_file),
        title=task_id,
        status="collection_synced",
        created_at=timestamp,
        updated_at=timestamp,
    )
class WorkspaceCleanupServiceTests(unittest.TestCase):
    """Tests for WorkspaceCleanupService.cleanup_task_outputs.

    Verifies on-disk deletion of the source video and split/publish clip
    directories, the matching repository bookkeeping, session-wide fan-out,
    and graceful handling of an already-missing source file.
    """

    def test_cleanup_removes_source_split_and_publish_video_for_single_task(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            root = Path(tmpdir)
            task = _make_task("task-1", root)
            repo = _FakeRepo([task])
            result = WorkspaceCleanupService(repo).cleanup_task_outputs(
                task.id,
                {
                    "delete_source_video_after_collection_synced": True,
                    "delete_split_videos_after_collection_synced": True,
                },
            )
            work_dir = root / "task-1"
            # Source video and both clip directories are removed from disk.
            self.assertFalse((work_dir / "source.mp4").exists())
            self.assertFalse((work_dir / "split_video").exists())
            self.assertFalse((work_dir / "publish_video").exists())
            self.assertEqual(result["task_ids"], ["task-1"])
            # Repo bookkeeping: clip artifacts dropped by type, source by path.
            self.assertEqual(repo.deleted_artifacts, [("task-1", "clip_video")])
            self.assertEqual(repo.deleted_artifact_paths, [("task-1", str((work_dir / "source.mp4").resolve()))])

    def test_cleanup_removes_all_tasks_in_same_session(self) -> None:
        # Two tasks share one session key, so cleaning either one must fan
        # out to every task in that session.
        with tempfile.TemporaryDirectory() as tmpdir:
            root = Path(tmpdir)
            task_1 = _make_task("task-1", root)
            task_2 = _make_task("task-2", root)
            repo = _FakeRepo([task_1, task_2], session_key="session-1")
            result = WorkspaceCleanupService(repo).cleanup_task_outputs(
                task_1.id,
                {
                    "delete_source_video_after_collection_synced": True,
                    "delete_split_videos_after_collection_synced": True,
                },
            )
            for task_id in ("task-1", "task-2"):
                work_dir = root / task_id
                self.assertFalse((work_dir / "source.mp4").exists())
                self.assertFalse((work_dir / "split_video").exists())
                self.assertFalse((work_dir / "publish_video").exists())
            self.assertEqual(result["task_ids"], ["task-1", "task-2"])
            self.assertEqual(repo.deleted_artifacts, [("task-1", "clip_video"), ("task-2", "clip_video")])

    def test_cleanup_skips_missing_source_video(self) -> None:
        # Deleting an already-absent source must be reported as skipped,
        # not raised, and no artifact-path deletion is recorded.
        with tempfile.TemporaryDirectory() as tmpdir:
            root = Path(tmpdir)
            task = _make_task("task-1", root)
            source = Path(task.source_path)
            source.unlink()
            repo = _FakeRepo([task])
            result = WorkspaceCleanupService(repo).cleanup_task_outputs(
                task.id,
                {
                    "delete_source_video_after_collection_synced": True,
                    "delete_split_videos_after_collection_synced": False,
                },
            )
            self.assertIn(str(source.resolve()), result["skipped"])
            self.assertEqual(repo.deleted_artifact_paths, [])
# Allow running this test module directly (outside a test runner).
if __name__ == "__main__":
    unittest.main()