feat: add session-level publish and comment flow

This commit is contained in:
theshy
2026-04-14 16:00:17 +08:00
parent 862db502b0
commit d5d9693581
42 changed files with 2478 additions and 181 deletions

View File

@@ -465,6 +465,35 @@ class ApiServerTests(unittest.TestCase):
self.assertEqual(body["id"], "task-new")
self.assertEqual(body["source_path"], source_path)
def test_post_tasks_creates_task_from_bilibili_url(self) -> None:
    """POST /tasks with source_type=bilibili_url delegates to the ingest service
    and returns 201 with the created task's payload."""
    with tempfile.TemporaryDirectory() as tmpdir:
        source_url = "https://www.bilibili.com/video/BV1TEST1234"
        created_task = Task(
            id="task-bv",
            source_type="bilibili_url",
            source_path=str(Path(tmpdir) / "session" / "task-bv" / "task-bv.mp4"),
            title="video-title",
            status="created",
            created_at="2026-01-01T00:00:00+00:00",
            updated_at="2026-01-01T00:00:00+00:00",
        )
        # Fake ingest service ignores its arguments and returns the canned task.
        ingest_service = SimpleNamespace(create_task_from_url=lambda url, settings: created_task)
        repo = FakeRepo(created_task)
        state = self._state(tmpdir, repo, ingest_service=ingest_service)
        state["settings"]["ingest"] = {"provider": "bilibili_url", "yt_dlp_cmd": "yt-dlp"}
        response_status, _, body = self._request(
            "POST",
            "/tasks",
            state,
            body=json.dumps({"source_type": "bilibili_url", "source_url": source_url}).encode("utf-8"),
            headers={"Content-Type": "application/json"},
        )
        self.assertEqual(response_status, 201)
        self.assertEqual(body["id"], "task-bv")
        self.assertEqual(body["source_type"], "bilibili_url")
def test_post_run_task_action_returns_accepted_payload(self) -> None:
with tempfile.TemporaryDirectory() as tmpdir:
task = Task(

View File

@@ -0,0 +1,256 @@
from __future__ import annotations
import json
import tempfile
import unittest
from pathlib import Path
from biliup_next.core.models import Task, utc_now_iso
from biliup_next.core.errors import ModuleError
from biliup_next.modules.comment.providers.bilibili_top_comment import BilibiliTopCommentProvider
class _FakeBilibiliApi:
def __init__(self) -> None:
self.reply_calls: list[dict[str, object]] = []
def load_cookies(self, path: Path) -> dict[str, str]:
return {"bili_jct": "csrf-token"}
def build_session(self, *, cookies: dict[str, str], referer: str, origin: str | None = None) -> object:
return object()
def get_video_view(self, session, bvid: str, *, error_code: str, error_message: str) -> dict[str, object]:
return {"aid": 123}
def add_reply(self, session, *, csrf: str, aid: int, content: str, error_message: str) -> dict[str, object]:
self.reply_calls.append({"aid": aid, "content": content, "error_message": error_message})
raise ModuleError(
code="COMMENT_POST_FAILED",
message=f"{error_message}: 当前页面评论功能已关闭",
retryable=True,
)
def top_reply(self, session, *, csrf: str, aid: int, rpid: int, error_message: str) -> None:
raise AssertionError("top_reply should not be called when comment is disabled")
class BilibiliTopCommentProviderTests(unittest.TestCase):
    """Behavioral tests for BilibiliTopCommentProvider.comment().

    All cases run against _FakeBilibiliApi, whose add_reply always raises a
    retryable COMMENT_POST_FAILED error ("comment section closed"), so every
    flow is expected to end in a graceful skip rather than a hard failure.
    """

    def test_split_comment_aggregates_session_parts_on_anchor_task(self) -> None:
        """The session anchor posts one split comment aggregating every part's song list."""
        api = _FakeBilibiliApi()
        provider = BilibiliTopCommentProvider(bilibili_api=api)
        with tempfile.TemporaryDirectory() as tmpdir:
            root = Path(tmpdir)
            task = Task("task-1", "local_file", str(root / "source-1.mp4"), "task-1", "published", utc_now_iso(), utc_now_iso())
            task_dir_1 = root / "task-1"
            task_dir_2 = root / "task-2"
            task_dir_1.mkdir(parents=True, exist_ok=True)
            task_dir_2.mkdir(parents=True, exist_ok=True)
            (task_dir_1 / "songs.txt").write_text("00:00:00 Song A — Artist A\n", encoding="utf-8")
            (task_dir_1 / "songs.json").write_text(json.dumps({"songs": [{"title": "Song A", "artist": "Artist A"}]}), encoding="utf-8")
            (task_dir_1 / "bvid.txt").write_text("BV1SPLIT111", encoding="utf-8")
            (task_dir_2 / "songs.txt").write_text("00:00:00 Song B — Artist B\n", encoding="utf-8")
            (task_dir_2 / "songs.json").write_text(json.dumps({"songs": [{"title": "Song B", "artist": "Artist B"}]}), encoding="utf-8")
            cookies_file = root / "cookies.json"
            cookies_file.write_text("{}", encoding="utf-8")

            class _Repo:
                # Repo stub: two contexts share one session; task-1 is the
                # anchor (earliest segment_started_at).
                def get_task_context(self, task_id):  # noqa: ANN001
                    mapping = {
                        "task-1": type("Ctx", (), {"task_id": "task-1", "session_key": "session-1", "segment_started_at": "2026-04-04T09:23:00+08:00", "source_title": "part-1"})(),
                        "task-2": type("Ctx", (), {"task_id": "task-2", "session_key": "session-1", "segment_started_at": "2026-04-04T09:25:00+08:00", "source_title": "part-2"})(),
                    }
                    return mapping[task_id]

                def list_task_contexts_by_session_key(self, session_key):  # noqa: ANN001
                    return [self.get_task_context("task-1"), self.get_task_context("task-2")]

            result = provider.comment(
                task,
                {
                    "session_dir": str(root),
                    "cookies_file": str(cookies_file),
                    "post_split_comment": True,
                    "post_full_video_timeline_comment": False,
                    "__repo": _Repo(),
                },
            )
            # The reply attempt fails with "comment disabled", so the provider
            # reports an overall ok with the split step skipped ...
            self.assertEqual(result["status"], "ok")
            self.assertEqual(result["split"]["status"], "skipped")
            self.assertEqual(result["split"]["reason"], "comment_disabled")
            # ... but exactly one aggregated reply was attempted, covering both parts.
            self.assertEqual(len(api.reply_calls), 1)
            self.assertIn("P1:\n1. Song A — Artist A", api.reply_calls[0]["content"])
            self.assertIn("P2:\n1. Song B — Artist B", api.reply_calls[0]["content"])

    def test_split_comment_skips_on_non_anchor_task(self) -> None:
        """A non-anchor task defers the split comment to the anchor; no reply is attempted."""
        api = _FakeBilibiliApi()
        provider = BilibiliTopCommentProvider(bilibili_api=api)
        with tempfile.TemporaryDirectory() as tmpdir:
            root = Path(tmpdir)
            # task-2 starts later than task-1 within session-1, so it is not the anchor.
            task = Task("task-2", "local_file", str(root / "source-2.mp4"), "task-2", "published", utc_now_iso(), utc_now_iso())
            task_dir = root / "task-2"
            task_dir.mkdir(parents=True, exist_ok=True)
            (task_dir / "songs.txt").write_text("00:00:00 Song B — Artist B\n", encoding="utf-8")
            (task_dir / "songs.json").write_text(json.dumps({"songs": [{"title": "Song B", "artist": "Artist B"}]}), encoding="utf-8")
            (task_dir / "bvid.txt").write_text("BV1SPLIT222", encoding="utf-8")
            cookies_file = root / "cookies.json"
            cookies_file.write_text("{}", encoding="utf-8")

            class _Repo:
                def get_task_context(self, task_id):  # noqa: ANN001
                    mapping = {
                        "task-1": type("Ctx", (), {"task_id": "task-1", "session_key": "session-1", "segment_started_at": "2026-04-04T09:23:00+08:00", "source_title": "part-1"})(),
                        "task-2": type("Ctx", (), {"task_id": "task-2", "session_key": "session-1", "segment_started_at": "2026-04-04T09:25:00+08:00", "source_title": "part-2"})(),
                    }
                    return mapping[task_id]

                def list_task_contexts_by_session_key(self, session_key):  # noqa: ANN001
                    return [self.get_task_context("task-1"), self.get_task_context("task-2")]

            result = provider.comment(
                task,
                {
                    "session_dir": str(root),
                    "cookies_file": str(cookies_file),
                    "post_split_comment": True,
                    "post_full_video_timeline_comment": False,
                    "__repo": _Repo(),
                },
            )
            self.assertEqual(result["status"], "ok")
            self.assertEqual(result["split"]["status"], "skipped")
            self.assertEqual(result["split"]["reason"], "session_split_comment_owned_by_anchor")
            # Ownership belongs to the anchor: no API call should have been made.
            self.assertEqual(api.reply_calls, [])

    def test_comment_skips_when_page_comment_is_disabled(self) -> None:
        """When the comment section is closed, the step is skipped and all done-flags are still written."""
        provider = BilibiliTopCommentProvider(bilibili_api=_FakeBilibiliApi())
        with tempfile.TemporaryDirectory() as tmpdir:
            root = Path(tmpdir)
            task = Task(
                id="task-1",
                source_type="local_file",
                source_path=str(root / "source.mp4"),
                title="task-1",
                status="published",
                created_at=utc_now_iso(),
                updated_at=utc_now_iso(),
            )
            work_dir = root / task.title
            work_dir.mkdir(parents=True, exist_ok=True)
            (work_dir / "songs.txt").write_text("00:00:00 Test Song - Tester\n", encoding="utf-8")
            (work_dir / "songs.json").write_text(json.dumps({"songs": [{"title": "Test Song", "artist": "Tester"}]}), encoding="utf-8")
            (work_dir / "bvid.txt").write_text("BV1COMMENT123", encoding="utf-8")
            cookies_file = root / "cookies.json"
            cookies_file.write_text("{}", encoding="utf-8")
            # No "__repo" here: single-task flow with no session aggregation.
            result = provider.comment(
                task,
                {
                    "session_dir": str(root),
                    "cookies_file": str(cookies_file),
                    "post_split_comment": True,
                    "post_full_video_timeline_comment": False,
                },
            )
            self.assertEqual(result["status"], "ok")
            self.assertEqual(result["split"]["status"], "skipped")
            self.assertEqual(result["split"]["reason"], "comment_disabled")
            # The skip is terminal, not retryable: all completion flags exist.
            self.assertTrue((work_dir / "comment_split_done.flag").exists())
            self.assertTrue((work_dir / "comment_full_done.flag").exists())
            self.assertTrue((work_dir / "comment_done.flag").exists())

    def test_full_comment_aggregates_session_parts_on_anchor_task(self) -> None:
        """The anchor posts one full-video timeline comment merging all parts' songs.txt entries."""
        api = _FakeBilibiliApi()
        provider = BilibiliTopCommentProvider(bilibili_api=api)
        with tempfile.TemporaryDirectory() as tmpdir:
            root = Path(tmpdir)
            task = Task("task-1", "local_file", str(root / "source-1.mp4"), "task-1", "published", utc_now_iso(), utc_now_iso())
            task_dir_1 = root / "task-1"
            task_dir_2 = root / "task-2"
            task_dir_1.mkdir(parents=True, exist_ok=True)
            task_dir_2.mkdir(parents=True, exist_ok=True)
            (task_dir_1 / "songs.txt").write_text("00:00:01 Song A\n00:02:00 Song B\n", encoding="utf-8")
            (task_dir_1 / "songs.json").write_text(json.dumps({"songs": [{"title": "Song A"}]}), encoding="utf-8")
            (task_dir_1 / "bvid.txt").write_text("BV1SPLIT111", encoding="utf-8")
            # full_video_bvid.txt only on the anchor: target of the full-video comment.
            (task_dir_1 / "full_video_bvid.txt").write_text("BV1FULL111", encoding="utf-8")
            (task_dir_2 / "songs.txt").write_text("00:00:03 Song C\n", encoding="utf-8")
            cookies_file = root / "cookies.json"
            cookies_file.write_text("{}", encoding="utf-8")

            class _Repo:
                def get_task_context(self, task_id):  # noqa: ANN001
                    mapping = {
                        "task-1": type("Ctx", (), {"task_id": "task-1", "session_key": "session-1", "segment_started_at": "2026-04-04T09:23:00+08:00", "source_title": "part-1"})(),
                        "task-2": type("Ctx", (), {"task_id": "task-2", "session_key": "session-1", "segment_started_at": "2026-04-04T09:25:00+08:00", "source_title": "part-2"})(),
                    }
                    return mapping[task_id]

                def list_task_contexts_by_session_key(self, session_key):  # noqa: ANN001
                    return [self.get_task_context("task-1"), self.get_task_context("task-2")]

            result = provider.comment(
                task,
                {
                    "session_dir": str(root),
                    "cookies_file": str(cookies_file),
                    "post_split_comment": False,
                    "post_full_video_timeline_comment": True,
                    "__repo": _Repo(),
                },
            )
            self.assertEqual(result["status"], "ok")
            self.assertEqual(result["full"]["status"], "skipped")
            self.assertEqual(result["full"]["reason"], "comment_disabled")
            # One attempted reply containing both parts' timelines.
            self.assertEqual(len(api.reply_calls), 1)
            self.assertIn("P1:\n00:00:01 Song A\n00:02:00 Song B", api.reply_calls[0]["content"])
            self.assertIn("P2:\n00:00:03 Song C", api.reply_calls[0]["content"])

    def test_full_comment_skips_on_non_anchor_task(self) -> None:
        """A non-anchor task defers the full-video comment to the anchor; no reply is attempted."""
        api = _FakeBilibiliApi()
        provider = BilibiliTopCommentProvider(bilibili_api=api)
        with tempfile.TemporaryDirectory() as tmpdir:
            root = Path(tmpdir)
            task = Task("task-2", "local_file", str(root / "source-2.mp4"), "task-2", "published", utc_now_iso(), utc_now_iso())
            task_dir = root / "task-2"
            task_dir.mkdir(parents=True, exist_ok=True)
            (task_dir / "songs.txt").write_text("00:00:03 Song C\n", encoding="utf-8")
            (task_dir / "songs.json").write_text(json.dumps({"songs": [{"title": "Song C"}]}), encoding="utf-8")
            (task_dir / "bvid.txt").write_text("BV1SPLIT222", encoding="utf-8")
            (task_dir / "full_video_bvid.txt").write_text("BV1FULL111", encoding="utf-8")
            cookies_file = root / "cookies.json"
            cookies_file.write_text("{}", encoding="utf-8")

            class _Repo:
                def get_task_context(self, task_id):  # noqa: ANN001
                    mapping = {
                        "task-1": type("Ctx", (), {"task_id": "task-1", "session_key": "session-1", "segment_started_at": "2026-04-04T09:23:00+08:00", "source_title": "part-1"})(),
                        "task-2": type("Ctx", (), {"task_id": "task-2", "session_key": "session-1", "segment_started_at": "2026-04-04T09:25:00+08:00", "source_title": "part-2"})(),
                    }
                    return mapping[task_id]

                def list_task_contexts_by_session_key(self, session_key):  # noqa: ANN001
                    return [self.get_task_context("task-1"), self.get_task_context("task-2")]

            result = provider.comment(
                task,
                {
                    "session_dir": str(root),
                    "cookies_file": str(cookies_file),
                    "post_split_comment": False,
                    "post_full_video_timeline_comment": True,
                    "__repo": _Repo(),
                },
            )
            self.assertEqual(result["status"], "ok")
            self.assertEqual(result["full"]["status"], "skipped")
            self.assertEqual(result["full"]["reason"], "session_full_comment_owned_by_anchor")
            self.assertEqual(api.reply_calls, [])
# Allow running this test module directly with `python <module>.py`.
if __name__ == "__main__":
    unittest.main()

View File

@@ -0,0 +1,281 @@
from __future__ import annotations
import json
import subprocess
import sys
import tempfile
import unittest
from pathlib import Path
from unittest.mock import patch
from biliup_next.core.models import Artifact, Task, utc_now_iso
from biliup_next.infra.adapters.biliup_cli import BiliupCliAdapter
from biliup_next.modules.publish.providers.biliup_cli import BiliupCliPublishProvider
class _FakeBiliupAdapter:
def __init__(self) -> None:
self.optional_calls: list[dict] = []
self.run_calls: list[dict] = []
def run_optional(self, cmd: list[str], *, label: str, timeout_seconds: int | None = None, log_path: Path | None = None) -> None:
self.optional_calls.append(
{"cmd": cmd, "label": label, "timeout_seconds": timeout_seconds, "log_path": log_path}
)
def run(self, cmd: list[str], *, label: str, timeout_seconds: int | None = None, log_path: Path | None = None) -> subprocess.CompletedProcess[str]:
self.run_calls.append(
{"cmd": cmd, "label": label, "timeout_seconds": timeout_seconds, "log_path": log_path}
)
return subprocess.CompletedProcess(cmd, 0, stdout='{"bvid":"BV1TEST12345"}', stderr="")
class BiliupCliAdapterTests(unittest.TestCase):
    """Tests for the real BiliupCliAdapter subprocess wrapper."""

    def test_run_writes_publish_log(self) -> None:
        """run() executes the command and writes label, timeout, exit code and
        captured stdout into the given log file."""
        adapter = BiliupCliAdapter()
        with tempfile.TemporaryDirectory() as tmpdir:
            log_path = Path(tmpdir) / "publish.log"
            # Use the current interpreter as a portable, fast subprocess.
            result = adapter.run(
                [sys.executable, "-c", "print('hello from biliup adapter')"],
                label="adapter smoke",
                timeout_seconds=5,
                log_path=log_path,
            )
            self.assertEqual(result.returncode, 0)
            content = log_path.read_text(encoding="utf-8")
            self.assertIn("adapter smoke", content)
            self.assertIn("timeout_seconds: 5", content)
            self.assertIn("exit: 0", content)
            self.assertIn("hello from biliup adapter", content)
class BiliupCliPublishProviderTests(unittest.TestCase):
    """Tests for BiliupCliPublishProvider.publish(): CLI plumbing, bvid
    extraction, and resume-by-append behavior around bvid.txt /
    publish_progress.json / upload_done.flag state files."""

    def test_publish_passes_timeout_and_log_path(self) -> None:
        """Timeout and log path from settings are forwarded to every adapter call,
        and success persists bvid.txt plus upload_done.flag."""
        adapter = _FakeBiliupAdapter()
        provider = BiliupCliPublishProvider(adapter=adapter)
        with tempfile.TemporaryDirectory() as tmpdir:
            root = Path(tmpdir)
            task = Task(
                id="task-1",
                source_type="local_file",
                source_path=str(root / "source.mp4"),
                title="task-1",
                status="split_done",
                created_at=utc_now_iso(),
                updated_at=utc_now_iso(),
            )
            work_dir = root / task.title
            work_dir.mkdir(parents=True, exist_ok=True)
            (work_dir / "songs.txt").write_text("00:00:00 Test Song - Tester\n", encoding="utf-8")
            (work_dir / "songs.json").write_text(json.dumps({"songs": [{"title": "Test Song"}]}), encoding="utf-8")
            upload_config = root / "upload_config.json"
            upload_config.write_text("{}", encoding="utf-8")
            clip_path = work_dir / "clip-1.mp4"
            clip_path.write_text("fake", encoding="utf-8")
            clip = Artifact(
                id=None,
                task_id=task.id,
                artifact_type="clip_video",
                path=str(clip_path),
                metadata_json="{}",
                created_at=utc_now_iso(),
            )
            record = provider.publish(
                task,
                [clip],
                {
                    "session_dir": str(root),
                    "upload_config_file": str(upload_config),
                    "biliup_path": "runtime/biliup",
                    "cookie_file": "runtime/cookies.json",
                    "retry_count": 2,
                    "command_timeout_seconds": 123,
                },
            )
            self.assertEqual(record.bvid, "BV1TEST12345")
            # Both optional and required invocations carry the same timeout/log.
            self.assertEqual(adapter.optional_calls[0]["timeout_seconds"], 123)
            self.assertEqual(adapter.optional_calls[0]["log_path"], work_dir / "publish.log")
            self.assertEqual(adapter.run_calls[0]["timeout_seconds"], 123)
            self.assertEqual(adapter.run_calls[0]["log_path"], work_dir / "publish.log")
            self.assertTrue((work_dir / "bvid.txt").exists())
            self.assertTrue((work_dir / "upload_done.flag").exists())

    def test_extract_bvid_supports_rust_debug_string_format(self) -> None:
        """_extract_bvid handles biliup's Rust Debug-formatted output, not just JSON."""
        provider = BiliupCliPublishProvider()
        output = 'ResponseData { code: 0, data: Some(Object {"bvid": String("BV1N5DrBQEBg")}), message: "0" }'
        self.assertEqual(provider._extract_bvid(output), "BV1N5DrBQEBg")

    def test_publish_does_not_reuse_stale_bvid_without_upload_done_flag(self) -> None:
        """A bvid.txt left by an interrupted run (no upload_done.flag, no progress file)
        is discarded: a fresh upload runs and its new bvid overwrites the stale one."""
        adapter = _FakeBiliupAdapter()
        # Override run to emit a different bvid than any pre-existing state.
        adapter.run = lambda cmd, *, label, timeout_seconds=None, log_path=None: subprocess.CompletedProcess(  # type: ignore[method-assign]
            cmd, 0, stdout='ResponseData { code: 0, data: Some(Object {"bvid": String("BV1NEW1234567")}) }', stderr=""
        )
        provider = BiliupCliPublishProvider(adapter=adapter)
        with tempfile.TemporaryDirectory() as tmpdir:
            root = Path(tmpdir)
            task = Task(
                id="task-1",
                source_type="local_file",
                source_path=str(root / "source.mp4"),
                title="task-1",
                status="split_done",
                created_at=utc_now_iso(),
                updated_at=utc_now_iso(),
            )
            work_dir = root / task.title
            work_dir.mkdir(parents=True, exist_ok=True)
            (work_dir / "songs.txt").write_text("00:00:00 Test Song - Tester\n", encoding="utf-8")
            (work_dir / "songs.json").write_text(json.dumps({"songs": [{"title": "Test Song"}]}), encoding="utf-8")
            # Stale bvid with neither upload_done.flag nor publish_progress.json.
            (work_dir / "bvid.txt").write_text("BVOLD1234567", encoding="utf-8")
            upload_config = root / "upload_config.json"
            upload_config.write_text("{}", encoding="utf-8")
            clip_path = work_dir / "clip-1.mp4"
            clip_path.write_text("fake", encoding="utf-8")
            clip = Artifact(
                id=None,
                task_id=task.id,
                artifact_type="clip_video",
                path=str(clip_path),
                metadata_json="{}",
                created_at=utc_now_iso(),
            )
            record = provider.publish(
                task,
                [clip],
                {
                    "session_dir": str(root),
                    "upload_config_file": str(upload_config),
                    "biliup_path": "runtime/biliup",
                    "cookie_file": "runtime/cookies.json",
                    "retry_count": 2,
                    "command_timeout_seconds": 123,
                },
            )
            self.assertEqual(record.bvid, "BV1NEW1234567")
            self.assertEqual((work_dir / "bvid.txt").read_text(encoding="utf-8"), "BV1NEW1234567")

    def test_publish_resumes_append_when_bvid_exists_without_upload_done(self) -> None:
        """With bvid.txt plus a publish_progress.json, the provider resumes by
        appending the remaining clips to the existing bvid in one call."""
        adapter = _FakeBiliupAdapter()
        provider = BiliupCliPublishProvider(adapter=adapter)
        with tempfile.TemporaryDirectory() as tmpdir:
            root = Path(tmpdir)
            task = Task(
                id="task-1",
                source_type="local_file",
                source_path=str(root / "source.mp4"),
                title="task-1",
                status="split_done",
                created_at=utc_now_iso(),
                updated_at=utc_now_iso(),
            )
            work_dir = root / task.title
            work_dir.mkdir(parents=True, exist_ok=True)
            (work_dir / "songs.txt").write_text("00:00:00 Test Song - Tester\n", encoding="utf-8")
            (work_dir / "songs.json").write_text(json.dumps({"songs": [{"title": "Test Song"}]}), encoding="utf-8")
            (work_dir / "bvid.txt").write_text("BV1RESUME1234", encoding="utf-8")
            # Progress file marks an interrupted append run with nothing completed yet.
            (work_dir / "publish_progress.json").write_text(
                json.dumps({"bvid": "BV1RESUME1234", "completed_append_batches": []}),
                encoding="utf-8",
            )
            upload_config = root / "upload_config.json"
            upload_config.write_text("{}", encoding="utf-8")
            clips = []
            for index in range(1, 11):
                clip_path = work_dir / f"clip-{index}.mp4"
                clip_path.write_text("fake", encoding="utf-8")
                clips.append(
                    Artifact(
                        id=None,
                        task_id=task.id,
                        artifact_type="clip_video",
                        path=str(clip_path),
                        metadata_json="{}",
                        created_at=utc_now_iso(),
                    )
                )
            # Neutralize inter-batch backoff so the test stays fast.
            with patch("biliup_next.modules.publish.providers.biliup_cli.time.sleep", return_value=None):
                record = provider.publish(
                    task,
                    clips,
                    {
                        "session_dir": str(root),
                        "upload_config_file": str(upload_config),
                        "biliup_path": "runtime/biliup",
                        "cookie_file": "runtime/cookies.json",
                        "retry_count": 2,
                        "command_timeout_seconds": 123,
                    },
                )
            self.assertEqual(record.bvid, "BV1RESUME1234")
            # A single append invocation targeting the existing bvid.
            self.assertEqual(len(adapter.run_calls), 1)
            self.assertIn("append", adapter.run_calls[0]["cmd"])
            self.assertIn("BV1RESUME1234", adapter.run_calls[0]["cmd"])
            self.assertTrue((work_dir / "upload_done.flag").exists())

    def test_publish_creates_progress_from_existing_bvid_for_append_resume(self) -> None:
        """With bvid.txt but no progress file, the provider still resumes via append
        (instead of re-uploading) and finishes without leaving a progress file behind."""
        adapter = _FakeBiliupAdapter()
        provider = BiliupCliPublishProvider(adapter=adapter)
        with tempfile.TemporaryDirectory() as tmpdir:
            root = Path(tmpdir)
            task = Task(
                id="task-1",
                source_type="local_file",
                source_path=str(root / "source.mp4"),
                title="task-1",
                status="split_done",
                created_at=utc_now_iso(),
                updated_at=utc_now_iso(),
            )
            work_dir = root / task.title
            work_dir.mkdir(parents=True, exist_ok=True)
            (work_dir / "songs.txt").write_text("00:00:00 Test Song - Tester\n", encoding="utf-8")
            (work_dir / "songs.json").write_text(json.dumps({"songs": [{"title": "Test Song"}]}), encoding="utf-8")
            (work_dir / "bvid.txt").write_text("BV1RESUME1234", encoding="utf-8")
            upload_config = root / "upload_config.json"
            upload_config.write_text("{}", encoding="utf-8")
            clips = []
            for index in range(1, 11):
                clip_path = work_dir / f"clip-{index}.mp4"
                clip_path.write_text("fake", encoding="utf-8")
                clips.append(
                    Artifact(
                        id=None,
                        task_id=task.id,
                        artifact_type="clip_video",
                        path=str(clip_path),
                        metadata_json="{}",
                        created_at=utc_now_iso(),
                    )
                )
            with patch("biliup_next.modules.publish.providers.biliup_cli.time.sleep", return_value=None):
                record = provider.publish(
                    task,
                    clips,
                    {
                        "session_dir": str(root),
                        "upload_config_file": str(upload_config),
                        "biliup_path": "runtime/biliup",
                        "cookie_file": "runtime/cookies.json",
                        "retry_count": 2,
                        "command_timeout_seconds": 123,
                    },
                )
            self.assertEqual(record.bvid, "BV1RESUME1234")
            self.assertEqual(len(adapter.run_calls), 1)
            self.assertIn("append", adapter.run_calls[0]["cmd"])
            # The transient progress file is cleaned up on success.
            self.assertFalse((work_dir / "publish_progress.json").exists())
            self.assertTrue((work_dir / "upload_done.flag").exists())
# Allow running this test module directly with `python <module>.py`.
if __name__ == "__main__":
    unittest.main()

View File

@@ -0,0 +1,49 @@
from __future__ import annotations
import tempfile
import unittest
from pathlib import Path
from biliup_next.modules.ingest.providers.bilibili_url import BilibiliUrlIngestProvider
class FakeYtDlpAdapter:
    """yt-dlp stand-in: canned probe metadata and a byte-stub download."""

    def probe(self, *, yt_dlp_cmd: str, source_url: str):  # noqa: ANN001
        """Return fixed video metadata regardless of the URL probed."""
        return {
            "id": "BV1TEST1234",
            "title": "测试视频标题",
            "uploader": "测试主播",
            "duration": 321.0,
        }

    def download(self, *, yt_dlp_cmd: str, source_url: str, output_template: str, format_selector=None):  # noqa: ANN001
        """Write a placeholder mp4 at the templated path and report success."""
        target = Path(output_template.replace("%(ext)s", "mp4"))
        target.parent.mkdir(parents=True, exist_ok=True)
        target.write_bytes(b"fake-video")
        # Anonymous result object mimicking a CompletedProcess-like interface.
        return type("Result", (), {"returncode": 0, "stdout": "ok", "stderr": ""})()
class BilibiliUrlIngestProviderTests(unittest.TestCase):
    """Tests for BilibiliUrlIngestProvider using FakeYtDlpAdapter."""

    def test_resolve_and_download_source(self) -> None:
        """resolve_source surfaces probe metadata; download_source materializes
        an .mp4 at the expected location."""
        provider = BilibiliUrlIngestProvider(yt_dlp=FakeYtDlpAdapter())
        settings = {"yt_dlp_cmd": "yt-dlp"}
        resolved = provider.resolve_source("https://www.bilibili.com/video/BV1TEST1234", settings)
        self.assertEqual(resolved["video_id"], "BV1TEST1234")
        self.assertEqual(resolved["title"], "测试视频标题")
        self.assertEqual(resolved["streamer"], "测试主播")
        with tempfile.TemporaryDirectory() as tmpdir:
            downloaded = provider.download_source(
                "https://www.bilibili.com/video/BV1TEST1234",
                Path(tmpdir),
                settings,
                task_id=str(resolved["task_id"]),
            )
            self.assertTrue(downloaded.exists())
            self.assertEqual(downloaded.suffix, ".mp4")
# Allow running this test module directly with `python <module>.py`.
if __name__ == "__main__":
    unittest.main()

View File

@@ -0,0 +1,61 @@
from __future__ import annotations
import unittest
from biliup_next.core.models import Task, TaskContext
from biliup_next.modules.ingest.service import IngestService
class _FakeRepo:
def __init__(self, contexts: list[TaskContext]) -> None:
self.contexts = contexts
def find_recent_task_contexts(self, streamer: str) -> list[TaskContext]:
return [context for context in self.contexts if context.streamer == streamer]
class IngestSessionGroupingTests(unittest.TestCase):
    """Tests for IngestService._infer_session_key session-grouping heuristics."""

    def test_infer_session_key_groups_same_streamer_within_three_hours_to_earliest_title(self) -> None:
        """A new segment from the same streamer within the gap window joins the
        existing session and inherits its full-video bvid."""
        existing_context = TaskContext(
            id=None,
            task_id="task-1",
            session_key="王海颖唱歌录播 04月04日 21时59分 p01 王海颖唱歌录播 04月04日 21时59分",
            streamer="王海颖唱歌录播",
            room_id=None,
            source_title="王海颖唱歌录播 04月04日 21时59分 p01 王海颖唱歌录播 04月04日 21时59分",
            segment_started_at="2026-04-04T21:59:00+08:00",
            segment_duration_seconds=None,
            full_video_bvid="BVFULL123",
            created_at="2026-04-04T14:00:00+00:00",
            updated_at="2026-04-04T14:00:00+00:00",
        )
        service = IngestService(registry=None, repo=_FakeRepo([existing_context]))  # type: ignore[arg-type]
        # New segment starts 2h31m after the existing one — inside the window.
        session_key, inherited_bvid = service._infer_session_key(
            streamer="王海颖唱歌录播",
            room_id=None,
            segment_started_at="2026-04-05T00:30:00+08:00",
            source_title="王海颖唱歌录播 04月05日 00时30分 p02 王海颖唱歌录播 04月05日 00时30分",
            gap_minutes=60,
        )
        self.assertEqual(session_key, existing_context.session_key)
        self.assertEqual(inherited_bvid, "BVFULL123")

    def test_infer_session_key_uses_current_title_when_no_recent_context_matches(self) -> None:
        """With no matching recent context, the segment's own title becomes the
        session key and no bvid is inherited."""
        service = IngestService(registry=None, repo=_FakeRepo([]))  # type: ignore[arg-type]
        session_key, inherited_bvid = service._infer_session_key(
            streamer="王海颖唱歌录播",
            room_id=None,
            segment_started_at="2026-04-05T00:30:00+08:00",
            source_title="王海颖唱歌录播 04月05日 00时30分 p02 王海颖唱歌录播 04月05日 00时30分",
            gap_minutes=60,
        )
        self.assertEqual(session_key, "王海颖唱歌录播 04月05日 00时30分 p02 王海颖唱歌录播 04月05日 00时30分")
        self.assertIsNone(inherited_bvid)
# Allow running this test module directly with `python <module>.py`.
if __name__ == "__main__":
    unittest.main()

View File

@@ -0,0 +1,159 @@
from __future__ import annotations
import json
import tempfile
import unittest
from pathlib import Path
from biliup_next.core.models import Artifact, PublishRecord, Task, TaskContext, TaskStep
from biliup_next.modules.publish.service import PublishService
class _FakePublishProvider:
    """Publish-provider double: records every call and returns a fixed record."""

    def __init__(self) -> None:
        self.calls: list[tuple[str, list[str], dict[str, object]]] = []

    def publish(self, task: Task, clip_videos: list[Artifact], settings: dict[str, object]) -> PublishRecord:
        """Capture (task id, clip paths, settings snapshot) and fake a bilibili upload."""
        clip_paths = [artifact.path for artifact in clip_videos]
        self.calls.append((task.id, clip_paths, dict(settings)))
        return PublishRecord(
            id=None,
            task_id=task.id,
            platform="bilibili",
            aid=None,
            bvid="BV1SESSION123",
            title=task.title,
            published_at="2026-01-01T00:00:00+00:00",
        )
class _FakeRegistry:
def __init__(self, provider) -> None: # noqa: ANN001
self.provider = provider
def get(self, provider_type: str, provider_id: str): # noqa: ANN001
return self.provider
class _FakeRepo:
def __init__(self, tasks: list[Task], contexts: list[TaskContext], artifacts: dict[str, list[Artifact]]) -> None:
self.tasks = {task.id: task for task in tasks}
self.contexts = {context.task_id: context for context in contexts}
self.artifacts = artifacts
self.publish_records: list[PublishRecord] = []
self.step_updates: list[tuple[str, str, str]] = []
self.task_updates: list[tuple[str, str]] = []
def get_task(self, task_id: str) -> Task | None:
return self.tasks.get(task_id)
def list_artifacts(self, task_id: str) -> list[Artifact]:
return list(self.artifacts.get(task_id, []))
def get_task_context(self, task_id: str) -> TaskContext | None:
return self.contexts.get(task_id)
def list_task_contexts_by_session_key(self, session_key: str) -> list[TaskContext]:
return [context for context in self.contexts.values() if context.session_key == session_key]
def add_publish_record(self, record: PublishRecord) -> None:
self.publish_records.append(record)
def add_artifact(self, artifact: Artifact) -> None:
self.artifacts.setdefault(artifact.task_id, []).append(artifact)
def update_step_status(self, task_id: str, step_name: str, status: str, **kwargs) -> None: # noqa: ANN001
self.step_updates.append((task_id, step_name, status))
def update_task_status(self, task_id: str, status: str, updated_at: str) -> None:
self.task_updates.append((task_id, status))
task = self.tasks[task_id]
self.tasks[task_id] = Task(task.id, task.source_type, task.source_path, task.title, status, task.created_at, updated_at)
class PublishServiceTests(unittest.TestCase):
    """Tests for PublishService session-level publish orchestration."""

    def test_anchor_task_publishes_aggregated_session_clips_and_marks_all_tasks_published(self) -> None:
        """The anchor task publishes the whole session: clips from every part are
        aggregated, all session tasks flip to published, each gets a publish
        record and a bvid.txt."""
        provider = _FakePublishProvider()
        with tempfile.TemporaryDirectory() as tmpdir:
            root = Path(tmpdir)
            task1 = Task("task-1", "local_file", "/tmp/a.mp4", "task-1", "split_done", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
            task2 = Task("task-2", "local_file", "/tmp/b.mp4", "task-2", "split_done", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
            # task-1 is the anchor: earliest segment_started_at in session-1.
            ctx1 = TaskContext(None, "task-1", "session-1", "s", None, "part-1", "2026-04-04T09:23:00+08:00", None, None, task1.created_at, task1.updated_at)
            ctx2 = TaskContext(None, "task-2", "session-1", "s", None, "part-2", "2026-04-04T09:25:00+08:00", None, None, task2.created_at, task2.updated_at)
            artifacts = {
                "task-1": [Artifact(None, "task-1", "clip_video", str(root / "a1.mp4"), "{}", task1.created_at)],
                "task-2": [Artifact(None, "task-2", "clip_video", str(root / "b1.mp4"), "{}", task2.created_at)],
            }
            repo = _FakeRepo([task1, task2], [ctx1, ctx2], artifacts)
            service = PublishService(_FakeRegistry(provider), repo)
            record = service.run("task-1", {"provider": "biliup_cli", "session_dir": str(root)})
            self.assertEqual(record.bvid, "BV1SESSION123")
            # One provider call, on the anchor, with clips ordered by segment start.
            self.assertEqual(provider.calls[0][0], "task-1")
            self.assertEqual(provider.calls[0][1], [str(root / "a1.mp4"), str(root / "b1.mp4")])
            # The service materializes aggregated song lists for the provider.
            aggregate_settings = provider.calls[0][2]
            aggregate_txt = Path(str(aggregate_settings["publish_songs_txt_path"]))
            aggregate_json = Path(str(aggregate_settings["publish_songs_json_path"]))
            self.assertTrue(aggregate_txt.exists())
            self.assertTrue(aggregate_json.exists())
            self.assertIn(("task-1", "published"), repo.task_updates)
            self.assertIn(("task-2", "published"), repo.task_updates)
            self.assertEqual(len(repo.publish_records), 2)
            self.assertTrue((root / "task-1" / "bvid.txt").exists())
            self.assertTrue((root / "task-2" / "bvid.txt").exists())

    def test_non_anchor_task_reuses_existing_session_bvid_without_republishing(self) -> None:
        """Running a non-anchor task whose session already has a bvid reuses it:
        no provider call, but the task is marked published and gets a bvid.txt."""
        provider = _FakePublishProvider()
        with tempfile.TemporaryDirectory() as tmpdir:
            root = Path(tmpdir)
            (root / "task-1").mkdir(parents=True, exist_ok=True)
            (root / "task-1" / "bvid.txt").write_text("BV1SESSION123", encoding="utf-8")
            task1 = Task("task-1", "local_file", "/tmp/a.mp4", "task-1", "published", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
            task2 = Task("task-2", "local_file", "/tmp/b.mp4", "task-2", "split_done", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
            ctx1 = TaskContext(None, "task-1", "session-1", "s", None, "part-1", "2026-04-04T09:23:00+08:00", None, None, task1.created_at, task1.updated_at)
            ctx2 = TaskContext(None, "task-2", "session-1", "s", None, "part-2", "2026-04-04T09:25:00+08:00", None, None, task2.created_at, task2.updated_at)
            repo = _FakeRepo([task1, task2], [ctx1, ctx2], {"task-2": []})
            service = PublishService(_FakeRegistry(provider), repo)
            record = service.run("task-2", {"provider": "biliup_cli", "session_dir": str(root)})
            self.assertEqual(record.bvid, "BV1SESSION123")
            self.assertEqual(provider.calls, [])
            self.assertIn(("task-2", "published"), repo.task_updates)
            self.assertTrue((root / "task-2" / "bvid.txt").exists())

    def test_session_publish_aggregates_song_lists_for_provider_metadata(self) -> None:
        """Aggregated songs.txt is sectioned by part (P1:/P2:) and aggregated
        songs.json concatenates all parts' song entries."""
        provider = _FakePublishProvider()
        with tempfile.TemporaryDirectory() as tmpdir:
            root = Path(tmpdir)
            (root / "task-1").mkdir(parents=True, exist_ok=True)
            (root / "task-2").mkdir(parents=True, exist_ok=True)
            (root / "task-1" / "songs.txt").write_text("00:00:00 Song A — Artist A\n", encoding="utf-8")
            (root / "task-2" / "songs.txt").write_text("00:00:00 Song B — Artist B\n", encoding="utf-8")
            (root / "task-1" / "songs.json").write_text('{"songs":[{"title":"Song A"},{"title":"Song A2"}]}\n', encoding="utf-8")
            (root / "task-2" / "songs.json").write_text('{"songs":[{"title":"Song B"}]}\n', encoding="utf-8")
            task1 = Task("task-1", "local_file", "/tmp/a.mp4", "task-1", "split_done", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
            task2 = Task("task-2", "local_file", "/tmp/b.mp4", "task-2", "split_done", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
            ctx1 = TaskContext(None, "task-1", "session-1", "s", None, "part-1", "2026-04-04T09:23:00+08:00", None, None, task1.created_at, task1.updated_at)
            ctx2 = TaskContext(None, "task-2", "session-1", "s", None, "part-2", "2026-04-04T09:25:00+08:00", None, None, task2.created_at, task2.updated_at)
            artifacts = {
                "task-1": [Artifact(None, "task-1", "clip_video", str(root / "a1.mp4"), "{}", task1.created_at)],
                "task-2": [Artifact(None, "task-2", "clip_video", str(root / "b1.mp4"), "{}", task2.created_at)],
            }
            repo = _FakeRepo([task1, task2], [ctx1, ctx2], artifacts)
            service = PublishService(_FakeRegistry(provider), repo)
            service.run("task-1", {"provider": "biliup_cli", "session_dir": str(root)})
            settings = provider.calls[0][2]
            aggregate_txt = Path(str(settings["publish_songs_txt_path"])).read_text(encoding="utf-8")
            aggregate_json = Path(str(settings["publish_songs_json_path"])).read_text(encoding="utf-8")
            self.assertIn("P1:", aggregate_txt)
            self.assertIn("Song A — Artist A", aggregate_txt)
            self.assertIn("P2:", aggregate_txt)
            self.assertIn("Song B — Artist B", aggregate_txt)
            # 2 songs from part 1 + 1 from part 2.
            self.assertEqual(len(json.loads(aggregate_json)["songs"]), 3)
# Allow running this test module directly with `python <module>.py`.
if __name__ == "__main__":
    unittest.main()

View File

@@ -11,6 +11,7 @@ from biliup_next.core.models import Task, TaskContext
class FakeRepo:
def __init__(self, task: Task, context: TaskContext | None = None, contexts: list[TaskContext] | None = None) -> None:
self.task = task
self.tasks = {task.id: task}
self.context = context
self.contexts = contexts or ([] if context is None else [context])
self.task_context_upserts: list[TaskContext] = []
@@ -19,7 +20,7 @@ class FakeRepo:
self.updated_session_bvid: tuple[str, str, str] | None = None
def get_task(self, task_id: str) -> Task | None:
return self.task if task_id == self.task.id else None
return self.tasks.get(task_id)
def get_task_context(self, task_id: str) -> TaskContext | None:
return self.context if task_id == self.task.id else None
@@ -78,6 +79,56 @@ class SessionDeliveryServiceTests(unittest.TestCase):
self.assertTrue(persisted_path.exists())
self.assertEqual(persisted_path.read_text(encoding="utf-8"), "BVWEBHOOK123")
def test_receive_full_video_webhook_uses_source_title_to_expand_to_session(self) -> None:
    """A webhook matched by source_title should fan the bvid out to every task in the session."""
    timestamp = "2026-01-01T00:00:00+00:00"

    def _context(task_id: str, title: str) -> TaskContext:
        # Both contexts share one session key so the webhook can expand across parts.
        return TaskContext(
            id=None,
            task_id=task_id,
            session_key="session-anchor",
            streamer="streamer",
            room_id="room",
            source_title=title,
            segment_started_at=None,
            segment_duration_seconds=None,
            full_video_bvid=None,
            created_at=timestamp,
            updated_at=timestamp,
        )

    with tempfile.TemporaryDirectory() as tmpdir:
        anchor_ctx = _context("task-1", "anchor-title")
        sibling_ctx = _context("task-2", "sibling-title")
        anchor_task = Task("task-1", "local_file", "/tmp/source.mp4", "task-title", "published", timestamp, timestamp)
        repo = FakeRepo(anchor_task, context=anchor_ctx, contexts=[anchor_ctx, sibling_ctx])
        # Register the sibling task so the session expansion can resolve it.
        repo.tasks["task-2"] = Task(
            "task-2",
            "local_file",
            "/tmp/source-2.mp4",
            "task-title-2",
            "published",
            timestamp,
            timestamp,
        )
        state = {"repo": repo, "settings": {"paths": {"session_dir": str(Path(tmpdir) / "session")}}}

        outcome = SessionDeliveryService(state).receive_full_video_webhook(
            {"source_title": "anchor-title", "full_video_bvid": "BVWEBHOOK123"}
        )

        self.assertEqual(outcome["session_key"], "session-anchor")
        self.assertEqual(outcome["updated_count"], 2)
        bindings = repo.session_binding_upserts
        self.assertTrue(any(b.session_key == "session-anchor" for b in bindings))
        self.assertTrue(any(b.source_title == "anchor-title" for b in bindings))
def test_merge_session_returns_error_when_task_ids_empty(self) -> None:
task = Task("task-1", "local_file", "/tmp/source.mp4", "task-title", "created", "2026-01-01T00:00:00+00:00", "2026-01-01T00:00:00+00:00")
repo = FakeRepo(task)

View File

@@ -28,7 +28,9 @@ class SettingsServiceTests(unittest.TestCase):
"upload_config_file": {"type": "string", "default": "runtime/upload_config.json"}
},
"ingest": {
"ffprobe_bin": {"type": "string", "default": "ffprobe"}
"ffprobe_bin": {"type": "string", "default": "ffprobe"},
"yt_dlp_cmd": {"type": "string", "default": "yt-dlp"},
"yt_dlp_format": {"type": "string", "default": ""}
},
"transcribe": {
"ffmpeg_bin": {"type": "string", "default": "ffmpeg"}
@@ -37,7 +39,8 @@
"ffmpeg_bin": {"type": "string", "default": "ffmpeg"}
},
"song_detect": {
"codex_cmd": {"type": "string", "default": "codex"}
"codex_cmd": {"type": "string", "default": "codex"},
"qwen_cmd": {"type": "string", "default": "qwen"}
},
"publish": {
"biliup_path": {"type": "string", "default": "runtime/biliup"},
@@ -59,10 +62,10 @@
"cookies_file": "runtime/cookies.json",
"upload_config_file": "runtime/upload_config.json"
},
"ingest": {"ffprobe_bin": "ffprobe"},
"ingest": {"ffprobe_bin": "ffprobe", "yt_dlp_cmd": "yt-dlp", "yt_dlp_format": ""},
"transcribe": {"ffmpeg_bin": "ffmpeg"},
"split": {"ffmpeg_bin": "ffmpeg"},
"song_detect": {"codex_cmd": "codex"},
"song_detect": {"codex_cmd": "codex", "qwen_cmd": "qwen"},
"publish": {"biliup_path": "runtime/biliup", "cookie_file": "runtime/cookies.json"}
}
""",

View File

@@ -0,0 +1,77 @@
from __future__ import annotations
import json
import tempfile
import unittest
from pathlib import Path
from biliup_next.core.models import Artifact, Task, utc_now_iso
from biliup_next.modules.song_detect.providers.qwen_cli import QwenCliSongDetector
class FakeQwenCliAdapter:
    """Test double for the qwen CLI adapter: writes a canned songs.json instead of running the CLI."""

    def __init__(self, returncode: int = 0) -> None:
        # Exit code the fake invocation should report back to the caller.
        self.returncode = returncode
        # Records the command string passed to the most recent invocation.
        self.last_qwen_cmd: str | None = None

    def run_song_detect(self, *, qwen_cmd: str, work_dir: Path, prompt: str):  # noqa: ANN001
        """Pretend to run song detection by dropping a fixed songs.json into *work_dir*."""
        self.last_qwen_cmd = qwen_cmd
        payload = {
            "songs": [
                {
                    "start": "00:01:23,000",
                    "end": "00:03:45,000",
                    "title": "测试歌曲",
                    "artist": "测试歌手",
                    "confidence": 0.93,
                    "evidence": "歌词命中",
                }
            ]
        }
        (work_dir / "songs.json").write_text(
            json.dumps(payload, ensure_ascii=False), encoding="utf-8"
        )
        # Mimic a subprocess.CompletedProcess-like result object.
        result_cls = type("Result", (), {"returncode": self.returncode, "stdout": "ok", "stderr": ""})
        return result_cls()
class SongDetectProviderTests(unittest.TestCase):
    """Exercises QwenCliSongDetector against the faked qwen CLI adapter."""

    def test_qwen_cli_provider_generates_json_and_txt_artifacts(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            workspace = Path(tmpdir)
            srt_path = workspace / "subtitle.srt"
            srt_path.write_text("1\n00:00:00,000 --> 00:00:03,000\n测试字幕\n", encoding="utf-8")

            detector = QwenCliSongDetector(adapter=FakeQwenCliAdapter())
            task = Task(
                id="task-1",
                source_type="local_file",
                source_path=str(workspace / "video.mp4"),
                title="task-1",
                status="transcribed",
                created_at=utc_now_iso(),
                updated_at=utc_now_iso(),
            )
            subtitle_artifact = Artifact(
                id=None,
                task_id=task.id,
                artifact_type="subtitle_srt",
                path=str(srt_path),
                metadata_json=None,
                created_at=utc_now_iso(),
            )

            json_artifact, txt_artifact = detector.detect(task, subtitle_artifact, {"qwen_cmd": "qwen"})

            # Both artifacts must be tagged with the provider and exist on disk.
            for artifact in (json_artifact, txt_artifact):
                self.assertEqual(json.loads(artifact.metadata_json)["provider"], "qwen_cli")
                self.assertTrue(Path(artifact.path).exists())
            self.assertIn("测试歌曲", Path(txt_artifact.path).read_text(encoding="utf-8"))
# Allow running this test module directly (e.g. `python this_file.py`).
if __name__ == "__main__":
    unittest.main()

View File

@@ -65,6 +65,69 @@ class TaskEngineTests(unittest.TestCase):
self.assertTrue(waiting_payload["waiting_for_retry"])
self.assertEqual(waiting_payload["step"], "publish")
def test_next_runnable_step_blocks_non_anchor_session_publish_until_anchor_runs(self) -> None:
    """The later part of a session must wait: only the anchor (earliest part) may publish first."""
    task = SimpleNamespace(id="task-2", status="split_done")
    steps = {"publish": TaskStep(None, "task-2", "publish", "pending", None, None, 0, None, None)}

    class _SessionRepo:
        # Minimal repo double: two parts share session-1 and task-2 is the later one.
        def get_task_context(self, task_id):  # noqa: ANN001
            return SimpleNamespace(task_id=task_id, session_key="session-1")

        def list_task_contexts_by_session_key(self, session_key):  # noqa: ANN001
            parts = [
                ("task-1", "2026-04-04T09:23:00+08:00", "part-1"),
                ("task-2", "2026-04-04T09:25:00+08:00", "part-2"),
            ]
            return [
                SimpleNamespace(task_id=tid, segment_started_at=started, source_title=title)
                for tid, started, title in parts
            ]

        def get_task(self, task_id):  # noqa: ANN001
            return SimpleNamespace(id=task_id, status="split_done")

    state = {
        "repo": _SessionRepo(),
        "settings": {
            "comment": {"enabled": True},
            "collection": {"enabled": True},
            "paths": {},
            "publish": {},
        },
    }
    # task-2 is not the anchor, so no step is runnable yet.
    self.assertEqual(next_runnable_step(task, steps, state), (None, None))
def test_next_runnable_step_allows_anchor_session_publish_when_all_parts_split_done(self) -> None:
    """The anchor part (earliest segment) may publish once every sibling reached split_done."""
    task = SimpleNamespace(id="task-1", status="split_done")
    steps = {"publish": TaskStep(None, "task-1", "publish", "pending", None, None, 0, None, None)}

    class _SessionRepo:
        def get_task_context(self, task_id):  # noqa: ANN001
            return SimpleNamespace(task_id=task_id, session_key="session-1")

        def list_task_contexts_by_session_key(self, session_key):  # noqa: ANN001
            parts = [
                ("task-1", "2026-04-04T09:23:00+08:00", "part-1"),
                ("task-2", "2026-04-04T09:25:00+08:00", "part-2"),
            ]
            return [
                SimpleNamespace(task_id=tid, segment_started_at=started, source_title=title)
                for tid, started, title in parts
            ]

        def get_task(self, task_id):  # noqa: ANN001
            # Every part in the session is already split_done.
            return SimpleNamespace(id=task_id, status="split_done")

    state = {
        "repo": _SessionRepo(),
        "settings": {
            "comment": {"enabled": True},
            "collection": {"enabled": True},
            "paths": {},
            "publish": {},
        },
    }
    # task-1 is the anchor and all parts are ready, so publish is runnable.
    self.assertEqual(next_runnable_step(task, steps, state), ("publish", None))
# Allow running this test module directly (e.g. `python this_file.py`).
if __name__ == "__main__":
    unittest.main()

View File

@@ -70,6 +70,26 @@ class TaskPoliciesTests(unittest.TestCase):
self.assertEqual(repo.step_updates[-1][1], "publish")
self.assertEqual(repo.task_updates[-1][1], "failed_retryable")
def test_resolve_failure_uses_rate_limit_schedule_for_publish_601(self) -> None:
    """A retryable PUBLISH_RATE_LIMITED failure must use the rate-limit retry schedule."""
    task = SimpleNamespace(id="task-1", status="running")
    running_publish = TaskStep(
        None, "task-1", "publish", "running", None, None, 0, "2026-01-01T00:00:00+00:00", None
    )
    repo = FakePolicyRepo(task, [running_publish])
    state = {
        "settings": {
            # Generic schedule is 15/5 min, rate-limit schedule is 30/60 min.
            "publish": {"retry_schedule_minutes": [15, 5], "rate_limit_retry_schedule_minutes": [30, 60]},
            "comment": {},
            "paths": {},
        }
    }
    rate_limit_error = ModuleError(code="PUBLISH_RATE_LIMITED", message="rate limited", retryable=True)

    failure = resolve_failure(task, repo, state, rate_limit_error)

    # 30 minutes -> 1800 seconds: the rate-limit schedule wins over the generic one.
    self.assertEqual(failure["payload"]["next_retry_delay_seconds"], 1800)
    self.assertEqual(repo.task_updates[-1][1], "failed_retryable")
# Allow running this test module directly (e.g. `python this_file.py`).
if __name__ == "__main__":
    unittest.main()

View File

@@ -4,6 +4,7 @@ import unittest
from types import SimpleNamespace
from unittest.mock import patch
from biliup_next.core.errors import ModuleError
from biliup_next.app.task_runner import process_task
from biliup_next.core.models import TaskStep
@@ -97,6 +98,39 @@ class TaskRunnerTests(unittest.TestCase):
self.assertEqual(repo.task_updates[0][1], "running")
self.assertEqual(result["processed"][0]["step"], "transcribe")
def test_process_task_marks_publish_failed_retryable_on_module_error(self) -> None:
    """A rate-limited publish failure should leave both step and task in failed_retryable."""
    task = SimpleNamespace(id="task-1", status="split_done", updated_at="2026-01-01T00:00:00+00:00")
    pending_publish = TaskStep(None, "task-1", "publish", "pending", None, None, 0, None, None)
    repo = FakeRunnerRepo(task, [pending_publish])
    state = {
        "repo": repo,
        "settings": {
            "ingest": {},
            "paths": {},
            "comment": {"enabled": True},
            "collection": {"enabled": True},
            "publish": {"retry_schedule_minutes": [15], "rate_limit_retry_schedule_minutes": [30]},
        },
    }
    rate_limit_error = ModuleError(code="PUBLISH_RATE_LIMITED", message="rate limited", retryable=True)

    # Patch the runner's collaborators so execute_step raises the rate-limit error.
    with patch("biliup_next.app.task_runner.ensure_initialized", return_value=state), \
            patch("biliup_next.app.task_runner.record_task_action"), \
            patch("biliup_next.app.task_runner.apply_disabled_step_fallbacks", return_value=False), \
            patch("biliup_next.app.task_runner.next_runnable_step", return_value=("publish", None)), \
            patch("biliup_next.app.task_runner.execute_step", side_effect=rate_limit_error):
        result = process_task("task-1")

    last_processed = result["processed"][-1]
    self.assertEqual(last_processed["retry_status"], "failed_retryable")
    # 30-minute rate-limit schedule -> 1800 seconds.
    self.assertEqual(last_processed["next_retry_delay_seconds"], 1800)
    self.assertEqual(repo.step_updates[-1][1], "publish")
    self.assertEqual(repo.step_updates[-1][2], "failed_retryable")
    self.assertEqual(repo.task_updates[-1][1], "failed_retryable")
# Allow running this test module directly (e.g. `python this_file.py`).
if __name__ == "__main__":
    unittest.main()