Initial commit: sanitize repository for remote push

This commit is contained in:
theshy
2026-03-21 01:36:28 +08:00
commit 3925cb508f
21 changed files with 3357 additions and 0 deletions

351
archive_scripts/upload-1.py Normal file
View File

@@ -0,0 +1,351 @@
import os
import time
import subprocess
import json
import re
import random
import shutil
from pathlib import Path
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
from logger import get_system_logger, log_exception
# ==========================================
# Module configuration
# ==========================================
SESSION_DIR = r'./session'  # workspace directory watched for finished splits
CHECK_INTERVAL = 5  # main-loop sleep interval, in seconds
BILIUP_PATH = "./biliup"  # path to the biliup CLI binary
CONFIG_FILE = "upload_config.json"  # JSON config with templates/streamers/quotes
DONE_FLAG = "split_done.flag"  # marker written by monitorSongs.py when splitting ends
UPLOAD_FLAG = "upload_done.flag"  # marker written by this script after a successful upload
# Module-level logger for the upload component.
logger = get_system_logger('upload')
# ==========================================
class UploadConfig:
    """Loads and serves the upload configuration (templates, streamers, quotes).

    Falls back to a built-in default configuration whenever the JSON file is
    missing or unreadable, so callers always receive a usable config dict.
    """

    def __init__(self, config_path):
        # Path to the JSON configuration file.
        self.config_path = Path(config_path)
        # Parsed configuration dict (defaults when loading fails).
        self.config = self.load_config()

    def load_config(self):
        """Load and parse the JSON config file; return defaults on any failure."""
        try:
            if not self.config_path.exists():
                logger.error(f"配置文件不存在: {self.config_path}")
                return self.get_default_config()
            with open(self.config_path, 'r', encoding='utf-8') as f:
                config = json.load(f)
            logger.info(f"成功加载配置文件: {self.config_path}")
            return config
        except Exception as e:
            log_exception(logger, e, "加载配置文件失败")
            return self.get_default_config()

    def get_default_config(self):
        """Return the built-in fallback configuration."""
        logger.warning("使用默认配置")
        return {
            "upload_settings": {
                "tid": 31,
                "copyright": 2,
                "source": "直播回放",
                "cover": ""
            },
            "template": {
                "title": "{streamer}_{date}",
                "description": "录制剪辑\n\n{songs_list}",
                "tag": "翻唱,直播切片,唱歌,音乐",
                "dynamic": ""
            },
            "streamers": {},
            "quotes": [],
            "filename_patterns": {
                "patterns": []
            }
        }

    def parse_filename(self, filename):
        """Extract streamer name and date from *filename* via configured regexes.

        Tries each entry in config["filename_patterns"]["patterns"] in order;
        the first matching regex's named groups are returned, with 'date'
        rewritten through the pattern's optional "date_format" template.
        When no pattern matches, the whole filename is used as the streamer
        name and the date is left empty.
        """
        patterns = self.config.get("filename_patterns", {}).get("patterns", [])
        for pattern_config in patterns:
            regex = pattern_config.get("regex")
            if not regex:
                continue
            match = re.match(regex, filename)
            if match:
                data = match.groupdict()
                date_format = pattern_config.get("date_format", "{date}")
                # Re-format the date from the captured groups; if the template
                # references a group that was not captured, keep the raw value.
                try:
                    formatted_date = date_format.format(**data)
                    data['date'] = formatted_date
                except KeyError:
                    pass
                logger.debug(f"文件名匹配成功: {pattern_config.get('name')} -> {data}")
                return data
        # Bug fix: log the actual unmatched filename instead of the literal
        # placeholder text "(unknown)".
        logger.warning(f"文件名未匹配任何模式: {filename}")
        return {"streamer": filename, "date": ""}

    def get_random_quote(self):
        """Return a random quote dict ({"text", "author"}); empty strings if none configured."""
        quotes = self.config.get("quotes", [])
        if not quotes:
            return {"text": "", "author": ""}
        return random.choice(quotes)
class UploadHandler(FileSystemEventHandler):
    """Watches for split-done markers and uploads the finished slices via biliup.

    Triggered by the appearance (or move-in) of the DONE_FLAG file written by
    the splitter; writes UPLOAD_FLAG after a successful upload so the same
    directory is never uploaded twice.
    """

    def __init__(self, config):
        # Directory names currently being uploaded (re-entrancy guard).
        self.processing_sets = set()
        # UploadConfig instance providing templates, tags and quotes.
        self.config = config

    def on_created(self, event):
        # watchdog may deliver bytes paths on some platforms; normalize to str.
        src_path = event.src_path
        if isinstance(src_path, bytes):
            src_path = src_path.decode('utf-8')
        # React only to the split-done marker file.
        if not event.is_directory and src_path.lower().endswith(DONE_FLAG):
            logger.debug(f"检测到切割完成标记: {src_path}")
            self.handle_upload(Path(src_path))

    def on_moved(self, event):
        # Same handling for rename/move events (e.g. flag written atomically).
        dest_path = event.dest_path
        if isinstance(dest_path, bytes):
            dest_path = dest_path.decode('utf-8')
        if not event.is_directory and dest_path.lower().endswith(DONE_FLAG):
            logger.debug(f"检测到切割完成标记移动: {dest_path}")
            self.handle_upload(Path(dest_path))

    def handle_upload(self, flag_path):
        """Upload every video slice in flag_path's directory in batches of 5.

        Creates the submission with `biliup upload` for the first batch, then
        appends the remaining batches with `biliup append` using the BV id
        parsed from the first upload's stdout. On full success it touches
        UPLOAD_FLAG and deletes the slice directory plus the original video.
        """
        work_dir = flag_path.parent
        video_stem = work_dir.name
        upload_done = work_dir / UPLOAD_FLAG
        split_dir = work_dir / "split_video"
        # Skip if already uploaded or currently in flight.
        if upload_done.exists() or video_stem in self.processing_sets:
            logger.debug(f"上传已完成或正在处理,跳过: {video_stem}")
            return
        logger.info("="*50)
        logger.info(f"准备上传: {video_stem}")
        logger.info("="*50)
        self.processing_sets.add(video_stem)
        try:
            # 1. Parse streamer name and date out of the directory name.
            parsed = self.config.parse_filename(video_stem)
            streamer = parsed.get('streamer', video_stem)
            date = parsed.get('date', '')
            logger.info(f"主播: {streamer}, 日期: {date}")
            # 2. Read song info produced by the splitter, if present.
            songs_json = work_dir / "songs.json"
            songs_txt = work_dir / "songs.txt"
            songs = []
            song_count = 0
            songs_list = ""
            if songs_json.exists():
                try:
                    with open(songs_json, 'r', encoding='utf-8') as f:
                        data = json.load(f)
                    songs = data.get('songs', [])
                    song_count = len(songs)
                    logger.info(f"读取到 {song_count} 首歌曲")
                except Exception as e:
                    log_exception(logger, e, "读取 songs.json 失败")
            if songs_txt.exists():
                songs_list = songs_txt.read_text(encoding='utf-8').strip()
                logger.info("已读取歌单文本")
            # 3. Pick a random quote for the description template.
            quote = self.config.get_random_quote()
            daily_quote = quote.get('text', '')
            quote_author = quote.get('author', '')
            # 4. Variables available to the title/description/tag templates.
            template_vars = {
                'streamer': streamer,
                'date': date,
                'song_count': song_count,
                'songs_list': songs_list,
                'daily_quote': daily_quote,
                'quote_author': quote_author
            }
            # 5. Render title, description and dynamic text.
            template = self.config.config.get('template', {})
            title = template.get('title', '{streamer}_{date}').format(**template_vars)
            description = template.get('description', '{songs_list}').format(**template_vars)
            dynamic = template.get('dynamic', '').format(**template_vars)
            # 6. Tags: prefer a streamer-specific tag list when configured.
            streamers_config = self.config.config.get('streamers', {})
            if streamer in streamers_config:
                tags = streamers_config[streamer].get('tags', template.get('tag', ''))
                logger.info(f"使用主播专属标签: {streamer}")
            else:
                tags = template.get('tag', '翻唱,唱歌,音乐').format(**template_vars)
            logger.info(f"标题: {title}")
            logger.info(f"标签: {tags}")
            logger.debug(f"简介预览: {description[:100]}...")
            # 7. Collect all slice videos, sorted for stable part ordering.
            video_files = sorted([str(v) for v in split_dir.glob("*") if v.suffix.lower() in {'.mp4', '.mkv', '.mov', '.flv'}])
            if not video_files:
                logger.error(f"切片目录 {split_dir} 内没找到视频")
                return
            logger.info(f"找到 {len(video_files)} 个视频分片")
            # 8. Read upload settings (with the same defaults as the config).
            upload_settings = self.config.config.get('upload_settings', {})
            tid = upload_settings.get('tid', 31)
            copyright_val = upload_settings.get('copyright', 2)
            source = upload_settings.get('source', '直播回放')
            cover = upload_settings.get('cover', '')
            # 9. Refresh biliup login credentials (best effort — warn on failure).
            renew_cmd = [BILIUP_PATH, "renew"]
            logger.info("尝试刷新 biliup 登录信息")
            renew_result = subprocess.run(renew_cmd, shell=False, capture_output=True, text=True, encoding='utf-8')
            if renew_result.returncode != 0:
                logger.warning(f"biliup renew 返回非 0: {renew_result.returncode}")
            else:
                logger.info("biliup renew 成功")
            # 10. Batched upload: first 5 files create the submission, the rest
            # are appended to it in groups of 5.
            logger.info(f"启动分批投稿 (每批 5 个)...")
            first_batch = video_files[:5]
            remaining_batches = [video_files[i:i + 5] for i in range(5, len(video_files), 5)]
            # Build the initial `biliup upload` command line.
            upload_cmd = [
                BILIUP_PATH, "upload",
                *first_batch,
                "--title", title,
                "--tid", str(tid),
                "--tag", tags,
                "--copyright", str(copyright_val),
                "--source", source,
                "--desc", description
            ]
            if dynamic:
                upload_cmd.extend(["--dynamic", dynamic])
            if cover and Path(cover).exists():
                upload_cmd.extend(["--cover", cover])
            # Run the initial upload.
            logger.info(f"正在上传第一批 ({len(first_batch)} 个文件)...")
            result = subprocess.run(upload_cmd, shell=False, capture_output=True, text=True, encoding='utf-8')
            if result.returncode == 0:
                # Extract the BV id from stdout: prefer the JSON field, fall
                # back to any bare BV token in the output.
                bv_match = re.search(r'"bvid":"(BV[A-Za-z0-9]+)"', result.stdout)
                if not bv_match:
                    bv_match = re.search(r'(BV[A-Za-z0-9]+)', result.stdout)
                if bv_match:
                    bvid = bv_match.group(1)
                    logger.info(f"第一批投稿成功,获得 BV 号: {bvid}")
                    # Append each remaining batch to the created submission.
                    for idx, batch in enumerate(remaining_batches, 2):
                        logger.info(f"正在追加第 {idx} 批 ({len(batch)} 个文件) 到 {bvid}...")
                        time.sleep(15)  # brief pause between append calls
                        append_cmd = [
                            BILIUP_PATH, "append",
                            "--vid", bvid,
                            *batch
                        ]
                        append_res = subprocess.run(append_cmd, shell=False, capture_output=True, text=True, encoding='utf-8')
                        if append_res.returncode != 0:
                            logger.error(f"{idx} 批追加失败: {append_res.stderr[:200]}")
                    logger.info(f"所有批次处理完成: {video_stem}")
                    upload_done.touch()
                    # Reclaim disk space after a successful upload: remove the
                    # slice directory and the original recording.
                    try:
                        if split_dir.exists():
                            shutil.rmtree(split_dir)
                            logger.info(f"已删除切片目录: {split_dir}")
                        for ext in ['.mp4', '.mkv', '.mov', '.flv', '.ts']:
                            original_video = work_dir / f"{video_stem}{ext}"
                            if original_video.exists():
                                original_video.unlink()
                                logger.info(f"已删除原视频: {original_video}")
                    except Exception as cleanup_err:
                        logger.error(f"清理空间失败: {cleanup_err}")
                else:
                    # Upload succeeded but no BV id found — cannot append, and
                    # UPLOAD_FLAG is deliberately not written.
                    logger.error("第一批上传成功但未能在输出中识别到 BV 号,无法追加后续分片")
            else:
                logger.error(f"第一批投稿失败,错误码: {result.returncode}")
                logger.error(f"错误信息: {result.stderr[:500]}")
        except Exception as e:
            log_exception(logger, e, "上传处理异常")
        finally:
            # Always release the re-entrancy guard, even on failure.
            self.processing_sets.discard(video_stem)
            logger.info("="*50)
def main():
    """Entry point: process any backlog, then watch SESSION_DIR for new flags."""
    session_root = Path(SESSION_DIR)
    session_root.mkdir(parents=True, exist_ok=True)

    banner = "=" * 50
    logger.info(banner)
    logger.info("上传模块启动 (Biliup 自动分批投稿)")
    logger.info(banner)

    # Wire the config-driven handler into a recursive filesystem observer.
    handler = UploadHandler(UploadConfig(CONFIG_FILE))
    observer = Observer()
    observer.schedule(handler, str(session_root), recursive=True)

    # Pick up directories that finished splitting while we were offline.
    logger.info("扫描待上传任务...")
    pending = 0
    for entry in session_root.iterdir():
        if not entry.is_dir():
            continue
        done_marker = entry / DONE_FLAG
        if not done_marker.exists() or (entry / UPLOAD_FLAG).exists():
            continue
        logger.info(f"发现待上传任务: {entry.name}")
        handler.handle_upload(done_marker)
        pending += 1
    logger.info(f"扫描完成,处理 {pending} 个待上传任务")

    observer.start()
    try:
        while True:
            time.sleep(CHECK_INTERVAL)
    except KeyboardInterrupt:
        observer.stop()
    observer.join()
# Run the watcher loop only when executed as a script (not on import).
if __name__ == "__main__":
    main()