feat(material-cache): add cache lock mechanism to prevent concurrent conflicts

- Implement cross-process acquisition and release of the cache lock (see the sketch after this list)
- Use UUID-generated unique temporary file names during download to avoid concurrent overwrites
- Add a timeout and polling interval to bound how long lock acquisition waits
- Update cleanup logic to skip lock files and in-progress temporary download files
- Add tests verifying the cache lock works correctly
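
A minimal sketch of the locking pattern described above, assuming the lock is a file created atomically inside the cache directory; `cache_lock`, `CACHE_DIR`, the `.tmp` suffix, and the timeout/poll defaults are illustrative names and values, not the actual implementation:

    import os
    import time
    import uuid
    from contextlib import contextmanager

    CACHE_DIR = "/tmp/material-cache"  # illustrative location
    LOCK_PATH = os.path.join(CACHE_DIR, "cache.lock")

    @contextmanager
    def cache_lock(timeout: float = 30.0, poll_interval: float = 0.2):
        """Cross-process lock via atomic, exclusive creation of a lock file."""
        os.makedirs(CACHE_DIR, exist_ok=True)
        deadline = time.monotonic() + timeout
        while True:
            try:
                fd = os.open(LOCK_PATH, os.O_CREAT | os.O_EXCL | os.O_WRONLY)
                os.close(fd)
                break  # lock acquired
            except FileExistsError:
                if time.monotonic() >= deadline:
                    raise TimeoutError("timed out waiting for cache lock")
                time.sleep(poll_interval)  # poll until the current holder releases it
        try:
            yield
        finally:
            os.remove(LOCK_PATH)  # release

    def temp_download_path(key: str) -> str:
        """UUID-based temp name so concurrent downloads never overwrite each other."""
        return os.path.join(CACHE_DIR, f"{key}.{uuid.uuid4().hex}.tmp")

With names like these, cleanup can safely skip anything ending in .lock or .tmp, matching the cleanup bullet above.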

fix(ffmpeg): improve FFmpeg command execution and error handling

- Default the log level to error to reduce noisy output
- Fix how run arguments are passed to subprocess (see the sketch after this list)
- Truncate error messages defensively to avoid decode exceptions on empty output
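
A hedged sketch of the execution pattern this fix describes: arguments passed to subprocess.run as a list, a default -loglevel error to keep output quiet, and stderr guarded before decoding and slicing so empty output cannot raise; `run_ffmpeg` and its defaults are illustrative, not the project's actual helper:

    import subprocess

    def run_ffmpeg(args: list, loglevel: str = "error", timeout: int = 600) -> str:
        """Run ffmpeg quietly and report failures without decode errors."""
        cmd = ["ffmpeg", "-hide_banner", "-loglevel", loglevel, *args]
        result = subprocess.run(cmd, capture_output=True, timeout=timeout)
        if result.returncode != 0:
            # stderr may be empty or non-UTF-8; guard before decoding, then truncate
            stderr = (result.stderr or b"").decode("utf-8", errors="replace")
            raise RuntimeError(f"ffmpeg failed (code={result.returncode}): {stderr[:500]}")
        return (result.stdout or b"").decode("utf-8", errors="replace")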

refactor(system-info): improve system info collection and caching

- Cache FFmpeg version and codec information to avoid repeated queries
- Add a TTL cache for system information to improve performance (see the sketch after this list)
- Cache the GPU detection status to avoid repeated probing
- Separate handling of static and dynamic system information
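
A minimal sketch of the TTL idea: expensive, mostly static probes (FFmpeg version/codecs, GPU detection) are computed once and reused until a time-to-live expires, while dynamic values get a short TTL; `cached` and the example TTLs are illustrative, not the module's real API:

    import time
    from typing import Any, Callable, Dict, Tuple

    _cache: Dict[str, Tuple[float, Any]] = {}

    def cached(key: str, ttl: float, compute: Callable[[], Any]) -> Any:
        """Return the cached value for `key`, recomputing it once the TTL expires."""
        now = time.monotonic()
        hit = _cache.get(key)
        if hit is not None and now - hit[0] < ttl:
            return hit[1]
        value = compute()
        _cache[key] = (now, value)
        return value

    # Usage: static info gets a long TTL, dynamic info a short one (probe_* are hypothetical).
    # ffmpeg_version = cached("ffmpeg_version", ttl=3600, compute=probe_ffmpeg_version)
    # cpu_load = cached("cpu_load", ttl=5, compute=probe_cpu_load)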

refactor(storage): improve resource management for HTTP upload/download

- Use context managers to ensure request connections are closed properly
- Build the rclone command as an argument list instead of a shell string
- Limit the length of stderr captured when reporting errors
- Improve response handling to avoid resource leaks
2026-01-19 20:03:18 +08:00
parent 0cc96a968b
commit b291f33486
6 changed files with 238 additions and 96 deletions


@@ -7,6 +7,7 @@
 import os
 import logging
+import subprocess
 from typing import Optional
 import requests
@@ -73,16 +74,16 @@ def upload_file(url: str, file_path: str, max_retries: int = 5, timeout: int = 6
     while retries < max_retries:
         try:
             with open(file_path, 'rb') as f:
-                response = requests.put(
+                with requests.put(
                     http_url,
                     data=f,
                     stream=True,
                     timeout=timeout,
                     headers={"Content-Type": "application/octet-stream"}
-                )
-                response.raise_for_status()
-                logger.info(f"Upload succeeded: {file_path}")
-                return True
+                ) as response:
+                    response.raise_for_status()
+                    logger.info(f"Upload succeeded: {file_path}")
+                    return True
         except requests.exceptions.Timeout:
             retries += 1
@@ -111,7 +112,6 @@ def _upload_with_rclone(url: str, file_path: str) -> bool:
         return False
     config_file = os.getenv("RCLONE_CONFIG_FILE", "")
-    rclone_config = f"--config {config_file}" if config_file else ""
     # Replace the URL
     new_url = url
@@ -123,19 +123,30 @@
     if new_url == url:
         return False
-    cmd = (
-        f"rclone copyto --no-check-dest --ignore-existing "
-        f"--multi-thread-chunk-size 8M --multi-thread-streams 8 "
-        f"{rclone_config} {file_path} {new_url}"
-    )
-    logger.debug(f"rclone command: {cmd}")
+    cmd = [
+        "rclone",
+        "copyto",
+        "--no-check-dest",
+        "--ignore-existing",
+        "--multi-thread-chunk-size",
+        "8M",
+        "--multi-thread-streams",
+        "8",
+    ]
+    if config_file:
+        cmd.extend(["--config", config_file])
+    cmd.extend([file_path, new_url])
-    result = os.system(cmd)
-    if result == 0:
+    logger.debug(f"rclone command: {' '.join(cmd)}")
+    result = subprocess.run(cmd, capture_output=True, text=True)
+    if result.returncode == 0:
         logger.info(f"rclone upload succeeded: {file_path}")
         return True
-    logger.warning(f"rclone upload failed (code={result}): {file_path}")
+    stderr = (result.stderr or '').strip()
+    stderr = stderr[:500] if stderr else ""
+    logger.warning(f"rclone upload failed (code={result.returncode}): {file_path} {stderr}")
     return False
@@ -177,13 +188,13 @@ def download_file(
     retries = 0
     while retries < max_retries:
         try:
-            response = requests.get(http_url, timeout=timeout, stream=True)
-            response.raise_for_status()
+            with requests.get(http_url, timeout=timeout, stream=True) as response:
+                response.raise_for_status()
-            with open(file_path, 'wb') as f:
-                for chunk in response.iter_content(chunk_size=8192):
-                    if chunk:
-                        f.write(chunk)
+                with open(file_path, 'wb') as f:
+                    for chunk in response.iter_content(chunk_size=8192):
+                        if chunk:
+                            f.write(chunk)
             file_size = os.path.getsize(file_path)
             logger.info(f"Download succeeded: {file_path} ({file_size} bytes)")