Compare commits

5 Commits

| Author | SHA1 | Date |
| --- | --- | --- |
| | 753cf2160f | |
| | c74de5cbf9 | |
| | 9d7f54d1dc | |
| | 82b40e3b5d | |
| | c0e5b8246f | |

.gitignore (vendored, 5 changes)

@@ -10,4 +10,7 @@ __pycache__
venv
build/
dist/
access_token
access_token
winsw.*
*.json
*.log

app.py (2 changes)

@@ -11,6 +11,7 @@ from controller.api.posting_blueprint import blueprint as api_posting_blueprint
from controller.api.video_part_blueprint import blueprint as api_video_part_blueprint
from controller.api.biliuploader_blueprint import blueprint as api_biliuploader_blueprint
from model import db
from workflow.bilibili import Bilibili

app = Flask(__name__)
app.config['JSON_AS_ASCII'] = False
@@ -35,4 +36,5 @@ with app.app_context():
db.create_all(app=app)

if __name__ == '__main__':
Bilibili().start()
app.run()

config.py (59 changes)

@@ -17,21 +17,20 @@ DANMAKU_FONT_NAME = "Sarasa Term SC"
DANMAKU_FONT_SIZE = 40
# resolution
VIDEO_RESOLUTION = "1280x720"
# opacity
DANMAKU_OPACITY = 100
# [ffmpeg]
# exec
FFMPEG_EXEC = "ffmpeg"
# hevc
FFMPEG_USE_HEVC = False
# nvidia_gpu
FFMPEG_USE_NVIDIA_GPU = False
# intel_gpu
FFMPEG_USE_INTEL_GPU = False
# vaapi
FFMPEG_USE_VAAPI = False
# bitrate
VIDEO_BITRATE = "2.5M"
# gop
VIDEO_GOP = 60
# [handbrake]
# exec
HANDBRAKE_EXEC = "HandBrakeCli"
# preset_file
HANDBRAKE_PRESET_FILE = "handbrake.json"
# preset
HANDBRAKE_PRESET = "NvEnc"
# encopt
HANDBRAKE_ENCOPT = ""
# [video]
# enabled
VIDEO_ENABLED = False
@@ -71,13 +70,14 @@ def load_config():
if config.has_section("danmaku"):
section = config['danmaku']
global DANMAKU_EXEC, DANMAKU_SPEED, DANMAKU_FONT_NAME, VIDEO_RESOLUTION, DANMAKU_FONT_SIZE, \
DANMAKU_USE_DANMU2ASS, DANMAKU_USE_DANMAKUFACTORY
DANMAKU_USE_DANMU2ASS, DANMAKU_USE_DANMAKUFACTORY, DANMAKU_OPACITY
DANMAKU_USE_DANMU2ASS = section.getboolean('use_danmu2ass', DANMAKU_USE_DANMU2ASS)
DANMAKU_USE_DANMAKUFACTORY = section.getboolean('use_danmakufactory', DANMAKU_USE_DANMAKUFACTORY)
DANMAKU_EXEC = section.get('exec', DANMAKU_EXEC)
DANMAKU_SPEED = section.getfloat('speed', DANMAKU_SPEED)
DANMAKU_FONT_NAME = section.get('font', DANMAKU_FONT_NAME)
DANMAKU_FONT_SIZE = section.getint('font_size', DANMAKU_FONT_SIZE)
DANMAKU_OPACITY = section.getint('opacity', DANMAKU_OPACITY)
VIDEO_RESOLUTION = section.get('resolution', VIDEO_RESOLUTION)
if config.has_section("video"):
section = config['video']
@@ -90,19 +90,19 @@ def load_config():
if config.has_section("clip"):
section = config['clip']
global VIDEO_CLIP_EACH_SEC, VIDEO_CLIP_OVERFLOW_SEC
VIDEO_CLIP_EACH_SEC = section.getfloat('each_sec', VIDEO_CLIP_EACH_SEC)
VIDEO_CLIP_OVERFLOW_SEC = section.getfloat('overflow_sec', VIDEO_CLIP_OVERFLOW_SEC)
VIDEO_CLIP_EACH_SEC = section.getint('each_sec', VIDEO_CLIP_EACH_SEC)
VIDEO_CLIP_OVERFLOW_SEC = section.getint('overflow_sec', VIDEO_CLIP_OVERFLOW_SEC)
if config.has_section("ffmpeg"):
section = config['ffmpeg']
global FFMPEG_EXEC, FFMPEG_USE_HEVC, FFMPEG_USE_NVIDIA_GPU, FFMPEG_USE_INTEL_GPU, VIDEO_BITRATE, \
VIDEO_GOP, FFMPEG_USE_VAAPI
global FFMPEG_EXEC
FFMPEG_EXEC = section.get('exec', FFMPEG_EXEC)
FFMPEG_USE_HEVC = section.getboolean('hevc', FFMPEG_USE_HEVC)
FFMPEG_USE_NVIDIA_GPU = section.getboolean('nvidia_gpu', FFMPEG_USE_NVIDIA_GPU)
FFMPEG_USE_INTEL_GPU = section.getboolean('intel_gpu', FFMPEG_USE_INTEL_GPU)
FFMPEG_USE_VAAPI = section.getboolean('vaapi', FFMPEG_USE_VAAPI)
VIDEO_BITRATE = section.get('bitrate', VIDEO_BITRATE)
VIDEO_GOP = section.getfloat('gop', VIDEO_GOP)
if config.has_section("handbrake"):
section = config['handbrake']
global HANDBRAKE_EXEC, HANDBRAKE_PRESET_FILE, HANDBRAKE_PRESET, HANDBRAKE_ENCOPT
HANDBRAKE_EXEC = section.get('exec', HANDBRAKE_EXEC)
HANDBRAKE_PRESET_FILE = section.get('preset_file', HANDBRAKE_PRESET_FILE)
HANDBRAKE_PRESET = section.get('preset', HANDBRAKE_PRESET)
HANDBRAKE_ENCOPT = section.get('encopt', HANDBRAKE_ENCOPT)
if config.has_section("recorder"):
global BILILIVE_RECORDER_DIRECTORY, XIGUALIVE_RECORDER_DIRECTORY, VIDEO_OUTPUT_DIR
section = config['recorder']
@@ -121,6 +121,7 @@ def get_config():
'speed': DANMAKU_SPEED,
'font': DANMAKU_FONT_NAME,
'font_size': DANMAKU_FONT_SIZE,
'opacity': DANMAKU_OPACITY,
'resolution': VIDEO_RESOLUTION,
},
'video': {
@@ -136,12 +137,12 @@ def get_config():
},
'ffmpeg': {
'exec': FFMPEG_EXEC,
'hevc': FFMPEG_USE_HEVC,
'nvidia_gpu': FFMPEG_USE_NVIDIA_GPU,
'intel_gpu': FFMPEG_USE_INTEL_GPU,
'vaapi': FFMPEG_USE_VAAPI,
'bitrate': VIDEO_BITRATE,
'gop': VIDEO_GOP,
},
'handbrake': {
'exec': HANDBRAKE_EXEC,
'preset_file': HANDBRAKE_PRESET_FILE,
'preset': HANDBRAKE_PRESET,
'encopt': HANDBRAKE_ENCOPT,
},
'recorder': {
'bili_dir': BILILIVE_RECORDER_DIRECTORY,
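
A minimal sketch of the INI file that load_config() reads with the options this change introduces; the [handbrake] section, the opacity key, and the getint() parsing of the clip values come from the diff above, while the concrete values shown are illustrative assumptions, not part of the change:

[danmaku]
; 0-100, mapped onto the subtitle alpha in workflow/danmaku.py
opacity = 80

[clip]
; parsed with getint() after this change, so whole seconds
each_sec = 3600
overflow_sec = 30

[handbrake]
exec = HandBrakeCli
preset_file = handbrake.json
preset = NvEnc
encopt =
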
@@ -6,18 +6,15 @@ from typing import Optional

from flask import Blueprint, jsonify, request, current_app

from config import BILILIVE_RECORDER_DIRECTORY, VIDEO_TITLE, XIGUALIVE_RECORDER_DIRECTORY, VIDEO_DESC, \
VIDEO_TAGS, VIDEO_TID, VIDEO_ENABLED
from config import BILILIVE_RECORDER_DIRECTORY, VIDEO_TITLE, XIGUALIVE_RECORDER_DIRECTORY
from exception.danmaku import DanmakuException
from model import db
from model.DanmakuClip import DanmakuClip
from model.VideoClip import VideoClip
from model.Workflow import Workflow
from workflow.bilibili import IS_LIVING, IS_UPLOADING, INSTANCE as bilibili_instance, IS_ENCODING
from workflow.bilibili import VideoPart
from workflow.bilibili import ENCODING_QUEUE, IS_LIVING
from workflow.danmaku import get_file_start
from workflow.video import get_video_real_duration, duration_str_to_float
from workflow.worker import do_workflow

blueprint = Blueprint("api_bilirecorder", __name__, url_prefix="/api/bilirecorder")

@@ -35,50 +32,13 @@ def auto_submit_task():
if len(bili_record_workflow_item.video_clips) == 0:
print("[!]Auto Submit Fail: No Video Clips")
return
if VIDEO_ENABLED:
bilibili_instance.login()
video_title = bili_record_workflow_item.name
_future = None
for video_clip in bili_record_workflow_item.video_clips:
if len(video_clip.danmaku_clips) > 0:
print("[+]Workflow:", bili_record_workflow_item.id, "; Video:", video_clip.full_path)
_started = True
IS_ENCODING.set()
_future = pool.submit(
do_workflow,
video_clip.full_path,
video_clip.danmaku_clips[0].full_path,
*[clip.full_path for clip in video_clip.danmaku_clips[1:]]
)
ENCODING_QUEUE.put(bili_record_workflow_item)
clear_item()
def _clear_encode_flag_callback(_f: "Future"):
IS_ENCODING.clear()
_future.add_done_callback(_clear_encode_flag_callback)
if VIDEO_ENABLED:
def _encode_finish_callback(_f: "Future"):
_result = _f.result()
if _result:
# start uploading
bilibili_instance.pre_upload(
parts=[VideoPart(os.path.join(_item['base_path'], _item['file']), _item['file'])
for _item in _result],
max_retry=10
)

_future.add_done_callback(_encode_finish_callback)
else:
print("[-]Workflow:", bili_record_workflow_item.id, "; Video:", video_clip.full_path, "; No Danmaku")
if VIDEO_ENABLED and _future is not None:
def _on_upload_finish(_f: "Future"):
if IS_UPLOADING.is_set() or IS_LIVING.is_set() or IS_ENCODING.is_set():
return
bilibili_instance.finish_upload(
title=video_title,
desc=VIDEO_DESC,
tid=VIDEO_TID,
tag=VIDEO_TAGS,
no_reprint=0)
_future.add_done_callback(_on_upload_finish)


def clear_item():
@@ -109,6 +69,7 @@ def safe_create_item():
commit_item()
auto_submit_task()
bili_record_workflow_item = Workflow()
bili_record_workflow_item.name = VIDEO_TITLE.format(datetime.utcnow().strftime("%Y%m%d"))
else:
bili_record_workflow_item.name = VIDEO_TITLE.format(datetime.utcnow().strftime("%Y%m%d"))
bili_record_workflow_item.automatic = True

@@ -2,23 +2,3 @@ from flask import Blueprint, jsonify
from workflow.bilibili import INSTANCE as BILIBILI_INSTANCE

blueprint = Blueprint("api_biliuploader", __name__, url_prefix="/api/biliuploader")


@blueprint.get("/")
def get_login_info():
return jsonify({
"mid": BILIBILI_INSTANCE.user_id,
"expires": BILIBILI_INSTANCE.expires,
"login_at": BILIBILI_INSTANCE.login_time,
})


@blueprint.post("/")
def do_login():
BILIBILI_INSTANCE.login()
return get_login_info()


@blueprint.post("/finish")
def finish_uploading():
BILIBILI_INSTANCE.finish_upload()

@@ -4,7 +4,8 @@ import platform
import psutil
from flask import Blueprint, jsonify

from config import DANMAKU_EXEC, FFMPEG_EXEC, BILILIVE_RECORDER_DIRECTORY, XIGUALIVE_RECORDER_DIRECTORY, VIDEO_OUTPUT_DIR
from config import DANMAKU_EXEC, FFMPEG_EXEC, BILILIVE_RECORDER_DIRECTORY, XIGUALIVE_RECORDER_DIRECTORY, \
VIDEO_OUTPUT_DIR, HANDBRAKE_EXEC
from util.system import check_exec
from workflow.bilibili import IS_LIVING, IS_UPLOADING, IS_ENCODING

@@ -60,6 +61,7 @@ def collect_basic_status():
},
'exec': {
'ffmpeg': check_exec(FFMPEG_EXEC),
'handbrake': check_exec(HANDBRAKE_EXEC),
'danmaku': check_exec(DANMAKU_EXEC),
},
'system': {

@@ -3,6 +3,7 @@ bs4~=0.0.1
Flask~=2.1.2
psutil~=5.9.0
Flask-SQLAlchemy~=2.5.1
SQLAlchemy<2
lxml~=4.8
requests~=2.28.1
rsa~=4.8

@@ -50,6 +50,10 @@
<td>FFMPEG状态</td>
<td :class="collector.basic.exec.ffmpeg ? 'success' : 'warning'"></td>
</tr>
<tr>
<td>HANDBRAKE状态</td>
<td :class="collector.basic.exec.handbrake ? 'success' : 'warning'"></td>
</tr>
<tr>
<td>弹幕工具状态</td>
<td :class="collector.basic.exec.danmaku ? 'success' : 'warning'"></td>
@@ -84,29 +88,30 @@
<td>命令</td>
<td>{{ config.ffmpeg.exec }}</td>
</tr>
</tbody>
</table>
<table class="current-config">
<thead>
<tr class="table-header">
<td colspan="2">HANDBRAKE</td>
</tr>
</thead>
<tbody>
<tr>
<td>HEVC</td>
<td :class="{warning: !config.ffmpeg.hevc, success: config.ffmpeg.hevc}"></td>
<td>命令</td>
<td>{{ config.handbrake.exec }}</td>
</tr>
<tr>
<td>VAAPI</td>
<td :class="{warning: !config.ffmpeg.vaapi, success: config.ffmpeg.vaapi}"></td>
<td>预设文件</td>
<td>{{ config.handbrake.preset_file }}</td>
</tr>
<tr>
<td>嘤伟达GPU</td>
<td :class="{warning: !config.ffmpeg.nvidia_gpu, success: config.ffmpeg.nvidia_gpu}"></td>
<td>预设使用</td>
<td>{{ config.handbrake.preset }}</td>
</tr>
<tr>
<td>嘤特尔GPU</td>
<td :class="{warning: !config.ffmpeg.intel_gpu, success: config.ffmpeg.intel_gpu}"></td>
</tr>
<tr>
<td>视频比特率</td>
<td>{{ config.ffmpeg.bitrate }}</td>
</tr>
<tr>
<td>GOP</td>
<td>{{ config.ffmpeg.gop }}</td>
<td>编码器参数</td>
<td>{{ config.handbrake.encopt }}</td>
</tr>
</tbody>
</table>
@@ -141,6 +146,10 @@
<td>字体大小</td>
<td>{{ config.danmaku.font_size }}</td>
</tr>
<tr>
<td>不透明度</td>
<td>{{ config.danmaku.opacity }}%</td>
</tr>
<tr>
<td>视频分辨率</td>
<td>{{ config.danmaku.resolution }}</td>
@@ -255,6 +264,7 @@
basic: {
exec: {
ffmpeg: false,
handbrake: false,
danmaku: false,
},
system: {
@@ -304,6 +314,7 @@
speed: 0,
font: "",
font_size: 0,
font_size: 100,
resolution: "",
},
video: {
@@ -319,12 +330,12 @@
},
ffmpeg: {
exec: "",
hevc: false,
nvidia_gpu: false,
intel_gpu: false,
vaapi: false,
bitrate: "",
gop: "",
},
handbrake: {
exec: "",
preset_file: "",
preset: "",
encopt: "",
},
recorder: {
bili_dir: "",

@@ -1,7 +1,7 @@
import logging

LOGGER = logging.getLogger("WORKFLOW")
_ch = logging.StreamHandler()
_ch = logging.FileHandler("workflow.log", "w", encoding="UTF-8", delay=False)
_ch.setLevel(logging.DEBUG)
LOGGER.setLevel(logging.DEBUG)
LOGGER.addHandler(_ch)

@@ -1,143 +1,58 @@
import threading
from datetime import datetime
from queue import SimpleQueue as Queue
from time import sleep

from model.Workflow import Workflow
from workflow.video import quick_split_video
from workflow.worker import do_workflow

from . import LOGGER
from .bilibiliupload import core, VideoPart


IS_LIVING = threading.Event()
IS_ENCODING = threading.Event()
IS_UPLOADING = threading.Event()
ENCODING_QUEUE: "Queue[Workflow]" = Queue()


class Bilibili:
def __init__(self):
self.access_token = ""
self.session_id = ""
self.user_id = ""
self.expires = 0
self.login_time = None
class Bilibili(threading.Thread):

def __init__(self) -> None:
super().__init__()
self.parts = []

def login(self):
with open("access_token", "r") as f:
self.access_token = f.read(64).strip()
self.session_id, self.user_id, self.expires = core.login_by_access_token(self.access_token)
self.login_time = datetime.now()
LOGGER.info("B站登录,UID【{}】,过期时间【{}】".format(self.user_id, self.expires))

def upload(self,
parts,
title,
tid,
tag,
desc,
source='',
cover='',
no_reprint=1,
):
"""

:param parts: e.g. VideoPart('part path', 'part title', 'part desc'), or [VideoPart(...), VideoPart(...)]
:type parts: VideoPart or list<VideoPart>
:param title: video's title
:type title: str
:param tid: video type, see: https://member.bilibili.com/x/web/archive/pre
or https://github.com/uupers/BiliSpider/wiki/%E8%A7%86%E9%A2%91%E5%88%86%E5%8C%BA%E5%AF%B9%E5%BA%94%E8%A1%A8
:type tid: int
:param tag: video's tag
:type tag: list<str>
:param desc: video's description
:type desc: str
:param source: (optional) 转载地址
:type source: str
:param cover: (optional) cover's URL, use method *cover_up* to get
:type cover: str
:param no_reprint: (optional) 0=可以转载, 1=禁止转载(default)
:type no_reprint: int
"""
self.pre_upload(parts)
self.finish_upload(title, tid, tag, desc, source, cover, no_reprint)
self.clear()

def pre_upload(self, parts: "VideoPart", max_retry=5):
"""
:param max_retry:
:param parts: e.g. VideoPart('part path', 'part title', 'part desc'), or [VideoPart(...), VideoPart(...)]
:type parts: VideoPart or list<VideoPart>
"""
if not isinstance(parts, list):
parts = [parts]

IS_UPLOADING.set()
for part in parts:
if isinstance(part, str):
part = VideoPart(part)
LOGGER.info("Start Uploading >{}<".format(part.path))
status = core.upload_video_part(self.access_token, self.session_id, self.user_id, part, max_retry)
if status:
# 上传完毕
LOGGER.info("Upload >{}< Finished;【{}】".format(part.path, part.server_file_name))
self.parts.append(part)
def run(self) -> None:
while True:
if ENCODING_QUEUE.empty():
sleep(5)
if len(self.parts) > 0 and not IS_UPLOADING.is_set():
self.do_upload()
else:
LOGGER.warn("Upload >{}< Failed".format(part.path))
IS_UPLOADING.clear()
workflow_item = ENCODING_QUEUE.get()
LOGGER.info("收到工作流请求:ID:【{}】".format(workflow_item.id))
for video_clip in workflow_item.video_clips:
IS_ENCODING.set()
try:
LOGGER.info("工作流视频:ID:【{}】,路径:【{}】".format(video_clip.id, video_clip.full_path))
if len(video_clip.danmaku_clips) < 1:
_parts = quick_split_video(video_clip.full_path)
else:
_parts = do_workflow(video_clip.full_path, video_clip.danmaku_clips[0].full_path)
LOGGER.info("工作流视频压制完成:结果:【{}】".format(_parts))
for _part in _parts:
self.parts.append(_part)
except:
LOGGER.error("压制异常!工作流视频:ID:【{}】,路径:【{}】".format(video_clip.id, video_clip.full_path))
finally:
IS_ENCODING.clear()

def finish_upload(self,
title,
tid,
tag,
desc,
source='',
cover='',
no_reprint=1,
):
"""
:param title: video's title
:type title: str
:param tid: video type, see: https://member.bilibili.com/x/web/archive/pre
or https://github.com/uupers/BiliSpider/wiki/%E8%A7%86%E9%A2%91%E5%88%86%E5%8C%BA%E5%AF%B9%E5%BA%94%E8%A1%A8
:type tid: int
:param tag: video's tag
:type tag: str
:param desc: video's description
:type desc: str
:param source: (optional) 转载地址
:type source: str
:param cover: (optional) cover's URL, use method *cover_up* to get
:type cover: str
:param no_reprint: (optional) 0=可以转载, 1=禁止转载(default)
:type no_reprint: int
:param copyright: (optional) 0=转载的, 1=自制的(default)
:type copyright: int
"""
if len(self.parts) == 0:
return
if IS_ENCODING.is_set():
LOGGER.info("[{}]仍在压制,取消发布".format(title))
return
if IS_LIVING.is_set():
LOGGER.info("[{}]仍在直播,取消发布".format(title))
return
if IS_UPLOADING.is_set():
LOGGER.info("[{}]仍在上传,取消发布".format(title))
return
LOGGER.info("[{}]投稿中,请稍后".format(title))
copyright = 2 if source else 1
try:
avid, bvid = core.upload(self.access_token, self.session_id, self.user_id, self.parts, copyright,
title=title, tid=tid, tag=tag, desc=desc, source=source, cover=cover, no_reprint=no_reprint)
LOGGER.info("[{}]投稿成功;AVID【{}】,BVID【{}】".format(title, avid, bvid))
self.clear()
except Exception as e:
LOGGER.error("[{}]投稿失败".format(title), exc_info=e)


def reloadFromPrevious(self):
...
def do_upload(self):
LOGGER.info("尝试投稿:内容【{}】".format(self.parts))
self.clear()

def clear(self):
self.parts = []



INSTANCE = Bilibili()

@@ -1,4 +0,0 @@
修改自
[BilibiliUploader](https://github.com/FortuneDayssss/BilibiliUploader/)

LICENSE:GPL

@@ -1,4 +0,0 @@
from .bilibiliuploader import BilibiliUploader
from .core import VideoPart

__version__ = '0.0.6'

@ -1,91 +0,0 @@
|
||||
from .core import login_by_access_token, upload, edit_videos
|
||||
from .util import cipher
|
||||
|
||||
|
||||
class BilibiliUploader():
|
||||
def __init__(self):
|
||||
self.access_token = None
|
||||
self.refresh_token = None
|
||||
self.sid = None
|
||||
self.mid = None
|
||||
|
||||
def login_by_access_token(self, access_token, refresh_token=None):
|
||||
self.access_token = access_token
|
||||
self.refresh_token = refresh_token
|
||||
self.sid, self.mid, _ = login_by_access_token(access_token)
|
||||
|
||||
def upload(self,
|
||||
parts,
|
||||
copyright: int,
|
||||
title: str,
|
||||
tid: int,
|
||||
tag: str,
|
||||
desc: str,
|
||||
source: str = '',
|
||||
cover: str = '',
|
||||
no_reprint: int = 0,
|
||||
open_elec: int = 1,
|
||||
max_retry: int = 5,
|
||||
thread_pool_workers: int = 1):
|
||||
return upload(self.access_token,
|
||||
self.sid,
|
||||
self.mid,
|
||||
parts,
|
||||
copyright,
|
||||
title,
|
||||
tid,
|
||||
tag,
|
||||
desc,
|
||||
source,
|
||||
cover,
|
||||
no_reprint,
|
||||
open_elec,
|
||||
max_retry,
|
||||
thread_pool_workers)
|
||||
|
||||
def edit(self,
|
||||
avid=None,
|
||||
bvid=None,
|
||||
parts=None,
|
||||
insert_index=None,
|
||||
copyright=None,
|
||||
title=None,
|
||||
tid=None,
|
||||
tag=None,
|
||||
desc=None,
|
||||
source=None,
|
||||
cover=None,
|
||||
no_reprint=None,
|
||||
open_elec=None,
|
||||
max_retry: int = 5,
|
||||
thread_pool_workers: int = 1):
|
||||
|
||||
if not avid and not bvid:
|
||||
print("please provide avid or bvid")
|
||||
return None, None
|
||||
if not avid:
|
||||
avid = cipher.bv2av(bvid)
|
||||
if not isinstance(parts, list):
|
||||
parts = [parts]
|
||||
if type(avid) is str:
|
||||
avid = int(avid)
|
||||
edit_videos(
|
||||
self.access_token,
|
||||
self.sid,
|
||||
self.mid,
|
||||
avid,
|
||||
bvid,
|
||||
parts,
|
||||
insert_index,
|
||||
copyright,
|
||||
title,
|
||||
tid,
|
||||
tag,
|
||||
desc,
|
||||
source,
|
||||
cover,
|
||||
no_reprint,
|
||||
open_elec,
|
||||
max_retry,
|
||||
thread_pool_workers
|
||||
)
|
@ -1,641 +0,0 @@
|
||||
import requests
|
||||
from datetime import datetime
|
||||
from .util import cipher as cipher
|
||||
import os
|
||||
import math
|
||||
import hashlib
|
||||
from .util.retry import Retry
|
||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||
|
||||
# From PC ugc_assisstant
|
||||
# APPKEY = 'aae92bc66f3edfab'
|
||||
# APPSECRET = 'af125a0d5279fd576c1b4418a3e8276d'
|
||||
APPKEY = '1d8b6e7d45233436'
|
||||
APPSECRET = '560c52ccd288fed045859ed18bffd973'
|
||||
LOGIN_APPKEY = '783bbb7264451d82'
|
||||
|
||||
# upload chunk size = 2MB
|
||||
CHUNK_SIZE = 2 * 1024 * 1024
|
||||
|
||||
|
||||
class VideoPart:
|
||||
"""
|
||||
Video Part of a post.
|
||||
每个对象代表一个分P
|
||||
|
||||
Attributes:
|
||||
path: file path in local file system.
|
||||
title: title of the video part.
|
||||
desc: description of the video part.
|
||||
server_file_name: file name in bilibili server. generated by pre-upload API.
|
||||
"""
|
||||
|
||||
def __init__(self, path, title='', desc='', server_file_name=None):
|
||||
self.path = path
|
||||
self.title = title
|
||||
self.desc = desc
|
||||
self.server_file_name = server_file_name
|
||||
|
||||
def __repr__(self):
|
||||
return '<{clazz}, path: {path}, title: {title}, desc: {desc}, server_file_name:{server_file_name}>' \
|
||||
.format(clazz=self.__class__.__name__,
|
||||
path=self.path,
|
||||
title=self.title,
|
||||
desc=self.desc,
|
||||
server_file_name=self.server_file_name)
|
||||
|
||||
|
||||
def get_key_old(sid=None, jsessionid=None):
|
||||
"""
|
||||
get public key, hash and session id for login.
|
||||
Args:
|
||||
sid: session id. only for captcha login.
|
||||
jsessionid: j-session id. only for captcha login.
|
||||
Returns:
|
||||
hash: salt for password encryption.
|
||||
pubkey: rsa public key for password encryption.
|
||||
sid: session id.
|
||||
"""
|
||||
headers = {
|
||||
'Content-Type': 'application/x-www-form-urlencoded',
|
||||
'Accept': "application/json, text/javascript, */*; q=0.01"
|
||||
}
|
||||
post_data = {
|
||||
'appkey': APPKEY,
|
||||
'platform': "pc",
|
||||
'ts': str(int(datetime.now().timestamp()))
|
||||
}
|
||||
post_data['sign'] = cipher.sign_dict(post_data, APPSECRET)
|
||||
cookie = {}
|
||||
if sid:
|
||||
cookie['sid'] = sid
|
||||
if jsessionid:
|
||||
cookie['JSESSIONID'] = jsessionid
|
||||
r = requests.post(
|
||||
# "https://passport.bilibili.com/api/oauth2/getKey",
|
||||
"https://passport.bilibili.com/x/passport-login/web/key",
|
||||
headers=headers,
|
||||
data=post_data,
|
||||
cookies=cookie
|
||||
)
|
||||
print(r.content.decode())
|
||||
r_data = r.json()['data']
|
||||
if sid:
|
||||
return r_data['hash'], r_data['key'], sid
|
||||
return r_data['hash'], r_data['key'], r.cookies['sid']
|
||||
|
||||
|
||||
def get_key():
|
||||
headers = {
|
||||
'Content-Type': 'application/x-www-form-urlencoded',
|
||||
'Accept': "application/json, text/javascript, */*; q=0.01"
|
||||
}
|
||||
params_data = {
|
||||
'appkey': LOGIN_APPKEY,
|
||||
# 'ts': str(int(datetime.now().timestamp()))
|
||||
}
|
||||
params_data['sign'] = cipher.login_sign_dict_bin(params_data)
|
||||
r = requests.get(
|
||||
"https://passport.bilibili.com/x/passport-login/web/key",
|
||||
headers=headers,
|
||||
params=params_data
|
||||
)
|
||||
r_data = r.json()['data']
|
||||
return r_data['hash'], r_data['key'], ''
|
||||
|
||||
|
||||
def get_capcha(sid):
|
||||
headers = {
|
||||
'User-Agent': '',
|
||||
'Accept-Encoding': 'gzip,deflate',
|
||||
}
|
||||
|
||||
params = {
|
||||
'appkey': APPKEY,
|
||||
'platform': 'pc',
|
||||
'ts': str(int(datetime.now().timestamp()))
|
||||
}
|
||||
params['sign'] = cipher.sign_dict(params, APPSECRET)
|
||||
|
||||
r = requests.get(
|
||||
"https://passport.bilibili.com/captcha",
|
||||
headers=headers,
|
||||
params=params,
|
||||
cookies={
|
||||
'sid': sid
|
||||
}
|
||||
)
|
||||
|
||||
print(r.status_code)
|
||||
|
||||
capcha_data = r.content
|
||||
|
||||
return r.cookies['JSESSIONID'], capcha_data
|
||||
|
||||
|
||||
def login_by_access_token(access_token):
|
||||
"""
|
||||
bilibili access token login.
|
||||
Args:
|
||||
access_token: Bilibili access token got by previous username/password login.
|
||||
|
||||
Returns:
|
||||
sid: session id.
|
||||
mid: member id.
|
||||
expires_in: access token expire time
|
||||
"""
|
||||
headers = {
|
||||
'Connection': 'keep-alive',
|
||||
'Accept-Encoding': 'gzip,deflate',
|
||||
'Host': 'passport.bilibili.com',
|
||||
'User-Agent': '',
|
||||
}
|
||||
|
||||
login_params = {
|
||||
'appkey': APPKEY,
|
||||
'access_token': access_token,
|
||||
'platform': "pc",
|
||||
'ts': str(int(datetime.now().timestamp())),
|
||||
}
|
||||
login_params['sign'] = cipher.sign_dict(login_params, APPSECRET)
|
||||
|
||||
r = requests.get(
|
||||
url="https://passport.bilibili.com/api/oauth2/info",
|
||||
headers=headers,
|
||||
params=login_params
|
||||
)
|
||||
|
||||
login_data = r.json()['data']
|
||||
|
||||
return r.cookies['sid'], login_data['mid'], login_data["expires_in"]
|
||||
|
||||
|
||||
def upload_cover(access_token, sid, cover_file_path):
|
||||
with open(cover_file_path, "rb") as f:
|
||||
cover_pic = f.read()
|
||||
|
||||
headers = {
|
||||
'Connection': 'keep-alive',
|
||||
'Host': 'member.bilibili.com',
|
||||
'Accept-Encoding': 'gzip,deflate',
|
||||
'User-Agent': '',
|
||||
}
|
||||
|
||||
params = {
|
||||
"access_key": access_token,
|
||||
}
|
||||
|
||||
params["sign"] = cipher.sign_dict(params, APPSECRET)
|
||||
|
||||
files = {
|
||||
'file': ("cover.png", cover_pic, "Content-Type: image/png"),
|
||||
}
|
||||
|
||||
r = requests.post(
|
||||
"http://member.bilibili.com/x/vu/client/cover/up",
|
||||
headers=headers,
|
||||
params=params,
|
||||
files=files,
|
||||
cookies={
|
||||
'sid': sid
|
||||
},
|
||||
verify=False,
|
||||
)
|
||||
|
||||
return r.json()["data"]["url"]
|
||||
|
||||
|
||||
def upload_chunk(upload_url, server_file_name, local_file_name, chunk_data, chunk_size, chunk_id, chunk_total_num):
|
||||
"""
|
||||
upload video chunk.
|
||||
Args:
|
||||
upload_url: upload url by pre_upload api.
|
||||
server_file_name: file name on server by pre_upload api.
|
||||
local_file_name: video file name in local fs.
|
||||
chunk_data: binary data of video chunk.
|
||||
chunk_size: default of ugc_assisstant is 2M.
|
||||
chunk_id: chunk number.
|
||||
chunk_total_num: total chunk number.
|
||||
|
||||
Returns:
|
||||
True: upload chunk success.
|
||||
False: upload chunk fail.
|
||||
"""
|
||||
print("filename: {}".format(local_file_name), "chunk{}/{}".format(chunk_id, chunk_total_num))
|
||||
files = {
|
||||
'version': (None, '2.0.0.1054'),
|
||||
'filesize': (None, chunk_size),
|
||||
'chunk': (None, chunk_id),
|
||||
'chunks': (None, chunk_total_num),
|
||||
'md5': (None, cipher.md5_bytes(chunk_data)),
|
||||
'file': (local_file_name, chunk_data, 'application/octet-stream')
|
||||
}
|
||||
|
||||
r = requests.post(
|
||||
url=upload_url,
|
||||
files=files,
|
||||
cookies={
|
||||
'PHPSESSID': server_file_name
|
||||
},
|
||||
)
|
||||
r.raise_for_status()
|
||||
if r.status_code == 200 and r.json().get("OK", 0) == 1:
|
||||
return True
|
||||
else:
|
||||
print(r.status_code)
|
||||
print(r.content)
|
||||
return False
|
||||
|
||||
|
||||
def upload_video_part(access_token, sid, mid, video_part: VideoPart, max_retry=5):
|
||||
"""
|
||||
upload a video file.
|
||||
Args:
|
||||
access_token: access token generated by login api.
|
||||
sid: session id.
|
||||
mid: member id.
|
||||
video_part: local video file data.
|
||||
max_retry: max retry number for each chunk.
|
||||
|
||||
Returns:
|
||||
status: success or fail.
|
||||
server_file_name: server file name by pre_upload api.
|
||||
"""
|
||||
if not isinstance(video_part, VideoPart):
|
||||
return False
|
||||
if video_part.server_file_name is not None:
|
||||
return True
|
||||
headers = {
|
||||
'Connection': 'keep-alive',
|
||||
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
|
||||
'User-Agent': '',
|
||||
'Accept-Encoding': 'gzip,deflate',
|
||||
}
|
||||
|
||||
r = requests.get(
|
||||
"http://member.bilibili.com/preupload?access_key={}&mid={}&profile=ugcfr%2Fpc3".format(access_token, mid),
|
||||
headers=headers,
|
||||
cookies={
|
||||
'sid': sid
|
||||
},
|
||||
verify=False,
|
||||
)
|
||||
|
||||
pre_upload_data = r.json()
|
||||
upload_url = pre_upload_data['url']
|
||||
complete_upload_url = pre_upload_data['complete']
|
||||
server_file_name = pre_upload_data['filename']
|
||||
local_file_name = video_part.path
|
||||
|
||||
file_size = os.path.getsize(local_file_name)
|
||||
chunk_total_num = int(math.ceil(file_size / CHUNK_SIZE))
|
||||
file_hash = hashlib.md5()
|
||||
with open(local_file_name, 'rb') as f:
|
||||
for chunk_id in range(0, chunk_total_num):
|
||||
chunk_data = f.read(CHUNK_SIZE)
|
||||
status = Retry(max_retry=max_retry, success_return_value=True).run(
|
||||
upload_chunk,
|
||||
upload_url,
|
||||
server_file_name,
|
||||
os.path.basename(local_file_name),
|
||||
chunk_data,
|
||||
CHUNK_SIZE,
|
||||
chunk_id,
|
||||
chunk_total_num
|
||||
)
|
||||
|
||||
if not status:
|
||||
return False
|
||||
file_hash.update(chunk_data)
|
||||
print(file_hash.hexdigest())
|
||||
|
||||
# complete upload
|
||||
post_data = {
|
||||
'chunks': chunk_total_num,
|
||||
'filesize': file_size,
|
||||
'md5': file_hash.hexdigest(),
|
||||
'name': os.path.basename(local_file_name),
|
||||
'version': '2.0.0.1054',
|
||||
}
|
||||
|
||||
r = requests.post(
|
||||
url=complete_upload_url,
|
||||
data=post_data,
|
||||
headers=headers,
|
||||
)
|
||||
print(r.status_code)
|
||||
print(r.content)
|
||||
|
||||
video_part.server_file_name = server_file_name
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def upload(access_token,
|
||||
sid,
|
||||
mid,
|
||||
parts,
|
||||
copyright: int,
|
||||
title: str,
|
||||
tid: int,
|
||||
tag: str,
|
||||
desc: str,
|
||||
source: str = '',
|
||||
cover: str = '',
|
||||
no_reprint: int = 0,
|
||||
open_elec: int = 1,
|
||||
max_retry: int = 5,
|
||||
thread_pool_workers: int = 1):
|
||||
"""
|
||||
upload video.
|
||||
|
||||
Args:
|
||||
access_token: oauth2 access token.
|
||||
sid: session id.
|
||||
mid: member id.
|
||||
parts: VideoPart list.
|
||||
copyright: 原创/转载.
|
||||
title: 投稿标题.
|
||||
tid: 分区id.
|
||||
tag: 标签.
|
||||
desc: 投稿简介.
|
||||
source: 转载地址.
|
||||
cover: 封面图片文件路径.
|
||||
no_reprint: 可否转载.
|
||||
open_elec: 充电.
|
||||
max_retry: max retry time for each chunk.
|
||||
thread_pool_workers: max upload threads.
|
||||
|
||||
Returns:
|
||||
(aid, bvid)
|
||||
aid: av号
|
||||
bvid: bv号
|
||||
"""
|
||||
if not isinstance(parts, list):
|
||||
parts = [parts]
|
||||
|
||||
status = True
|
||||
with ThreadPoolExecutor(max_workers=thread_pool_workers) as tpe:
|
||||
t_list = []
|
||||
for video_part in parts:
|
||||
print("upload {} added in pool".format(video_part.title))
|
||||
t_obj = tpe.submit(upload_video_part, access_token, sid, mid, video_part, max_retry)
|
||||
t_obj.video_part = video_part
|
||||
t_list.append(t_obj)
|
||||
|
||||
for t_obj in as_completed(t_list):
|
||||
status = status and t_obj.result()
|
||||
print("video part {} finished, status: {}".format(t_obj.video_part.title, t_obj.result()))
|
||||
if not status:
|
||||
print("upload failed")
|
||||
return None, None
|
||||
|
||||
# cover
|
||||
if os.path.isfile(cover):
|
||||
try:
|
||||
cover = upload_cover(access_token, sid, cover)
|
||||
except:
|
||||
cover = ''
|
||||
else:
|
||||
cover = ''
|
||||
|
||||
# submit
|
||||
headers = {
|
||||
'Connection': 'keep-alive',
|
||||
'Content-Type': 'application/json',
|
||||
'User-Agent': '',
|
||||
}
|
||||
post_data = {
|
||||
'build': 1054,
|
||||
'copyright': copyright,
|
||||
'cover': cover,
|
||||
'desc': desc,
|
||||
'no_reprint': no_reprint,
|
||||
'open_elec': open_elec,
|
||||
'source': source,
|
||||
'tag': tag,
|
||||
'tid': tid,
|
||||
'title': title,
|
||||
'videos': []
|
||||
}
|
||||
for video_part in parts:
|
||||
post_data['videos'].append({
|
||||
"desc": video_part.desc,
|
||||
"filename": video_part.server_file_name,
|
||||
"title": video_part.title
|
||||
})
|
||||
|
||||
params = {
|
||||
'access_key': access_token,
|
||||
}
|
||||
params['sign'] = cipher.sign_dict(params, APPSECRET)
|
||||
r = requests.post(
|
||||
url="http://member.bilibili.com/x/vu/client/add",
|
||||
params=params,
|
||||
headers=headers,
|
||||
verify=False,
|
||||
cookies={
|
||||
'sid': sid
|
||||
},
|
||||
json=post_data,
|
||||
)
|
||||
|
||||
print("submit")
|
||||
print(r.status_code)
|
||||
print(r.content.decode())
|
||||
|
||||
data = r.json()["data"]
|
||||
return data["aid"], data["bvid"]
|
||||
|
||||
|
||||
def get_post_data(access_token, sid, avid):
|
||||
headers = {
|
||||
'Connection': 'keep-alive',
|
||||
'Host': 'member.bilibili.com',
|
||||
'Accept-Encoding': 'gzip,deflate',
|
||||
'User-Agent': '',
|
||||
}
|
||||
|
||||
params = {
|
||||
"access_key": access_token,
|
||||
"aid": avid,
|
||||
"build": "1054"
|
||||
}
|
||||
|
||||
params["sign"] = cipher.sign_dict(params, APPSECRET)
|
||||
|
||||
r = requests.get(
|
||||
url="http://member.bilibili.com/x/client/archive/view",
|
||||
headers=headers,
|
||||
params=params,
|
||||
cookies={
|
||||
'sid': sid
|
||||
}
|
||||
)
|
||||
|
||||
return r.json()["data"]
|
||||
|
||||
|
||||
def edit_videos(
|
||||
access_token,
|
||||
sid,
|
||||
mid,
|
||||
avid=None,
|
||||
bvid=None,
|
||||
parts=None,
|
||||
insert_index=None,
|
||||
copyright=None,
|
||||
title=None,
|
||||
tid=None,
|
||||
tag=None,
|
||||
desc=None,
|
||||
source=None,
|
||||
cover=None,
|
||||
no_reprint=None,
|
||||
open_elec=None,
|
||||
max_retry: int = 5,
|
||||
thread_pool_workers: int = 1):
|
||||
"""
|
||||
insert videos into existed post.
|
||||
|
||||
Args:
|
||||
access_token: oauth2 access token.
|
||||
sid: session id.
|
||||
mid: member id.
|
||||
avid: av number,
|
||||
bvid: bv string,
|
||||
parts: VideoPart list.
|
||||
insert_index: new video index.
|
||||
copyright: 原创/转载.
|
||||
title: 投稿标题.
|
||||
tid: 分区id.
|
||||
tag: 标签.
|
||||
desc: 投稿简介.
|
||||
source: 转载地址.
|
||||
cover: cover url.
|
||||
no_reprint: 可否转载.
|
||||
open_elec: 充电.
|
||||
max_retry: max retry time for each chunk.
|
||||
thread_pool_workers: max upload threads.
|
||||
|
||||
Returns:
|
||||
(aid, bvid)
|
||||
aid: av号
|
||||
bvid: bv号
|
||||
"""
|
||||
if not avid and not bvid:
|
||||
print("please provide avid or bvid")
|
||||
return None, None
|
||||
if not avid:
|
||||
avid = cipher.bv2av(bvid)
|
||||
if not isinstance(parts, list):
|
||||
parts = [parts]
|
||||
if type(avid) is str:
|
||||
avid = int(avid)
|
||||
|
||||
post_video_data = get_post_data(access_token, sid, avid)
|
||||
|
||||
status = True
|
||||
with ThreadPoolExecutor(max_workers=thread_pool_workers) as tpe:
|
||||
t_list = []
|
||||
for video_part in parts:
|
||||
print("upload {} added in pool".format(video_part.title))
|
||||
t_obj = tpe.submit(upload_video_part, access_token, sid, mid, video_part, max_retry)
|
||||
t_obj.video_part = video_part
|
||||
t_list.append(t_obj)
|
||||
|
||||
for t_obj in as_completed(t_list):
|
||||
status = status and t_obj.result()
|
||||
print("video part {} finished, status: {}".format(t_obj.video_part.title, t_obj.result()))
|
||||
if not status:
|
||||
print("upload failed")
|
||||
return None, None
|
||||
|
||||
headers = {
|
||||
'Connection': 'keep-alive',
|
||||
'Content-Type': 'application/json',
|
||||
'User-Agent': '',
|
||||
}
|
||||
submit_data = {
|
||||
'aid': avid,
|
||||
'build': 1054,
|
||||
'copyright': post_video_data["archive"]["copyright"],
|
||||
'cover': post_video_data["archive"]["cover"],
|
||||
'desc': post_video_data["archive"]["desc"],
|
||||
'no_reprint': post_video_data["archive"]["no_reprint"],
|
||||
'open_elec': post_video_data["archive_elec"]["state"], # open_elec not tested
|
||||
'source': post_video_data["archive"]["source"],
|
||||
'tag': post_video_data["archive"]["tag"],
|
||||
'tid': post_video_data["archive"]["tid"],
|
||||
'title': post_video_data["archive"]["title"],
|
||||
'videos': post_video_data["videos"]
|
||||
}
|
||||
|
||||
# cover
|
||||
if os.path.isfile(cover):
|
||||
try:
|
||||
cover = upload_cover(access_token, sid, cover)
|
||||
except:
|
||||
cover = ''
|
||||
else:
|
||||
cover = ''
|
||||
|
||||
# edit archive data
|
||||
if copyright:
|
||||
submit_data["copyright"] = copyright
|
||||
if title:
|
||||
submit_data["title"] = title
|
||||
if tid:
|
||||
submit_data["tid"] = tid
|
||||
if tag:
|
||||
submit_data["tag"] = tag
|
||||
if desc:
|
||||
submit_data["desc"] = desc
|
||||
if source:
|
||||
submit_data["source"] = source
|
||||
if cover:
|
||||
submit_data["cover"] = cover
|
||||
if no_reprint:
|
||||
submit_data["no_reprint"] = no_reprint
|
||||
if open_elec:
|
||||
submit_data["open_elec"] = open_elec
|
||||
|
||||
if type(insert_index) is int:
|
||||
for i, video_part in enumerate(parts):
|
||||
submit_data['videos'].insert(insert_index + i, {
|
||||
"desc": video_part.desc,
|
||||
"filename": video_part.server_file_name,
|
||||
"title": video_part.title
|
||||
})
|
||||
elif insert_index is None:
|
||||
for video_part in parts:
|
||||
submit_data['videos'].append({
|
||||
"desc": video_part.desc,
|
||||
"filename": video_part.server_file_name,
|
||||
"title": video_part.title
|
||||
})
|
||||
else:
|
||||
print("wrong insert index")
|
||||
return None, None
|
||||
|
||||
params = {
|
||||
'access_key': access_token,
|
||||
}
|
||||
params['sign'] = cipher.sign_dict(params, APPSECRET)
|
||||
r = requests.post(
|
||||
url="http://member.bilibili.com/x/vu/client/edit",
|
||||
params=params,
|
||||
headers=headers,
|
||||
verify=False,
|
||||
cookies={
|
||||
'sid': sid
|
||||
},
|
||||
json=submit_data,
|
||||
)
|
||||
|
||||
print("edit submit")
|
||||
print(r.status_code)
|
||||
print(r.content.decode())
|
||||
|
||||
data = r.json()["data"]
|
||||
return data["aid"], data["bvid"]
|
@@ -1 +0,0 @@
from .cipher import *

@ -1,119 +0,0 @@
|
||||
import hashlib
|
||||
import rsa
|
||||
import base64
|
||||
import subprocess
|
||||
import platform
|
||||
import os.path
|
||||
|
||||
|
||||
def md5(data: str):
|
||||
"""
|
||||
generate md5 hash of utf-8 encoded string.
|
||||
"""
|
||||
return hashlib.md5(data.encode("utf-8")).hexdigest()
|
||||
|
||||
|
||||
def md5_bytes(data: bytes):
|
||||
"""
|
||||
generate md5 hash of binary.
|
||||
"""
|
||||
return hashlib.md5(data).hexdigest()
|
||||
|
||||
|
||||
def sign_str(data: str, app_secret: str):
|
||||
"""
|
||||
sign a string of request parameters
|
||||
Args:
|
||||
data: string of request parameters, must be sorted by key before input.
|
||||
app_secret: a secret string coupled with app_key.
|
||||
|
||||
Returns:
|
||||
A hash string. len=32
|
||||
"""
|
||||
return md5(data + app_secret)
|
||||
|
||||
|
||||
def sign_dict(data: dict, app_secret: str):
|
||||
"""
|
||||
sign a dictionary of request parameters
|
||||
Args:
|
||||
data: dictionary of request parameters.
|
||||
app_secret: a secret string coupled with app_key.
|
||||
|
||||
Returns:
|
||||
A hash string. len=32
|
||||
"""
|
||||
data_str = []
|
||||
keys = list(data.keys())
|
||||
keys.sort()
|
||||
for key in keys:
|
||||
data_str.append("{}={}".format(key, data[key]))
|
||||
data_str = "&".join(data_str)
|
||||
data_str = data_str + app_secret
|
||||
return md5(data_str)
|
||||
|
||||
|
||||
def login_sign_dict_bin(data: dict):
|
||||
data_str = []
|
||||
keys = list(data.keys())
|
||||
keys.sort()
|
||||
for key in keys:
|
||||
data_str.append("{}={}".format(key, data[key]))
|
||||
data_str = "&".join(data_str)
|
||||
package_directory = os.path.dirname(os.path.abspath(__file__))
|
||||
if platform.system().lower() == 'windows':
|
||||
print(data_str)
|
||||
print(subprocess.Popen([os.path.join(package_directory, "sign.exe"), data_str], stdout=subprocess.PIPE).communicate()[0].decode().strip())
|
||||
|
||||
return subprocess.Popen([os.path.join(package_directory, "sign.exe"), data_str], stdout=subprocess.PIPE).communicate()[0].decode().strip()
|
||||
if platform.system().lower() == 'linux':
|
||||
return subprocess.Popen([os.path.join(package_directory, "sign.out"), data_str], stdout=subprocess.PIPE).communicate()[0].decode().strip()
|
||||
raise Exception("Operating System is not supported.")
|
||||
|
||||
|
||||
def encrypt_login_password(password, hash, pubkey):
|
||||
"""
|
||||
encrypt password for login api.
|
||||
Args:
|
||||
password: plain text of user password.
|
||||
hash: hash provided by /api/oauth2/getKey.
|
||||
pubkey: public key provided by /api/oauth2/getKey.
|
||||
|
||||
Returns:
|
||||
An encrypted cipher of password.
|
||||
"""
|
||||
return base64.b64encode(rsa.encrypt(
|
||||
(hash + password).encode('utf-8'),
|
||||
rsa.PublicKey.load_pkcs1_openssl_pem(pubkey.encode()),
|
||||
))
|
||||
|
||||
|
||||
def av2bv(av: int):
|
||||
table = 'fZodR9XQDSUm21yCkr6zBqiveYah8bt4xsWpHnJE7jL5VG3guMTKNPAwcF'
|
||||
tr = {}
|
||||
for i in range(58):
|
||||
tr[table[i]] = i
|
||||
s = [11, 10, 3, 8, 4, 6]
|
||||
xor = 177451812
|
||||
add = 8728348608
|
||||
|
||||
av = (av ^ xor) + add
|
||||
r = list('BV1 4 1 7 ')
|
||||
for i in range(6):
|
||||
r[s[i]] = table[av // 58 ** i % 58]
|
||||
return ''.join(r)
|
||||
|
||||
|
||||
def bv2av(bv: str):
|
||||
table = 'fZodR9XQDSUm21yCkr6zBqiveYah8bt4xsWpHnJE7jL5VG3guMTKNPAwcF'
|
||||
tr = {}
|
||||
for i in range(58):
|
||||
tr[table[i]] = i
|
||||
s = [11, 10, 3, 8, 4, 6]
|
||||
xor = 177451812
|
||||
add = 8728348608
|
||||
|
||||
r = 0
|
||||
for i in range(6):
|
||||
r += tr[bv[s[i]]] * 58 ** i
|
||||
return (r - add) ^ xor
|
@@ -1,21 +0,0 @@
from time import sleep


class Retry:
def __init__(self, max_retry, success_return_value, sleep_sec = 60):
self.max_retry = max_retry
self.success_return_value = success_return_value
self.sleep_sec = 60

def run(self, func, *args, **kwargs):
status = False
for i in range(0, self.max_retry):
try:
return_value = func(*args, **kwargs)
except Exception:
sleep(self.sleep_sec)
continue
if return_value == self.success_return_value:
status = True
break
return status

@@ -8,6 +8,7 @@ from typing import Union
from bs4 import BeautifulSoup

from config import DANMAKU_EXEC, VIDEO_RESOLUTION, DANMAKU_SPEED, DANMAKU_FONT_NAME, DANMAKU_FONT_SIZE, \
DANMAKU_OPACITY, \
DANMAKU_USE_DANMU2ASS, DANMAKU_USE_DANMAKUFACTORY
from exception.danmaku import NoDanmakuException, DanmakuFormatErrorException
from util.file import check_file_exist
@@ -50,7 +51,7 @@ def danmaku_to_subtitle_use_danmaku_factory(file: Union[os.PathLike[str], str],
DANMAKU_EXEC, "--ignore-warnings",
"-r", str(VIDEO_RESOLUTION), "-s", str(DANMAKU_SPEED), "-f", "5",
"-S", str(DANMAKU_FONT_SIZE), "-N", str(DANMAKU_FONT_NAME), "--showmsgbox", "FALSE",
"-O", "255", "-L", "1", "-D", "0",
"-O", "{:.0f}".format(DANMAKU_OPACITY*255/100), "-L", "1", "-D", "0",
"-o", "ass", new_subtitle_name, "-i", file, "-t", str(time_shift)
))

@@ -58,7 +59,7 @@ def danmaku_to_subtitle_use_danmaku_factory(file: Union[os.PathLike[str], str],
def danmaku_to_subtitle_use_danmu2ass(file: Union[os.PathLike[str], str], time_shift: float, new_subtitle_name: str):
(_w, _h) = VIDEO_RESOLUTION.split("x")
return subprocess.Popen((
DANMAKU_EXEC, "--force", "-a", "1", "-d", str(DANMAKU_SPEED), "--font", str(DANMAKU_FONT_NAME),
DANMAKU_EXEC, "--force", "-a", "{:.1f}".format(DANMAKU_OPACITY/100.0), "-d", str(DANMAKU_SPEED), "--font", str(DANMAKU_FONT_NAME),
"--font-size", str(DANMAKU_FONT_SIZE), "--lane-size", str(DANMAKU_FONT_SIZE), "--width", _w, "--height", _h,
"-o", new_subtitle_name, "-p", "1", "--time-offset", str(time_shift), "--width-ratio", "1", file
))

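To make the new opacity mapping concrete (illustrative numbers, not from the diff): with DANMAKU_OPACITY = 60, the DanmakuFactory call above passes "-O" "153" (60 * 255 / 100, rounded) and the Danmu2ass call passes "-a" "0.6" (60 / 100), replacing the previously hard-coded "-O" "255" and "-a" "1".
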
@@ -1,11 +1,12 @@
import json
import os
import subprocess
import warnings
from datetime import datetime, timedelta
from typing import IO

from config import FFMPEG_EXEC, FFMPEG_USE_HEVC, VIDEO_BITRATE, FFMPEG_USE_NVIDIA_GPU, VIDEO_CLIP_EACH_SEC, \
VIDEO_CLIP_OVERFLOW_SEC, \
FFMPEG_USE_INTEL_GPU, VIDEO_OUTPUT_DIR, VIDEO_GOP, FFMPEG_USE_VAAPI
from config import VIDEO_CLIP_EACH_SEC, VIDEO_CLIP_OVERFLOW_SEC, VIDEO_OUTPUT_DIR, \
FFMPEG_EXEC, HANDBRAKE_EXEC, HANDBRAKE_PRESET_FILE, HANDBRAKE_PRESET, HANDBRAKE_ENCOPT
from . import LOGGER


@@ -25,143 +26,65 @@ def get_video_real_duration(filename):


def encode_video_with_subtitles(orig_filename: str, subtitles: list[str], base_ts: float):
new_filename = base_ts_to_filename(base_ts, False)
new_fullpath = os.path.join(VIDEO_OUTPUT_DIR, new_filename)
if FFMPEG_USE_HEVC:
if FFMPEG_USE_NVIDIA_GPU:
process = get_encode_hevc_process_use_nvenc(orig_filename, subtitles, new_fullpath)
elif FFMPEG_USE_VAAPI:
process = get_encode_hevc_process_use_vaapi(orig_filename, subtitles, new_fullpath)
elif FFMPEG_USE_INTEL_GPU:
process = get_encode_hevc_process_use_intel(orig_filename, subtitles, new_fullpath)
else:
process = get_encode_hevc_process_use_cpu(orig_filename, subtitles, new_fullpath)
else:
if FFMPEG_USE_NVIDIA_GPU:
process = get_encode_process_use_nvenc(orig_filename, subtitles, new_fullpath)
elif FFMPEG_USE_VAAPI:
process = get_encode_process_use_vaapi(orig_filename, subtitles, new_fullpath)
elif FFMPEG_USE_INTEL_GPU:
process = get_encode_process_use_intel(orig_filename, subtitles, new_fullpath)
else:
process = get_encode_process_use_cpu(orig_filename, subtitles, new_fullpath)
handle_ffmpeg_output(process.stdout)
create_dt = datetime.fromtimestamp(base_ts)
current_dt = (create_dt).strftime("%Y%m%d_%H%M_")
process = get_encode_process_use_handbrake(orig_filename,
subtitles,
os.path.join(VIDEO_OUTPUT_DIR, "{}.mp4".format(current_dt)))
handle_handbrake_output(process.stdout)
process.wait()
return [new_fullpath]
return [{
"base_path": VIDEO_OUTPUT_DIR,
"file": "{}.mp4".format(current_dt),
}]


def get_encode_process_use_nvenc(orig_filename: str, subtitles: list[str], new_filename: str):
print("[+]Use Nvidia NvEnc Acceleration")
def get_encode_process_use_handbrake(orig_filename: str, subtitles: list[str], new_filename: str):
print("[+]Use HandBrakeCli")
encode_process = subprocess.Popen([
FFMPEG_EXEC, *_common_ffmpeg_setting(),
"-i", orig_filename, "-vf",
",".join("subtitles=%s" % i for i in subtitles) + ",hwupload_cuda",
"-c:v", "h264_nvenc",
*_common_ffmpeg_params(),
# "-t", "10",
HANDBRAKE_EXEC, *_common_handbrake_setting(),
"--preset-import-file", HANDBRAKE_PRESET_FILE, "--preset", HANDBRAKE_PRESET,
"-i", orig_filename, "-x", HANDBRAKE_ENCOPT,
"--ssa-file", ",".join(i for i in subtitles),
"--ssa-burn", ",".join("%d" % (i+1) for i in range(len(subtitles))),
"-o",
new_filename
], stdout=subprocess.PIPE)
return encode_process


def get_encode_process_use_intel(orig_filename: str, subtitles: list[str], new_filename: str):
print("[+]Use Intel QSV Acceleration")
encode_process = subprocess.Popen([
FFMPEG_EXEC, *_common_ffmpeg_setting(),
"-hwaccel", "qsv", "-i", orig_filename, "-vf",
",".join("subtitles=%s" % i for i in subtitles),
"-c:v", "h264_qsv",
*_common_ffmpeg_params(),
# "-t", "10",
new_filename
], stdout=subprocess.PIPE)
return encode_process


def get_encode_process_use_vaapi(orig_filename: str, subtitles: list[str], new_filename: str):
print("[+]Use VAAPI Acceleration")
encode_process = subprocess.Popen([
FFMPEG_EXEC, *_common_ffmpeg_setting(),
"-hwaccel", "vaapi", "-hwaccel_output_format", "vaapi", "-i", orig_filename, "-vf",
"hwmap=mode=read+write+direct,format=nv12," +
"".join("subtitles=%s," % i for i in subtitles) + "hwmap",
"-c:v", "h264_vaapi",
*_common_ffmpeg_params(),
# "-t", "10",
new_filename
], stdout=subprocess.PIPE)
return encode_process


def get_encode_process_use_cpu(orig_filename: str, subtitles: list[str], new_filename: str):
print("[+]Use CPU Encode")
encode_process = subprocess.Popen([
FFMPEG_EXEC, *_common_ffmpeg_setting(),
"-i", orig_filename, "-vf",
",".join("subtitles=%s" % i for i in subtitles),
"-c:v", "h264",
*_common_ffmpeg_params(),
# "-t", "10",
new_filename
], stdout=subprocess.PIPE)
return encode_process


def get_encode_hevc_process_use_nvenc(orig_filename: str, subtitles: list[str], new_filename: str):
print("[+]Use Nvidia NvEnc Acceleration")
encode_process = subprocess.Popen([
FFMPEG_EXEC, *_common_ffmpeg_setting(),
"-i", orig_filename, "-vf",
"".join("subtitles=%s," % i for i in subtitles) + ",hwupload_cuda",
"-c:v", "hevc_nvenc",
*_common_ffmpeg_params(),
# "-t", "10",
new_filename
], stdout=subprocess.PIPE)
return encode_process


def get_encode_hevc_process_use_vaapi(orig_filename: str, subtitles: list[str], new_filename: str):
print("[+]Use VAAPI Acceleration")
encode_process = subprocess.Popen([
FFMPEG_EXEC, *_common_ffmpeg_setting(),
"-hwaccel", "vaapi", "-hwaccel_output_format", "vaapi", "-i", orig_filename, "-vf",
"hwmap=mode=read+write+direct,format=nv12," +
"".join("subtitles=%s," % i for i in subtitles) + "hwmap",
"-c:v", "hevc_vaapi",
*_common_ffmpeg_params(),
# "-t", "10",
new_filename
], stdout=subprocess.PIPE)
return encode_process


def get_encode_hevc_process_use_intel(orig_filename: str, subtitles: list[str], new_filename: str):
print("[+]Use Intel QSV Acceleration")
encode_process = subprocess.Popen([
FFMPEG_EXEC, *_common_ffmpeg_setting(),
"-hwaccel", "qsv", "-i", orig_filename, "-vf",
",".join("subtitles=%s" % i for i in subtitles),
"-c:v", "hevc_qsv",
*_common_ffmpeg_params(),
# "-t", "10",
new_filename
], stdout=subprocess.PIPE)
return encode_process


def get_encode_hevc_process_use_cpu(orig_filename: str, subtitles: list[str], new_filename: str):
print("[+]Use CPU Encode")
encode_process = subprocess.Popen([
FFMPEG_EXEC, *_common_ffmpeg_setting(),
"-i", orig_filename, "-vf",
",".join("subtitles=%s" % i for i in subtitles),
"-c:v", "hevc",
*_common_ffmpeg_params(),
# "-t", "10",
new_filename
], stdout=subprocess.PIPE)
return encode_process
def handle_handbrake_output(stdout: IO[bytes]):
out_time = "0:0:0.0"
speed = "0"
if stdout is None:
print("[!]STDOUT is null")
return
json_body = ""
json_start = False
_i = 0
while True:
line = stdout.readline()
if line == b"":
break
if json_start:
json_body += line.strip().decode("UTF-8")
if line.startswith(b"}"):
json_start = False
status_payload = json.loads(json_body)
if status_payload["State"] == "WORKING":
out_time = "ETA: {Hours:02d}:{Minutes:02d}:{Seconds:02d}".format_map(status_payload["Working"])
speed = "{Rate:.2f}FPS".format_map(status_payload["Working"])
_i += 1
if _i % 300 == 150:
LOGGER.debug("[>]Speed:{}@{}".format(out_time, speed))
elif status_payload["State"] == "WORKDONE":
break
continue
if line.startswith(b"Progress:"):
json_start = True
json_body = "{"
LOGGER.debug("[ ]Speed:{}@{}".format(out_time, speed))


def handle_ffmpeg_output(stdout: IO[bytes]) -> str:
@@ -195,11 +118,12 @@ def duration_str_to_float(duration_str) -> float:


def quick_split_video(file):
warnings.warn("已过时", DeprecationWarning)
if not os.path.isfile(file):
raise FileNotFoundError(file)
file_name = os.path.split(file)[-1]
_create_dt = os.path.splitext(file_name)[0]
create_dt = datetime.strptime(_create_dt, "%Y%m%d_%H%M")
create_dt = datetime.strptime(_create_dt[:13], "%Y%m%d_%H%M")
_duration_str = get_video_real_duration(file)
duration = duration_str_to_float(_duration_str)
current_sec = 0
@@ -235,10 +159,9 @@ def _common_ffmpeg_setting():
)


def _common_ffmpeg_params():
def _common_handbrake_setting():
return (
"-f", "mp4", "-b:v", VIDEO_BITRATE, "-c:a", "aac",
"-preset:v", "fast", "-profile:v", "main", "-avoid_negative_ts", "1",
"-qmin", "18", "-qmax", "38", "-g:v", str(VIDEO_GOP),
"-fflags", "+genpts", "-shortest"
"--json",
"--crop-mode", "none", "--no-comb-detect", "--no-bwdif", "--no-decomb", "--no-detelecine", "--no-hqdn3d",
"--no-nlmeans", "--no-chroma-smooth", "--no-unsharp", "--no-lapsharp", "--no-deblock", "--align-av"
)

@@ -13,7 +13,7 @@ def do_workflow(video_file, danmaku_base_file, *danmaku_files):
start_ts = get_file_start(danmaku_base_file)
except DanmakuException:
print("基准弹幕文件异常,跳过")
return
return []
result.append(danmaku_to_subtitle(danmaku_base_file, 0))
for danmaku_file in danmaku_files:
try:
@@ -26,15 +26,9 @@ def do_workflow(video_file, danmaku_base_file, *danmaku_files):
print("弹幕文件", danmaku_file, "异常")
continue
print(result)
file_need_split = encode_video_with_subtitles(video_file, result, start_ts)
_video_parts = []
for file in file_need_split:
_video_parts += quick_split_video(file)
_video_parts = encode_video_with_subtitles(video_file, result, start_ts)
# clean files
for file in result:
if os.path.isfile(file):
os.remove(file)
for file in file_need_split:
if os.path.isfile(file):
os.remove(file)
return _video_parts
