repo_id
stringlengths 6
101
| size
int64 367
5.14M
| file_path
stringlengths 2
269
| content
stringlengths 367
5.14M
|
|---|---|---|---|
2977094657/BilibiliHistoryFetcher
| 16,827
|
scripts/scheduler_db.py
|
import json
import os
import sqlite3
from datetime import datetime
from typing import List, Dict, Optional
from loguru import logger
from scripts.utils import get_base_path, setup_logger
# 确保日志系统已初始化
setup_logger()
class SchedulerDB:
    """计划任务数据库管理类 (scheduler task database manager).

    SQLite-backed store for scheduled-task state, per-run history and
    task-chain execution records. Intended to be used as a process-wide
    singleton via :meth:`get_instance`.
    """

    # Lazily created singleton instance (see get_instance()).
    _instance = None

    @classmethod
    def get_instance(cls) -> 'SchedulerDB':
        """获取单例实例 — return the shared instance, creating it on first use."""
        if not cls._instance:
            cls._instance = cls()
        return cls._instance

    def __init__(self):
        """初始化数据库连接 — open/create the scheduler database.

        Creates ``<base>/output/database/scheduler.db`` (and its directory)
        when missing, then ensures all tables exist.
        """
        if SchedulerDB._instance is not None:
            # Guard against direct construction once the singleton exists;
            # such an object is an uninitialized shell — use get_instance().
            return
        base_path = get_base_path()
        self.db_dir = os.path.join(base_path, 'output', 'database')
        os.makedirs(self.db_dir, exist_ok=True)
        self.db_path = os.path.join(self.db_dir, 'scheduler.db')
        # check_same_thread=False: the scheduler may call in from worker
        # threads; sqlite3 serializes statements on a single connection.
        self.conn = sqlite3.connect(self.db_path, check_same_thread=False)
        self.conn.row_factory = sqlite3.Row
        # 创建所需的表
        self._create_tables()

    def _create_tables(self):
        """创建所需的数据库表 — create all tables if they do not exist."""
        cursor = self.conn.cursor()
        # 任务状态表 - 存储每个任务的最新状态
        cursor.execute('''
            CREATE TABLE IF NOT EXISTS task_status (
                task_id TEXT PRIMARY KEY,
                name TEXT,
                last_run_time TEXT,
                next_run_time TEXT,
                last_status TEXT,
                enabled INTEGER DEFAULT 1,
                total_runs INTEGER DEFAULT 0,
                success_runs INTEGER DEFAULT 0,
                fail_runs INTEGER DEFAULT 0,
                avg_duration REAL DEFAULT 0,
                last_error TEXT,
                last_modified TEXT,
                priority INTEGER DEFAULT 0,
                tags TEXT,
                extra_data TEXT
            )
        ''')
        # 任务执行历史表 - 存储每次执行的详细信息
        cursor.execute('''
            CREATE TABLE IF NOT EXISTS task_history (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                task_id TEXT,
                start_time TEXT,
                end_time TEXT,
                duration REAL,
                status TEXT,
                error_message TEXT,
                triggered_by TEXT,
                output TEXT,
                FOREIGN KEY (task_id) REFERENCES task_status (task_id)
            )
        ''')
        # 依赖任务执行记录表 - 记录任务链的执行情况
        cursor.execute('''
            CREATE TABLE IF NOT EXISTS task_chain_execution (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                chain_id TEXT,
                start_task_id TEXT,
                start_time TEXT,
                end_time TEXT,
                status TEXT,
                tasks_executed TEXT,
                tasks_succeeded TEXT,
                tasks_failed TEXT
            )
        ''')
        self.conn.commit()

    @staticmethod
    def _deserialize_task_row(row) -> Dict:
        """Convert a ``task_status`` row into a dict, decoding JSON columns.

        Malformed JSON in ``extra_data``/``tags`` degrades to ``{}``/``[]``
        instead of raising, preserving the tolerant behavior callers expect.
        """
        task_data = dict(row)
        if task_data.get('extra_data'):
            try:
                task_data['extra_data'] = json.loads(task_data['extra_data'])
            except (TypeError, ValueError):
                task_data['extra_data'] = {}
        if task_data.get('tags'):
            try:
                task_data['tags'] = json.loads(task_data['tags'])
            except (TypeError, ValueError):
                task_data['tags'] = []
        return task_data

    def get_all_task_status(self) -> List[Dict]:
        """获取所有任务的状态 — return every task_status row as a dict."""
        cursor = self.conn.cursor()
        cursor.execute('SELECT * FROM task_status')
        return [self._deserialize_task_row(row) for row in cursor.fetchall()]

    def get_task_status(self, task_id: str) -> Optional[Dict]:
        """获取特定任务的状态 — return one task's status, or None if unknown."""
        cursor = self.conn.cursor()
        cursor.execute('SELECT * FROM task_status WHERE task_id = ?', (task_id,))
        row = cursor.fetchone()
        if not row:
            return None
        return self._deserialize_task_row(row)

    def update_task_status(self, task_id: str, data: Dict) -> bool:
        """更新任务状态 — upsert columns of task_status for one task.

        ``data`` maps column names to values; ``extra_data``/``tags`` are
        JSON-encoded before storage. Keys must be trusted column names —
        they are interpolated into the SQL statement text (values are
        parameterized). Returns True on success, False on error (logged).
        """
        try:
            task_status = self.get_task_status(task_id)
            cursor = self.conn.cursor()
            # 检查task_status表是否有last_modified列 (older DB files may lack it)
            cursor.execute("PRAGMA table_info(task_status)")
            has_last_modified = any(
                col[1] == 'last_modified' for col in cursor.fetchall()
            )
            # Build the column/value lists once, shared by UPDATE and INSERT.
            fields = []
            values = []
            for key, value in data.items():
                if key == 'last_modified' and not has_last_modified:
                    continue  # column absent in this schema revision
                fields.append(key)
                if key in ('extra_data', 'tags') and value is not None:
                    # Structured columns are stored as JSON text.
                    values.append(json.dumps(value, ensure_ascii=False))
                else:
                    values.append(value)
            if has_last_modified:
                fields.append('last_modified')
                values.append(datetime.now().isoformat())
            if task_status:
                # 任务存在,更新 — update only the supplied columns.
                assignments = ', '.join(f"{field} = ?" for field in fields)
                values.append(task_id)
                cursor.execute(
                    f"UPDATE task_status SET {assignments} WHERE task_id = ?",
                    values,
                )
            else:
                # 任务不存在,创建新任务 — insert task_id plus supplied columns.
                columns = ['task_id'] + fields
                placeholders = ', '.join('?' for _ in columns)
                cursor.execute(
                    f"INSERT INTO task_status ({', '.join(columns)}) "
                    f"VALUES ({placeholders})",
                    [task_id] + values,
                )
            self.conn.commit()
            return True
        except Exception as e:
            logger.error(f"更新任务状态失败: {str(e)}")
            return False

    def record_task_execution(self, task_id: str,
                              start_time: str,
                              end_time: Optional[str] = None,
                              duration: Optional[float] = None,
                              status: str = "success",
                              error_message: Optional[str] = None,
                              triggered_by: Optional[str] = None,
                              output: Optional[str] = None) -> int:
        """记录任务执行 — insert a task_history row and refresh task_status.

        When both ISO-format timestamps are given and ``duration`` is not,
        the duration is derived from them. Updates run counters and the
        incremental average duration. Returns the new history row id, or
        -1 on error.
        """
        try:
            cursor = self.conn.cursor()
            # 如果提供了开始时间和结束时间,但没有提供持续时间,尝试计算
            if start_time and end_time and duration is None:
                try:
                    start_dt = datetime.fromisoformat(start_time)
                    end_dt = datetime.fromisoformat(end_time)
                    duration = (end_dt - start_dt).total_seconds()
                except (TypeError, ValueError) as e:
                    # Unparseable timestamps: leave duration as None.
                    logger.warning(f"计算任务持续时间失败: {str(e)}")
            # 插入执行记录
            cursor.execute('''
                INSERT INTO task_history
                (task_id, start_time, end_time, duration, status, error_message, triggered_by, output)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?)
            ''', (task_id, start_time, end_time, duration, status,
                  error_message, triggered_by, output))
            history_id = cursor.lastrowid
            # 更新任务状态
            task_status = self.get_task_status(task_id) or {}
            update_data = {
                'last_run_time': start_time,
                'last_status': status,
            }
            # 计数更新
            total_runs = task_status.get('total_runs', 0) + 1
            update_data['total_runs'] = total_runs
            if status == 'success':
                update_data['success_runs'] = task_status.get('success_runs', 0) + 1
            elif status == 'fail':
                update_data['fail_runs'] = task_status.get('fail_runs', 0) + 1
                update_data['last_error'] = error_message
            # 更新平均执行时间 — incremental mean over all recorded runs.
            if duration is not None:
                old_avg = float(task_status.get('avg_duration', 0))
                if old_avg == 0:
                    update_data['avg_duration'] = duration
                else:
                    update_data['avg_duration'] = (
                        (old_avg * (total_runs - 1) + duration) / total_runs
                    )
            # 应用更新
            self.update_task_status(task_id, update_data)
            self.conn.commit()
            return history_id
        except Exception as e:
            logger.error(f"记录任务执行失败: {str(e)}")
            return -1

    def get_task_execution_history(self, task_id: str, limit: int = 10) -> List[Dict]:
        """获取任务执行历史 — most recent runs of one task, newest first."""
        cursor = self.conn.cursor()
        cursor.execute('''
            SELECT * FROM task_history
            WHERE task_id = ?
            ORDER BY start_time DESC
            LIMIT ?
        ''', (task_id, limit))
        return [dict(row) for row in cursor.fetchall()]

    def get_recent_task_executions(self, limit: int = 20) -> List[Dict]:
        """获取最近的任务执行记录 — latest runs across all tasks, with task names."""
        cursor = self.conn.cursor()
        cursor.execute('''
            SELECT h.*, s.name
            FROM task_history h
            LEFT JOIN task_status s ON h.task_id = s.task_id
            ORDER BY h.start_time DESC
            LIMIT ?
        ''', (limit,))
        return [dict(row) for row in cursor.fetchall()]

    def record_chain_execution(self, chain_id: str, start_task_id: str,
                               tasks_executed: List[str], tasks_succeeded: List[str],
                               tasks_failed: List[str], status: str,
                               start_time: datetime, end_time: Optional[datetime] = None) -> int:
        """记录任务链执行 — insert a completed chain run in one step.

        Task lists are stored as JSON text. Returns the new row id, or -1
        on error.
        """
        try:
            cursor = self.conn.cursor()
            cursor.execute('''
                INSERT INTO task_chain_execution
                (chain_id, start_task_id, start_time, end_time, status, tasks_executed, tasks_succeeded, tasks_failed)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?)
            ''', (
                chain_id,
                start_task_id,
                start_time.isoformat(),
                end_time.isoformat() if end_time else None,
                status,
                json.dumps(tasks_executed, ensure_ascii=False),
                json.dumps(tasks_succeeded, ensure_ascii=False),
                json.dumps(tasks_failed, ensure_ascii=False)
            ))
            # Fix: use a distinct local instead of shadowing the chain_id
            # argument with the numeric row id.
            execution_id = cursor.lastrowid
            self.conn.commit()
            return execution_id
        except Exception as e:
            logger.error(f"记录任务链执行失败: {str(e)}")
            return -1

    def get_chain_execution_history(self, limit: int = 10) -> List[Dict]:
        """获取任务链执行历史 — latest chain runs with JSON fields decoded.

        Rows written by record_chain_execution_end() store plain integer
        counters in the tasks_* columns; those are kept as-is (previously
        they were destroyed and replaced by ``[]``).
        """
        cursor = self.conn.cursor()
        cursor.execute('''
            SELECT * FROM task_chain_execution
            ORDER BY start_time DESC
            LIMIT ?
        ''', (limit,))
        result = []
        for row in cursor.fetchall():
            chain_data = dict(row)
            # 解析JSON字段 — only string values need decoding.
            for field in ('tasks_executed', 'tasks_succeeded', 'tasks_failed'):
                value = chain_data.get(field)
                if isinstance(value, str):
                    try:
                        chain_data[field] = json.loads(value)
                    except (TypeError, ValueError):
                        chain_data[field] = []
            result.append(chain_data)
        return result

    def set_task_next_run(self, task_id: str, next_run_time: datetime) -> bool:
        """设置任务的下次执行时间 — store the next scheduled run (ISO format)."""
        try:
            self.update_task_status(task_id, {
                'next_run_time': next_run_time.isoformat()
            })
            return True
        except Exception as e:
            logger.error(f"设置任务下次执行时间失败: {str(e)}")
            return False

    def enable_task(self, task_id: str, enabled: bool = True) -> bool:
        """启用或禁用任务 — store 1/0 in the enabled column."""
        try:
            self.update_task_status(task_id, {'enabled': 1 if enabled else 0})
            return True
        except Exception as e:
            logger.error(f"{'启用' if enabled else '禁用'}任务失败: {str(e)}")
            return False

    def set_task_priority(self, task_id: str, priority: int) -> bool:
        """设置任务优先级 — store the integer priority."""
        try:
            self.update_task_status(task_id, {'priority': priority})
            return True
        except Exception as e:
            logger.error(f"设置任务优先级失败: {str(e)}")
            return False

    def add_task_tags(self, task_id: str, tags: List[str]) -> bool:
        """添加任务标签 — append new tags, skipping duplicates.

        Returns False when the task does not exist or on error.
        """
        try:
            task_status = self.get_task_status(task_id)
            if not task_status:
                return False
            current_tags = task_status.get('tags', [])
            if not isinstance(current_tags, list):
                current_tags = []
            # 添加新标签
            for tag in tags:
                if tag not in current_tags:
                    current_tags.append(tag)
            self.update_task_status(task_id, {'tags': current_tags})
            return True
        except Exception as e:
            logger.error(f"添加任务标签失败: {str(e)}")
            return False

    def remove_task_tags(self, task_id: str, tags: List[str]) -> bool:
        """移除任务标签 — drop the given tags from the task.

        Returns False when the task does not exist or on error.
        """
        try:
            task_status = self.get_task_status(task_id)
            if not task_status:
                return False
            current_tags = task_status.get('tags', [])
            if not isinstance(current_tags, list):
                current_tags = []
            # 移除标签
            current_tags = [tag for tag in current_tags if tag not in tags]
            self.update_task_status(task_id, {'tags': current_tags})
            return True
        except Exception as e:
            logger.error(f"移除任务标签失败: {str(e)}")
            return False

    def record_chain_execution_start(self, chain_id: str, start_task_id: str, start_time: str) -> int:
        """记录任务链开始执行 — insert a 'running' chain row; -1 on error."""
        try:
            cursor = self.conn.cursor()
            cursor.execute('''
                INSERT INTO task_chain_execution
                (chain_id, start_task_id, start_time, status)
                VALUES (?, ?, ?, ?)
            ''', (chain_id, start_task_id, start_time, 'running'))
            execution_id = cursor.lastrowid
            self.conn.commit()
            return execution_id
        except Exception as e:
            logger.error(f"记录任务链开始执行失败: {str(e)}")
            return -1

    def record_chain_execution_end(self, chain_id: str, end_time: str, status: str,
                                   tasks_executed: int, tasks_succeeded: int, tasks_failed: int) -> bool:
        """记录任务链执行完成 — close the latest run of this chain.

        Fix: only the most recent row for ``chain_id`` is updated, so
        finishing a run no longer overwrites earlier history rows that
        share the same chain id.
        """
        try:
            cursor = self.conn.cursor()
            cursor.execute('''
                UPDATE task_chain_execution
                SET end_time = ?, status = ?,
                    tasks_executed = ?, tasks_succeeded = ?, tasks_failed = ?
                WHERE id = (
                    SELECT MAX(id) FROM task_chain_execution WHERE chain_id = ?
                )
            ''', (end_time, status, tasks_executed, tasks_succeeded,
                  tasks_failed, chain_id))
            self.conn.commit()
            return cursor.rowcount > 0
        except Exception as e:
            logger.error(f"记录任务链执行完成失败: {str(e)}")
            return False

    def close(self):
        """关闭数据库连接 — safe to call even if __init__ was short-circuited."""
        if getattr(self, 'conn', None):
            self.conn.close()
|
28harishkumar/blog
| 3,552
|
resources/assets/less/bootstrap/buttons.less
|
//
// Buttons
// --------------------------------------------------
// Base styles
// --------------------------------------------------
.btn {
display: inline-block;
margin-bottom: 0; // For input.btn
font-weight: @btn-font-weight;
text-align: center;
vertical-align: middle;
touch-action: manipulation;
cursor: pointer;
background-image: none; // Reset unusual Firefox-on-Android default style; see https://github.com/necolas/normalize.css/issues/214
border: 1px solid transparent;
white-space: nowrap;
.button-size(@padding-base-vertical; @padding-base-horizontal; @font-size-base; @line-height-base; @border-radius-base);
.user-select(none);
// Keyboard focus ring for the normal, :active and .active states
&,
&:active,
&.active {
&:focus,
&.focus {
.tab-focus();
}
}
// Hover/focus: keep the default text color, suppress underline
&:hover,
&:focus,
&.focus {
color: @btn-default-color;
text-decoration: none;
}
// Pressed state: inset shadow instead of an outline
&:active,
&.active {
outline: 0;
background-image: none;
.box-shadow(inset 0 3px 5px rgba(0,0,0,.125));
}
// Disabled: dimmed, non-interactive
&.disabled,
&[disabled],
fieldset[disabled] & {
cursor: @cursor-disabled;
pointer-events: none; // Future-proof disabling of clicks
.opacity(.65);
.box-shadow(none);
}
}
// Alternate buttons
// --------------------------------------------------
// Each variant delegates to the button-variant(color; background; border) mixin
.btn-default {
.button-variant(@btn-default-color; @btn-default-bg; @btn-default-border);
}
.btn-primary {
.button-variant(@btn-primary-color; @btn-primary-bg; @btn-primary-border);
}
// Success appears as green
.btn-success {
.button-variant(@btn-success-color; @btn-success-bg; @btn-success-border);
}
// Info appears as blue-green
.btn-info {
.button-variant(@btn-info-color; @btn-info-bg; @btn-info-border);
}
// Warning appears as orange
.btn-warning {
.button-variant(@btn-warning-color; @btn-warning-bg; @btn-warning-border);
}
// Danger and error appear as red
.btn-danger {
.button-variant(@btn-danger-color; @btn-danger-bg; @btn-danger-border);
}
// Link buttons
// -------------------------
// Make a button look and behave like a link
.btn-link {
color: @link-color;
font-weight: normal;
border-radius: 0;
// Strip backgrounds/shadows in every state, including disabled
&,
&:active,
&.active,
&[disabled],
fieldset[disabled] & {
background-color: transparent;
.box-shadow(none);
}
// No visible border in any interactive state
&,
&:hover,
&:focus,
&:active {
border-color: transparent;
}
// Behave like a hyperlink on hover/focus
&:hover,
&:focus {
color: @link-hover-color;
text-decoration: underline;
background-color: transparent;
}
// Muted, non-underlined when disabled
&[disabled],
fieldset[disabled] & {
&:hover,
&:focus {
color: @btn-link-disabled-color;
text-decoration: none;
}
}
}
// Button Sizes
// --------------------------------------------------
.btn-lg {
// line-height: ensure even-numbered height of button next to large input
.button-size(@padding-large-vertical; @padding-large-horizontal; @font-size-large; @line-height-large; @border-radius-large);
}
.btn-sm {
// line-height: ensure proper height of button next to small input
.button-size(@padding-small-vertical; @padding-small-horizontal; @font-size-small; @line-height-small; @border-radius-small);
}
.btn-xs {
// Shares the small font/line-height/radius with .btn-sm, tighter padding
.button-size(@padding-xs-vertical; @padding-xs-horizontal; @font-size-small; @line-height-small; @border-radius-small);
}
// Block button
// --------------------------------------------------
.btn-block {
display: block;
width: 100%;
}
// Vertically space out multiple block buttons
.btn-block + .btn-block {
margin-top: 5px;
}
// Specificity overrides
// <input> buttons need width re-asserted to beat base input styling
input[type="submit"],
input[type="reset"],
input[type="button"] {
&.btn-block {
width: 100%;
}
}
|
281677160/openwrt-package
| 17,827
|
luci-app-ssr-plus/shadowsocks-libev/patches/100-Upgrade-PCRE-to-PCRE2.patch
|
From d4f4d9761cbd41c3ab6de79383ff39b9f97bf452 Mon Sep 17 00:00:00 2001
From: Syrone Wong <[email protected]>
Date: Sat, 18 Nov 2017 20:06:50 +0800
Subject: [PATCH] Upgrade PCRE to PCRE2
- Use 8bit variant by default
This comes from a PR closed and never reopen as at times PCRE2 was too
new(???.)
Ref: https://github.com/shadowsocks/shadowsocks-libev/pull/1792
Signed-off-by: Syrone Wong <[email protected]>
[ squash the first 2 patch from PR, drop the last one ]
Signed-off-by: Christian Marangi <[email protected]>
---
.travis.yml | 9 ++-
configure.ac | 8 +--
m4/pcre.m4 | 152 ------------------------------------------
m4/pcre2.m4 | 181 +++++++++++++++++++++++++++++++++++++++++++++++++++
src/rule.c | 53 ++++++++++++---
src/rule.h | 23 +++++--
6 files changed, 253 insertions(+), 173 deletions(-)
delete mode 100644 m4/pcre.m4
create mode 100644 m4/pcre2.m4
# diff --git a/.travis.yml b/.travis.yml
# index ee3424c..e7da08c 100644
# --- a/.travis.yml
# +++ b/.travis.yml
# @@ -11,11 +11,12 @@ env:
# global:
# - LIBSODIUM_VER=1.0.12
# - MBEDTLS_VER=2.4.0
# + - PCRE2_VER=10.30
# before_install:
# - |
# if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then
# # All dependencies for macOS build. Some packages has been installed by travis so use reinstall.
# - brew reinstall autoconf automake xmlto c-ares libev mbedtls libsodium asciidoc >> /dev/null 2>&1;
# + brew reinstall autoconf automake xmlto pcre2 c-ares libev mbedtls libsodium asciidoc >> /dev/null 2>&1;
# else
# wget https://github.com/jedisct1/libsodium/releases/download/$LIBSODIUM_VER/libsodium-$LIBSODIUM_VER.tar.gz;
# tar xvf libsodium-$LIBSODIUM_VER.tar.gz;
# @@ -29,6 +30,12 @@ before_install:
# make SHARED=1;
# sudo make install;
# popd;
# + wget https://ftp.pcre.org/pub/pcre/pcre2-$PCRE2_VER.tar.gz;
# + tar xvf pcre2-$PCRE2_VER.tar.gz;
# + pushd pcre2-$PCRE2_VER;
# + ./configure --prefix=/usr --enable-pcre2-16 --enable-pcre2-32 && make;
# + sudo make install;
# + popd;
# # Load cached docker images
# if [[ -d $HOME/docker ]]; then
# ls $HOME/docker/*.tar.gz | xargs -I {file} sh -c "zcat {file} | docker load";
--- a/configure.ac
+++ b/configure.ac
@@ -20,10 +20,10 @@ AC_DISABLE_STATIC
AC_DISABLE_SHARED
LT_INIT([dlopen])
-dnl Check for pcre library
-TS_CHECK_PCRE
-if test "x${enable_pcre}" != "xyes"; then
- AC_MSG_ERROR([Cannot find pcre library. Configure --with-pcre=DIR])
+dnl Check for pcre2 library
+TS_CHECK_PCRE2
+if test "x${enable_pcre2}" != "xyes"; then
+ AC_MSG_ERROR([Cannot find pcre2 library. Configure --with-pcre2=DIR])
fi
dnl Checks for using shared libraries from system
--- a/m4/pcre.m4
+++ /dev/null
@@ -1,152 +0,0 @@
-dnl -------------------------------------------------------- -*- autoconf -*-
-dnl Licensed to the Apache Software Foundation (ASF) under one or more
-dnl contributor license agreements. See the NOTICE file distributed with
-dnl this work for additional information regarding copyright ownership.
-dnl The ASF licenses this file to You under the Apache License, Version 2.0
-dnl (the "License"); you may not use this file except in compliance with
-dnl the License. You may obtain a copy of the License at
-dnl
-dnl http://www.apache.org/licenses/LICENSE-2.0
-dnl
-dnl Unless required by applicable law or agreed to in writing, software
-dnl distributed under the License is distributed on an "AS IS" BASIS,
-dnl WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-dnl See the License for the specific language governing permissions and
-dnl limitations under the License.
-
-dnl
-dnl TS_ADDTO(variable, value)
-dnl
-dnl Add value to variable
-dnl
-AC_DEFUN([TS_ADDTO], [
- if test "x$$1" = "x"; then
- test "x$verbose" = "xyes" && echo " setting $1 to \"$2\""
- $1="$2"
- else
- ats_addto_bugger="$2"
- for i in $ats_addto_bugger; do
- ats_addto_duplicate="0"
- for j in $$1; do
- if test "x$i" = "x$j"; then
- ats_addto_duplicate="1"
- break
- fi
- done
- if test $ats_addto_duplicate = "0"; then
- test "x$verbose" = "xyes" && echo " adding \"$i\" to $1"
- $1="$$1 $i"
- fi
- done
- fi
-])dnl
-
-dnl
-dnl TS_ADDTO_RPATH(path)
-dnl
-dnl Adds path to variable with the '-rpath' directive.
-dnl
-AC_DEFUN([TS_ADDTO_RPATH], [
- AC_MSG_NOTICE([adding $1 to RPATH])
- TS_ADDTO(LIBTOOL_LINK_FLAGS, [-R$1])
-])dnl
-
-dnl
-dnl pcre.m4: Trafficserver's pcre autoconf macros
-dnl
-
-dnl
-dnl TS_CHECK_PCRE: look for pcre libraries and headers
-dnl
-AC_DEFUN([TS_CHECK_PCRE], [
-enable_pcre=no
-AC_ARG_WITH(pcre, [AC_HELP_STRING([--with-pcre=DIR],[use a specific pcre library])],
-[
- if test "x$withval" != "xyes" && test "x$withval" != "x"; then
- pcre_base_dir="$withval"
- if test "$withval" != "no"; then
- enable_pcre=yes
- case "$withval" in
- *":"*)
- pcre_include="`echo $withval |sed -e 's/:.*$//'`"
- pcre_ldflags="`echo $withval |sed -e 's/^.*://'`"
- AC_MSG_CHECKING(checking for pcre includes in $pcre_include libs in $pcre_ldflags )
- ;;
- *)
- pcre_include="$withval/include"
- pcre_ldflags="$withval/lib"
- AC_MSG_CHECKING(checking for pcre includes in $withval)
- ;;
- esac
- fi
- fi
-],
-[
- AC_CHECK_PROG(PCRE_CONFIG, pcre-config, pcre-config)
- if test "x$PCRE_CONFIG" != "x"; then
- enable_pcre=yes
- pcre_base_dir="`$PCRE_CONFIG --prefix`"
- pcre_include="`$PCRE_CONFIG --cflags | sed -es/-I//`"
- pcre_ldflags="`$PCRE_CONFIG --libs | sed -es/-lpcre// -es/-L//`"
- fi
-])
-
-if test "x$pcre_base_dir" = "x"; then
- AC_MSG_CHECKING([for pcre location])
- AC_CACHE_VAL(ats_cv_pcre_dir,[
- for dir in /usr/local /usr ; do
- if test -d $dir && ( test -f $dir/include/pcre.h || test -f $dir/include/pcre/pcre.h ); then
- ats_cv_pcre_dir=$dir
- break
- fi
- done
- ])
- pcre_base_dir=$ats_cv_pcre_dir
- if test "x$pcre_base_dir" = "x"; then
- enable_pcre=no
- AC_MSG_RESULT([not found])
- else
- enable_pcre=yes
- pcre_include="$pcre_base_dir/include"
- pcre_ldflags="$pcre_base_dir/lib"
- AC_MSG_RESULT([$pcre_base_dir])
- fi
-else
- AC_MSG_CHECKING(for pcre headers in $pcre_include)
- if test -d $pcre_include && test -d $pcre_ldflags && ( test -f $pcre_include/pcre.h || test -f $pcre_include/pcre/pcre.h ); then
- AC_MSG_RESULT([ok])
- else
- AC_MSG_RESULT([not found])
- fi
-fi
-
-pcreh=0
-pcre_pcreh=0
-if test "$enable_pcre" != "no"; then
- saved_ldflags=$LDFLAGS
- saved_cppflags=$CFLAGS
- pcre_have_headers=0
- pcre_have_libs=0
- if test "$pcre_base_dir" != "/usr"; then
- TS_ADDTO(CFLAGS, [-I${pcre_include}])
- TS_ADDTO(CFLAGS, [-DPCRE_STATIC])
- TS_ADDTO(LDFLAGS, [-L${pcre_ldflags}])
- TS_ADDTO_RPATH(${pcre_ldflags})
- fi
- AC_SEARCH_LIBS([pcre_exec], [pcre], [pcre_have_libs=1])
- if test "$pcre_have_libs" != "0"; then
- AC_CHECK_HEADERS(pcre.h, [pcre_have_headers=1])
- AC_CHECK_HEADERS(pcre/pcre.h, [pcre_have_headers=1])
- fi
- if test "$pcre_have_headers" != "0"; then
- AC_DEFINE(HAVE_LIBPCRE,1,[Compiling with pcre support])
- AC_SUBST(LIBPCRE, [-lpcre])
- else
- enable_pcre=no
- CFLAGS=$saved_cppflags
- LDFLAGS=$saved_ldflags
- fi
-fi
-AC_SUBST(pcreh)
-AC_SUBST(pcre_pcreh)
-])
--- /dev/null
+++ b/m4/pcre2.m4
@@ -0,0 +1,181 @@
+dnl -------------------------------------------------------- -*- autoconf -*-
+dnl Licensed to the Apache Software Foundation (ASF) under one or more
+dnl contributor license agreements. See the NOTICE file distributed with
+dnl this work for additional information regarding copyright ownership.
+dnl The ASF licenses this file to You under the Apache License, Version 2.0
+dnl (the "License"); you may not use this file except in compliance with
+dnl the License. You may obtain a copy of the License at
+dnl
+dnl http://www.apache.org/licenses/LICENSE-2.0
+dnl
+dnl Unless required by applicable law or agreed to in writing, software
+dnl distributed under the License is distributed on an "AS IS" BASIS,
+dnl WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+dnl See the License for the specific language governing permissions and
+dnl limitations under the License.
+
+dnl Modified by Syrone Wong <[email protected]> to support pcre2 8bit variant only
+
+dnl
+dnl TS_ADDTO(variable, value)
+dnl
+dnl Add value to variable
+dnl
+AC_DEFUN([TS_ADDTO], [
+ if test "x$$1" = "x"; then
+ test "x$verbose" = "xyes" && echo " setting $1 to \"$2\""
+ $1="$2"
+ else
+ ats_addto_bugger="$2"
+ for i in $ats_addto_bugger; do
+ ats_addto_duplicate="0"
+ for j in $$1; do
+ if test "x$i" = "x$j"; then
+ ats_addto_duplicate="1"
+ break
+ fi
+ done
+ if test $ats_addto_duplicate = "0"; then
+ test "x$verbose" = "xyes" && echo " adding \"$i\" to $1"
+ $1="$$1 $i"
+ fi
+ done
+ fi
+])dnl
+
+dnl
+dnl TS_ADDTO_RPATH(path)
+dnl
+dnl Adds path to variable with the '-rpath' directive.
+dnl
+AC_DEFUN([TS_ADDTO_RPATH], [
+ AC_MSG_NOTICE([adding $1 to RPATH])
+ TS_ADDTO(LIBTOOL_LINK_FLAGS, [-R$1])
+])dnl
+
+dnl
+dnl pcre2.m4: Trafficserver's pcre2 autoconf macros
+dnl
+
+dnl
+dnl TS_CHECK_PCRE2: look for pcre2 libraries and headers
+dnl
+AC_DEFUN([TS_CHECK_PCRE2], [
+enable_pcre2=no
+AC_ARG_WITH(pcre2, [AC_HELP_STRING([--with-pcre2=DIR],[use a specific pcre2 library])],
+[
+ if test "x$withval" != "xyes" && test "x$withval" != "x"; then
+ pcre2_base_dir="$withval"
+ if test "$withval" != "no"; then
+ enable_pcre2=yes
+ case "$withval" in
+ *":"*)
+ pcre2_include="`echo $withval |sed -e 's/:.*$//'`"
+ pcre2_ldflags="`echo $withval |sed -e 's/^.*://'`"
+ AC_MSG_CHECKING(checking for pcre2 includes in $pcre2_include libs in $pcre2_ldflags )
+ ;;
+ *)
+ pcre2_include="$withval/include"
+ pcre2_ldflags="$withval/lib"
+ AC_MSG_CHECKING(checking for pcre2 includes in $withval)
+ ;;
+ esac
+ fi
+ fi
+],
+[
+ AC_CHECK_PROG(PCRE2_CONFIG, pcre2-config, pcre2-config)
+ if test "x$PCRE2_CONFIG" != "x"; then
+ enable_pcre2=yes
+ pcre2_base_dir="`$PCRE2_CONFIG --prefix`"
+ pcre2_include="`$PCRE2_CONFIG --cflags | sed -es/-I//`"
+ pcre2_ldflags="`$PCRE2_CONFIG --libs8 | sed -es/-lpcre2-8// -es/-L//`"
+ fi
+])
+
+if test "x$pcre2_base_dir" = "x"; then
+ AC_MSG_CHECKING([for pcre2 location])
+ AC_CACHE_VAL(ats_cv_pcre2_dir,[
+ for dir in /usr/local /usr ; do
+ if test -d $dir && ( test -f $dir/include/pcre2.h || test -f $dir/include/pcre2/pcre2.h ); then
+ ats_cv_pcre2_dir=$dir
+ break
+ fi
+ done
+ ])
+ pcre2_base_dir=$ats_cv_pcre2_dir
+ if test "x$pcre2_base_dir" = "x"; then
+ enable_pcre2=no
+ AC_MSG_RESULT([not found])
+ else
+ enable_pcre2=yes
+ pcre2_include="$pcre2_base_dir/include"
+ pcre2_ldflags="$pcre2_base_dir/lib"
+ AC_MSG_RESULT([$pcre2_base_dir])
+ fi
+else
+ AC_MSG_CHECKING(for pcre2 headers in $pcre2_include)
+ if test -d $pcre2_include && test -d $pcre2_ldflags && ( test -f $pcre2_include/pcre2.h || test -f $pcre2_include/pcre2/pcre2.h ); then
+ AC_MSG_RESULT([ok])
+ else
+ AC_MSG_RESULT([not found])
+ fi
+fi
+
+pcre2h=0
+pcre2_pcre2h=0
+if test "$enable_pcre2" != "no"; then
+ saved_ldflags=$LDFLAGS
+ saved_cppflags=$CFLAGS
+ pcre2_have_headers=0
+ pcre2_have_libs=0
+ if test "$pcre2_base_dir" != "/usr"; then
+ TS_ADDTO(CFLAGS, [-I${pcre2_include}])
+ TS_ADDTO(CFLAGS, [-DPCRE2_STATIC])
+ TS_ADDTO(LDFLAGS, [-L${pcre2_ldflags}])
+ TS_ADDTO_RPATH(${pcre2_ldflags})
+ fi
+ AC_SEARCH_LIBS([pcre2_match_8], [pcre2-8], [pcre2_have_libs=1])
+ if test "$pcre2_have_libs" != "0"; then
+ AC_MSG_CHECKING([pcre2.h])
+ AC_COMPILE_IFELSE(
+ [AC_LANG_PROGRAM(
+ [[
+#define PCRE2_CODE_UNIT_WIDTH 8
+#include <pcre2.h>
+ ]],
+ [[
+ ]]
+ )],
+ [pcre2_have_headers=1
+ AC_MSG_RESULT([ok])],
+ [AC_MSG_RESULT([not found])]
+ )
+
+ AC_MSG_CHECKING([pcre2/pcre2.h])
+ AC_COMPILE_IFELSE(
+ [AC_LANG_PROGRAM(
+ [[
+#define PCRE2_CODE_UNIT_WIDTH 8
+#include <pcre2/pcre2.h>
+ ]],
+ [[
+ ]]
+ )],
+ [pcre2_have_headers=1
+ AC_MSG_RESULT([ok])],
+ [AC_MSG_RESULT([not found])]
+ )
+ fi
+ if test "$pcre2_have_headers" != "0"; then
+ AC_DEFINE(HAVE_LIBPCRE2,1,[Compiling with pcre2 support])
+ AC_SUBST(LIBPCRE2, [-lpcre2-8])
+ else
+ enable_pcre2=no
+ CFLAGS=$saved_cppflags
+ LDFLAGS=$saved_ldflags
+ fi
+fi
+AC_SUBST(pcre2h)
+AC_SUBST(pcre2_pcre2h)
+])
--- a/src/rule.c
+++ b/src/rule.c
@@ -1,6 +1,7 @@
/*
* Copyright (c) 2011 and 2012, Dustin Lundquist <[email protected]>
* Copyright (c) 2011 Manuel Kasper <[email protected]>
+ * Copyright (c) 2017 Syrone Wong <[email protected]>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
@@ -74,18 +75,37 @@ add_rule(struct cork_dllist *rules, rule
cork_dllist_add(rules, &rule->entries);
}
+/*
+ * XXX: As pattern and subject are char arguments, they can be straightforwardly
+ * cast to PCRE2_SPTR as we are working in 8-bit code units.
+ */
+
int
init_rule(rule_t *rule)
{
if (rule->pattern_re == NULL) {
- const char *reerr;
- int reerroffset;
+ int errornumber;
+ PCRE2_SIZE erroroffset;
+ rule->pattern_re = pcre2_compile(
+ (PCRE2_SPTR)rule->pattern, /* the pattern */
+ PCRE2_ZERO_TERMINATED, /* indicates pattern is zero-terminated */
+ 0, /* default options */
+ &errornumber, /* for error number */
+ &erroroffset, /* for error offset */
+ NULL); /* use default compile context */
- rule->pattern_re =
- pcre_compile(rule->pattern, 0, &reerr, &reerroffset, NULL);
if (rule->pattern_re == NULL) {
- LOGE("Regex compilation of \"%s\" failed: %s, offset %d",
- rule->pattern, reerr, reerroffset);
+ PCRE2_UCHAR errbuffer[512];
+ pcre2_get_error_message(errornumber, errbuffer, sizeof(errbuffer));
+ LOGE("PCRE2 regex compilation failed at offset %d: %s\n", (int)erroroffset,
+ errbuffer);
+ return 0;
+ }
+
+ rule->pattern_re_match_data = pcre2_match_data_create_from_pattern(rule->pattern_re, NULL);
+
+ if (rule->pattern_re_match_data == NULL) {
+ ERROR("PCRE2: the memory for the block could not be obtained");
return 0;
}
}
@@ -105,8 +125,15 @@ lookup_rule(const struct cork_dllist *ru
cork_dllist_foreach_void(rules, curr, next) {
rule_t *rule = cork_container_of(curr, rule_t, entries);
- if (pcre_exec(rule->pattern_re, NULL,
- name, name_len, 0, 0, NULL, 0) >= 0)
+ if (pcre2_match(
+ rule->pattern_re, /* the compiled pattern */
+ (PCRE2_SPTR)name, /* the subject string */
+ name_len, /* the length of the subject */
+ 0, /* start at offset 0 in the subject */
+ 0, /* default options */
+ rule->pattern_re_match_data, /* block for storing the result */
+ NULL /* use default match context */
+ ) >= 0)
return rule;
}
@@ -127,7 +154,13 @@ free_rule(rule_t *rule)
return;
ss_free(rule->pattern);
- if (rule->pattern_re != NULL)
- pcre_free(rule->pattern_re);
+ if (rule->pattern_re != NULL) {
+ pcre2_code_free(rule->pattern_re); /* data and the compiled pattern. */
+ rule->pattern_re = NULL;
+ }
+ if (rule->pattern_re_match_data != NULL) {
+ pcre2_match_data_free(rule->pattern_re_match_data); /* Release memory used for the match */
+ rule->pattern_re_match_data = NULL;
+ }
ss_free(rule);
}
--- a/src/rule.h
+++ b/src/rule.h
@@ -1,6 +1,7 @@
/*
* Copyright (c) 2011 and 2012, Dustin Lundquist <[email protected]>
* Copyright (c) 2011 Manuel Kasper <[email protected]>
+ * Copyright (c) 2017 Syrone Wong <[email protected]>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
@@ -33,17 +34,27 @@
#include <libcork/ds.h>
-#ifdef HAVE_PCRE_H
-#include <pcre.h>
-#elif HAVE_PCRE_PCRE_H
-#include <pcre/pcre.h>
-#endif
+/*
+ * The PCRE2_CODE_UNIT_WIDTH macro must be defined before including pcre2.h.
+ * For a program that uses only one code unit width, setting it to 8, 16, or 32
+ * makes it possible to use generic function names such as pcre2_compile(). Note
+ * that just changing 8 to 16 (for example) is not sufficient to convert this
+ * program to process 16-bit characters. Even in a fully 16-bit environment, where
+ * string-handling functions such as strcmp() and printf() work with 16-bit
+ * characters, the code for handling the table of named substrings will still need
+ * to be modified.
+ */
+/* we only need to support ASCII chartable, thus set it to 8 */
+#define PCRE2_CODE_UNIT_WIDTH 8
+
+#include <pcre2.h>
typedef struct rule {
char *pattern;
/* Runtime fields */
- pcre *pattern_re;
+ pcre2_code *pattern_re;
+ pcre2_match_data *pattern_re_match_data;
struct cork_dllist_item entries;
} rule_t;
|
2929004360/ruoyi-sign
| 1,857
|
ruoyi-common/src/main/java/com/ruoyi/common/core/page/PageDomain.java
|
package com.ruoyi.common.core.page;
import com.ruoyi.common.utils.StringUtils;
/**
* 分页数据
*
* @author ruoyi
*/
public class PageDomain
{
    /** 当前记录起始索引 — current page number (request-bound). */
    private Integer pageNum;

    /** 每页显示记录数 — records per page. */
    private Integer pageSize;

    /** 排序列 — sort column (camelCase; converted to snake_case for SQL). */
    private String orderByColumn;

    /** 排序的方向desc或者asc — sort direction, "asc" or "desc". */
    private String isAsc = "asc";

    /** 分页参数合理化 — whether out-of-range pages are normalized (PageHelper). */
    private Boolean reasonable = true;

    /**
     * Build the ORDER BY fragment, e.g. {@code "user_name desc"}.
     *
     * NOTE(review): orderByColumn comes from the request and is concatenated
     * into SQL — callers must escape it (e.g. via SqlUtil.escapeOrderBySql)
     * before handing it to the database to avoid SQL injection.
     *
     * @return the order-by clause, or "" when no sort column is set
     */
    public String getOrderBy()
    {
        if (StringUtils.isEmpty(orderByColumn))
        {
            return "";
        }
        return StringUtils.toUnderScoreCase(orderByColumn) + " " + isAsc;
    }

    public Integer getPageNum()
    {
        return pageNum;
    }

    public void setPageNum(Integer pageNum)
    {
        this.pageNum = pageNum;
    }

    public Integer getPageSize()
    {
        return pageSize;
    }

    public void setPageSize(Integer pageSize)
    {
        this.pageSize = pageSize;
    }

    public String getOrderByColumn()
    {
        return orderByColumn;
    }

    public void setOrderByColumn(String orderByColumn)
    {
        this.orderByColumn = orderByColumn;
    }

    public String getIsAsc()
    {
        return isAsc;
    }

    /**
     * Set the sort direction.
     *
     * Accepts the element-ui values "ascending"/"descending" and normalizes
     * them to "asc"/"desc". Security hardening: because isAsc is concatenated
     * into SQL by getOrderBy(), any value other than the two canonical
     * directions is ignored (the previous value is kept).
     *
     * @param isAsc requested direction from the client
     */
    public void setIsAsc(String isAsc)
    {
        if (StringUtils.isNotEmpty(isAsc))
        {
            // 兼容前端排序类型
            if ("ascending".equals(isAsc))
            {
                isAsc = "asc";
            }
            else if ("descending".equals(isAsc))
            {
                isAsc = "desc";
            }
            // Only accept canonical directions; reject arbitrary input.
            if ("asc".equals(isAsc) || "desc".equals(isAsc))
            {
                this.isAsc = isAsc;
            }
        }
    }

    /**
     * @return the reasonable flag, defaulting to TRUE when unset
     */
    public Boolean getReasonable()
    {
        if (StringUtils.isNull(reasonable))
        {
            return Boolean.TRUE;
        }
        return reasonable;
    }

    public void setReasonable(Boolean reasonable)
    {
        this.reasonable = reasonable;
    }
}
|
28harishkumar/blog
| 5,318
|
resources/assets/less/bootstrap/carousel.less
|
//
// Carousel
// --------------------------------------------------
// Wrapper for the slide container and indicators
.carousel {
position: relative;
}
.carousel-inner {
position: relative;
overflow: hidden;
width: 100%;
> .item {
display: none;
position: relative;
.transition(.6s ease-in-out left);
// Account for jankitude on images
> img,
> a > img {
&:extend(.img-responsive);
line-height: 1;
}
// WebKit CSS3 transforms for supported devices
@media all and (transform-3d), (-webkit-transform-3d) {
transition: transform .6s ease-in-out;
backface-visibility: hidden;
perspective: 1000;
&.next,
&.active.right {
transform: translate3d(100%, 0, 0);
left: 0;
}
&.prev,
&.active.left {
transform: translate3d(-100%, 0, 0);
left: 0;
}
&.next.left,
&.prev.right,
&.active {
transform: translate3d(0, 0, 0);
left: 0;
}
}
}
> .active,
> .next,
> .prev {
display: block;
}
> .active {
left: 0;
}
> .next,
> .prev {
position: absolute;
top: 0;
width: 100%;
}
> .next {
left: 100%;
}
> .prev {
left: -100%;
}
> .next.left,
> .prev.right {
left: 0;
}
> .active.left {
left: -100%;
}
> .active.right {
left: 100%;
}
}
// Left/right controls for nav
// ---------------------------
.carousel-control {
position: absolute;
top: 0;
left: 0;
bottom: 0;
width: @carousel-control-width;
.opacity(@carousel-control-opacity);
font-size: @carousel-control-font-size;
color: @carousel-control-color;
text-align: center;
text-shadow: @carousel-text-shadow;
// We can't have this transition here because WebKit cancels the carousel
// animation if you trip this while in the middle of another animation.
// Set gradients for backgrounds
&.left {
#gradient > .horizontal(@start-color: rgba(0,0,0,.5); @end-color: rgba(0,0,0,.0001));
}
&.right {
left: auto;
right: 0;
#gradient > .horizontal(@start-color: rgba(0,0,0,.0001); @end-color: rgba(0,0,0,.5));
}
// Hover/focus state
&:hover,
&:focus {
outline: 0;
color: @carousel-control-color;
text-decoration: none;
.opacity(.9);
}
// Toggles
.icon-prev,
.icon-next,
.glyphicon-chevron-left,
.glyphicon-chevron-right {
position: absolute;
top: 50%;
z-index: 5;
display: inline-block;
}
.icon-prev,
.glyphicon-chevron-left {
left: 50%;
margin-left: -10px;
}
.icon-next,
.glyphicon-chevron-right {
right: 50%;
margin-right: -10px;
}
.icon-prev,
.icon-next {
width: 20px;
height: 20px;
margin-top: -10px;
font-family: serif;
}
.icon-prev {
&:before {
content: '\2039';// SINGLE LEFT-POINTING ANGLE QUOTATION MARK (U+2039)
}
}
.icon-next {
&:before {
content: '\203a';// SINGLE RIGHT-POINTING ANGLE QUOTATION MARK (U+203A)
}
}
}
// Optional indicator pips
//
// Add an unordered list with the following class and add a list item for each
// slide your carousel holds.
.carousel-indicators {
position: absolute;
bottom: 10px;
left: 50%;
z-index: 15;
width: 60%;
margin-left: -30%;
padding-left: 0;
list-style: none;
text-align: center;
li {
display: inline-block;
width: 10px;
height: 10px;
margin: 1px;
text-indent: -999px;
border: 1px solid @carousel-indicator-border-color;
border-radius: 10px;
cursor: pointer;
// IE8-9 hack for event handling
//
// Internet Explorer 8-9 does not support clicks on elements without a set
// `background-color`. We cannot use `filter` since that's not viewed as a
// background color by the browser. Thus, a hack is needed.
//
// For IE8, we set solid black as it doesn't support `rgba()`. For IE9, we
// set alpha transparency for the best results possible.
background-color: #000 \9; // IE8
background-color: rgba(0,0,0,0); // IE9
}
.active {
margin: 0;
width: 12px;
height: 12px;
background-color: @carousel-indicator-active-bg;
}
}
// Optional captions
// -----------------------------
// Hidden by default for smaller viewports
.carousel-caption {
position: absolute;
left: 15%;
right: 15%;
bottom: 20px;
z-index: 10;
padding-top: 20px;
padding-bottom: 20px;
color: @carousel-caption-color;
text-align: center;
text-shadow: @carousel-text-shadow;
& .btn {
text-shadow: none; // No shadow for button elements in carousel-caption
}
}
// Scale up controls for tablets and up
@media screen and (min-width: @screen-sm-min) {
// Scale up the controls a smidge
.carousel-control {
.glyphicon-chevron-left,
.glyphicon-chevron-right,
.icon-prev,
.icon-next {
width: 30px;
height: 30px;
margin-top: -15px;
font-size: 30px;
}
.glyphicon-chevron-left,
.icon-prev {
margin-left: -15px;
}
.glyphicon-chevron-right,
.icon-next {
margin-right: -15px;
}
}
// Show and left align the captions
.carousel-caption {
left: 20%;
right: 20%;
padding-bottom: 30px;
}
// Move up the indicators
.carousel-indicators {
bottom: 20px;
}
}
|
2929004360/ruoyi-sign
| 1,218
|
ruoyi-common/src/main/java/com/ruoyi/common/core/page/TableDataInfo.java
|
package com.ruoyi.common.core.page;
import java.io.Serializable;
import java.util.List;
/**
 * Paged table response payload.
 *
 * <p>Wraps one page of result rows together with the total number of
 * matching records and a status code / message pair for the client.</p>
 *
 * @author ruoyi
 */
public class TableDataInfo implements Serializable
{
    private static final long serialVersionUID = 1L;

    /** Total number of matching records */
    private long total;

    /** Rows of the current page */
    private List<?> rows;

    /** Response status code */
    private int code;

    /** Response message */
    private String msg;

    /**
     * Create an empty payload; fields are populated via the setters.
     */
    public TableDataInfo()
    {
    }

    /**
     * Create a payload for one result page.
     *
     * @param list  rows of the current page
     * @param total total number of matching records
     */
    public TableDataInfo(List<?> list, int total)
    {
        this.rows = list;
        this.total = total;
    }

    public int getCode()
    {
        return this.code;
    }

    public void setCode(int code)
    {
        this.code = code;
    }

    public String getMsg()
    {
        return this.msg;
    }

    public void setMsg(String msg)
    {
        this.msg = msg;
    }

    public long getTotal()
    {
        return this.total;
    }

    public void setTotal(long total)
    {
        this.total = total;
    }

    public List<?> getRows()
    {
        return this.rows;
    }

    public void setRows(List<?> rows)
    {
        this.rows = rows;
    }
}
|
2929004360/ruoyi-sign
| 3,130
|
ruoyi-common/src/main/java/com/ruoyi/common/core/text/StrFormatter.java
|
package com.ruoyi.common.core.text;
import com.ruoyi.common.utils.StringUtils;
/**
 * String formatter.
 *
 * @author ruoyi
 */
public class StrFormatter
{
    // Placeholder token replaced by successive arguments.
    public static final String EMPTY_JSON = "{}";
    public static final char C_BACKSLASH = '\\';
    public static final char C_DELIM_START = '{';
    public static final char C_DELIM_END = '}';

    /**
     * Format a string by replacing each {} placeholder, in order, with the
     * corresponding argument.<br>
     * To emit a literal {} escape the opening brace with \\; to emit a literal
     * backslash before a placeholder use a double escape \\\\.<br>
     * Examples:<br>
     * normal use: format("this is {} for {}", "a", "b") -> this is a for b<br>
     * escaped {}: format("this is \\{} for {}", "a", "b") -> this is \{} for a<br>
     * escaped \:  format("this is \\\\{} for {}", "a", "b") -> this is \a for b<br>
     *
     * @param strPattern template string containing {} placeholders
     * @param argArray   arguments substituted in order
     * @return the formatted result
     */
    public static String format(final String strPattern, final Object... argArray)
    {
        if (StringUtils.isEmpty(strPattern) || StringUtils.isEmpty(argArray))
        {
            return strPattern;
        }
        final int strPatternLength = strPattern.length();
        // Pre-size the buffer to reduce reallocations.
        StringBuilder sbuf = new StringBuilder(strPatternLength + 50);
        int handledPosition = 0;
        int delimIndex;// position of the next placeholder
        for (int argIndex = 0; argIndex < argArray.length; argIndex++)
        {
            delimIndex = strPattern.indexOf(EMPTY_JSON, handledPosition);
            if (delimIndex == -1)
            {
                if (handledPosition == 0)
                {
                    // No placeholder at all: return the template unchanged.
                    return strPattern;
                }
                else
                { // Remainder of the template has no more placeholders; append it and finish.
                    sbuf.append(strPattern, handledPosition, strPatternLength);
                    return sbuf.toString();
                }
            }
            else
            {
                if (delimIndex > 0 && strPattern.charAt(delimIndex - 1) == C_BACKSLASH)
                {
                    if (delimIndex > 1 && strPattern.charAt(delimIndex - 2) == C_BACKSLASH)
                    {
                        // The escape character is itself escaped, so the placeholder is still active.
                        sbuf.append(strPattern, handledPosition, delimIndex - 1);
                        sbuf.append(Convert.utf8Str(argArray[argIndex]));
                        handledPosition = delimIndex + 2;
                    }
                    else
                    {
                        // The placeholder is escaped: emit a literal '{' and retry this argument.
                        argIndex--;
                        sbuf.append(strPattern, handledPosition, delimIndex - 1);
                        sbuf.append(C_DELIM_START);
                        handledPosition = delimIndex + 1;
                    }
                }
                else
                {
                    // Normal placeholder: substitute the argument.
                    sbuf.append(strPattern, handledPosition, delimIndex);
                    sbuf.append(Convert.utf8Str(argArray[argIndex]));
                    handledPosition = delimIndex + 2;
                }
            }
        }
        // Append everything after the last placeholder.
        sbuf.append(strPattern, handledPosition, strPattern.length());
        return sbuf.toString();
    }
}
|
2929004360/ruoyi-sign
| 2,154
|
ruoyi-common/src/main/java/com/ruoyi/common/core/text/CharsetKit.java
|
package com.ruoyi.common.core.text;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import com.ruoyi.common.utils.StringUtils;
/**
 * Charset utilities.
 *
 * @author ruoyi
 */
public class CharsetKit
{
    /** ISO-8859-1 */
    public static final String ISO_8859_1 = "ISO-8859-1";
    /** UTF-8 */
    public static final String UTF_8 = "UTF-8";
    /** GBK */
    public static final String GBK = "GBK";

    /** ISO-8859-1 */
    public static final Charset CHARSET_ISO_8859_1 = Charset.forName(ISO_8859_1);
    /** UTF-8 */
    public static final Charset CHARSET_UTF_8 = Charset.forName(UTF_8);
    /** GBK */
    public static final Charset CHARSET_GBK = Charset.forName(GBK);

    /**
     * Resolve a charset name to a Charset object.
     *
     * @param charset charset name; when empty the platform default charset is used
     * @return the resolved Charset
     */
    public static Charset charset(String charset)
    {
        return StringUtils.isEmpty(charset) ? Charset.defaultCharset() : Charset.forName(charset);
    }

    /**
     * Re-encode a string from one charset to another.
     *
     * <p>Fix: this overload documented ISO-8859-1/UTF-8 defaults but passed
     * the names straight to {@code Charset.forName}, which throws on a
     * null/empty name instead of defaulting. Empty names now fall through to
     * the Charset overload, which applies the documented defaults.</p>
     *
     * @param source      the string to convert
     * @param srcCharset  source charset name, defaults to ISO-8859-1 when empty
     * @param destCharset destination charset name, defaults to UTF-8 when empty
     * @return the converted string
     */
    public static String convert(String source, String srcCharset, String destCharset)
    {
        return convert(source,
            StringUtils.isEmpty(srcCharset) ? null : Charset.forName(srcCharset),
            StringUtils.isEmpty(destCharset) ? null : Charset.forName(destCharset));
    }

    /**
     * Re-encode a string from one charset to another.
     *
     * @param source      the string to convert
     * @param srcCharset  source charset, defaults to ISO-8859-1 when null
     * @param destCharset destination charset, defaults to UTF-8 when null
     * @return the converted string; returned unchanged when the source is
     *         empty or both charsets are equal
     */
    public static String convert(String source, Charset srcCharset, Charset destCharset)
    {
        if (null == srcCharset)
        {
            srcCharset = StandardCharsets.ISO_8859_1;
        }
        if (null == destCharset)
        {
            destCharset = StandardCharsets.UTF_8;
        }
        if (StringUtils.isEmpty(source) || srcCharset.equals(destCharset))
        {
            return source;
        }
        // Decode with the source charset's bytes, re-interpret with the destination.
        return new String(source.getBytes(srcCharset), destCharset);
    }

    /**
     * @return the name of the platform default charset
     */
    public static String systemCharset()
    {
        return Charset.defaultCharset().name();
    }
}
|
281677160/openwrt-package
| 3,300
|
luci-app-ssr-plus/shadowsocks-libev/patches/102-deprecate-load16-be-replace-with-ntohs.patch
|
From f4ee43fa27e00a573d90a8cac68f12655570bbf7 Mon Sep 17 00:00:00 2001
From: lwb1978 <[email protected]>
Date: Tue, 4 Feb 2025 15:51:17 +0800
Subject: [PATCH] Deprecate load16_be() function in favor to ntohs() function
---
src/aead.c | 2 +-
src/local.c | 6 +++---
src/server.c | 2 +-
src/udprelay.c | 2 +-
src/utils.c | 8 --------
src/utils.h | 1 -
6 files changed, 6 insertions(+), 15 deletions(-)
--- a/src/aead.c
+++ b/src/aead.c
@@ -605,7 +605,7 @@ aead_chunk_decrypt(cipher_ctx_t *ctx, ui
return CRYPTO_ERROR;
assert(*plen == CHUNK_SIZE_LEN);
- mlen = load16_be(len_buf);
+ mlen = ntohs(*(uint16_t*)len_buf);
mlen = mlen & CHUNK_SIZE_MASK;
if (mlen == 0)
--- a/src/local.c
+++ b/src/local.c
@@ -390,7 +390,7 @@ server_handshake(EV_P_ ev_io *w, buffer_
abuf->len += in_addr_len + 2;
if (acl || verbose) {
- uint16_t p = load16_be(buf->data + request_len + in_addr_len);
+ uint16_t p = ntohs(*(uint16_t*)(buf->data + request_len + in_addr_len));
if (!inet_ntop(AF_INET, (const void *)(buf->data + request_len),
ip, INET_ADDRSTRLEN)) {
LOGI("inet_ntop(AF_INET): %s", strerror(errno));
@@ -408,7 +408,7 @@ server_handshake(EV_P_ ev_io *w, buffer_
abuf->len += name_len + 2;
if (acl || verbose) {
- uint16_t p = load16_be(buf->data + request_len + 1 + name_len);
+ uint16_t p = ntohs(*(uint16_t*)(buf->data + request_len + 1 + name_len));
memcpy(host, buf->data + request_len + 1, name_len);
host[name_len] = '\0';
sprintf(port, "%d", p);
@@ -422,7 +422,7 @@ server_handshake(EV_P_ ev_io *w, buffer_
abuf->len += in6_addr_len + 2;
if (acl || verbose) {
- uint16_t p = load16_be(buf->data + request_len + in6_addr_len);
+ uint16_t p = ntohs(*(uint16_t*)(buf->data + request_len + in6_addr_len));
if (!inet_ntop(AF_INET6, (const void *)(buf->data + request_len),
ip, INET6_ADDRSTRLEN)) {
LOGI("inet_ntop(AF_INET6): %s", strerror(errno));
--- a/src/server.c
+++ b/src/server.c
@@ -1137,7 +1137,7 @@ server_recv_cb(EV_P_ ev_io *w, int reven
return;
}
- port = ntohs(load16_be(server->buf->data + offset));
+ port = *(uint16_t*)(server->buf->data + offset);
offset += 2;
--- a/src/udprelay.c
+++ b/src/udprelay.c
@@ -316,7 +316,7 @@ parse_udprelay_header(const char *buf, c
}
if (port != NULL) {
- sprintf(port, "%d", load16_be(buf + offset));
+ sprintf(port, "%d", ntohs(*(uint16_t*)(buf + offset)));
}
offset += 2;
--- a/src/utils.c
+++ b/src/utils.c
@@ -571,14 +571,6 @@ get_default_conf(void)
#endif
}
-uint16_t
-load16_be(const void *s)
-{
- const uint8_t *in = (const uint8_t *)s;
- return ((uint16_t)in[0] << 8)
- | ((uint16_t)in[1]);
-}
-
int
get_mptcp(int enable)
{
--- a/src/utils.h
+++ b/src/utils.h
@@ -249,7 +249,6 @@ void *ss_realloc(void *ptr, size_t new_s
int ss_is_ipv6addr(const char *addr);
char *get_default_conf(void);
-uint16_t load16_be(const void *s);
int get_mptcp(int enable);
#endif // _UTILS_H
|
28harishkumar/blog
| 14,879
|
resources/assets/less/bootstrap/glyphicons.less
|
//
// Glyphicons for Bootstrap
//
// Since icons are fonts, they can be placed anywhere text is placed and are
// thus automatically sized to match the surrounding child. To use, create an
// inline element with the appropriate classes, like so:
//
// <a href="#"><span class="glyphicon glyphicon-star"></span> Star</a>
// Import the fonts
@font-face {
font-family: 'Glyphicons Halflings';
src: url('@{icon-font-path}@{icon-font-name}.eot');
src: url('@{icon-font-path}@{icon-font-name}.eot?#iefix') format('embedded-opentype'),
url('@{icon-font-path}@{icon-font-name}.woff') format('woff'),
url('@{icon-font-path}@{icon-font-name}.ttf') format('truetype'),
url('@{icon-font-path}@{icon-font-name}.svg#@{icon-font-svg-id}') format('svg');
}
// Catchall baseclass
.glyphicon {
position: relative;
top: 1px;
display: inline-block;
font-family: 'Glyphicons Halflings';
font-style: normal;
font-weight: normal;
line-height: 1;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
}
// Individual icons
.glyphicon-asterisk { &:before { content: "\2a"; } }
.glyphicon-plus { &:before { content: "\2b"; } }
.glyphicon-euro,
.glyphicon-eur { &:before { content: "\20ac"; } }
.glyphicon-minus { &:before { content: "\2212"; } }
.glyphicon-cloud { &:before { content: "\2601"; } }
.glyphicon-envelope { &:before { content: "\2709"; } }
.glyphicon-pencil { &:before { content: "\270f"; } }
.glyphicon-glass { &:before { content: "\e001"; } }
.glyphicon-music { &:before { content: "\e002"; } }
.glyphicon-search { &:before { content: "\e003"; } }
.glyphicon-heart { &:before { content: "\e005"; } }
.glyphicon-star { &:before { content: "\e006"; } }
.glyphicon-star-empty { &:before { content: "\e007"; } }
.glyphicon-user { &:before { content: "\e008"; } }
.glyphicon-film { &:before { content: "\e009"; } }
.glyphicon-th-large { &:before { content: "\e010"; } }
.glyphicon-th { &:before { content: "\e011"; } }
.glyphicon-th-list { &:before { content: "\e012"; } }
.glyphicon-ok { &:before { content: "\e013"; } }
.glyphicon-remove { &:before { content: "\e014"; } }
.glyphicon-zoom-in { &:before { content: "\e015"; } }
.glyphicon-zoom-out { &:before { content: "\e016"; } }
.glyphicon-off { &:before { content: "\e017"; } }
.glyphicon-signal { &:before { content: "\e018"; } }
.glyphicon-cog { &:before { content: "\e019"; } }
.glyphicon-trash { &:before { content: "\e020"; } }
.glyphicon-home { &:before { content: "\e021"; } }
.glyphicon-file { &:before { content: "\e022"; } }
.glyphicon-time { &:before { content: "\e023"; } }
.glyphicon-road { &:before { content: "\e024"; } }
.glyphicon-download-alt { &:before { content: "\e025"; } }
.glyphicon-download { &:before { content: "\e026"; } }
.glyphicon-upload { &:before { content: "\e027"; } }
.glyphicon-inbox { &:before { content: "\e028"; } }
.glyphicon-play-circle { &:before { content: "\e029"; } }
.glyphicon-repeat { &:before { content: "\e030"; } }
.glyphicon-refresh { &:before { content: "\e031"; } }
.glyphicon-list-alt { &:before { content: "\e032"; } }
.glyphicon-lock { &:before { content: "\e033"; } }
.glyphicon-flag { &:before { content: "\e034"; } }
.glyphicon-headphones { &:before { content: "\e035"; } }
.glyphicon-volume-off { &:before { content: "\e036"; } }
.glyphicon-volume-down { &:before { content: "\e037"; } }
.glyphicon-volume-up { &:before { content: "\e038"; } }
.glyphicon-qrcode { &:before { content: "\e039"; } }
.glyphicon-barcode { &:before { content: "\e040"; } }
.glyphicon-tag { &:before { content: "\e041"; } }
.glyphicon-tags { &:before { content: "\e042"; } }
.glyphicon-book { &:before { content: "\e043"; } }
.glyphicon-bookmark { &:before { content: "\e044"; } }
.glyphicon-print { &:before { content: "\e045"; } }
.glyphicon-camera { &:before { content: "\e046"; } }
.glyphicon-font { &:before { content: "\e047"; } }
.glyphicon-bold { &:before { content: "\e048"; } }
.glyphicon-italic { &:before { content: "\e049"; } }
.glyphicon-text-height { &:before { content: "\e050"; } }
.glyphicon-text-width { &:before { content: "\e051"; } }
.glyphicon-align-left { &:before { content: "\e052"; } }
.glyphicon-align-center { &:before { content: "\e053"; } }
.glyphicon-align-right { &:before { content: "\e054"; } }
.glyphicon-align-justify { &:before { content: "\e055"; } }
.glyphicon-list { &:before { content: "\e056"; } }
.glyphicon-indent-left { &:before { content: "\e057"; } }
.glyphicon-indent-right { &:before { content: "\e058"; } }
.glyphicon-facetime-video { &:before { content: "\e059"; } }
.glyphicon-picture { &:before { content: "\e060"; } }
.glyphicon-map-marker { &:before { content: "\e062"; } }
.glyphicon-adjust { &:before { content: "\e063"; } }
.glyphicon-tint { &:before { content: "\e064"; } }
.glyphicon-edit { &:before { content: "\e065"; } }
.glyphicon-share { &:before { content: "\e066"; } }
.glyphicon-check { &:before { content: "\e067"; } }
.glyphicon-move { &:before { content: "\e068"; } }
.glyphicon-step-backward { &:before { content: "\e069"; } }
.glyphicon-fast-backward { &:before { content: "\e070"; } }
.glyphicon-backward { &:before { content: "\e071"; } }
.glyphicon-play { &:before { content: "\e072"; } }
.glyphicon-pause { &:before { content: "\e073"; } }
.glyphicon-stop { &:before { content: "\e074"; } }
.glyphicon-forward { &:before { content: "\e075"; } }
.glyphicon-fast-forward { &:before { content: "\e076"; } }
.glyphicon-step-forward { &:before { content: "\e077"; } }
.glyphicon-eject { &:before { content: "\e078"; } }
.glyphicon-chevron-left { &:before { content: "\e079"; } }
.glyphicon-chevron-right { &:before { content: "\e080"; } }
.glyphicon-plus-sign { &:before { content: "\e081"; } }
.glyphicon-minus-sign { &:before { content: "\e082"; } }
.glyphicon-remove-sign { &:before { content: "\e083"; } }
.glyphicon-ok-sign { &:before { content: "\e084"; } }
.glyphicon-question-sign { &:before { content: "\e085"; } }
.glyphicon-info-sign { &:before { content: "\e086"; } }
.glyphicon-screenshot { &:before { content: "\e087"; } }
.glyphicon-remove-circle { &:before { content: "\e088"; } }
.glyphicon-ok-circle { &:before { content: "\e089"; } }
.glyphicon-ban-circle { &:before { content: "\e090"; } }
.glyphicon-arrow-left { &:before { content: "\e091"; } }
.glyphicon-arrow-right { &:before { content: "\e092"; } }
.glyphicon-arrow-up { &:before { content: "\e093"; } }
.glyphicon-arrow-down { &:before { content: "\e094"; } }
.glyphicon-share-alt { &:before { content: "\e095"; } }
.glyphicon-resize-full { &:before { content: "\e096"; } }
.glyphicon-resize-small { &:before { content: "\e097"; } }
.glyphicon-exclamation-sign { &:before { content: "\e101"; } }
.glyphicon-gift { &:before { content: "\e102"; } }
.glyphicon-leaf { &:before { content: "\e103"; } }
.glyphicon-fire { &:before { content: "\e104"; } }
.glyphicon-eye-open { &:before { content: "\e105"; } }
.glyphicon-eye-close { &:before { content: "\e106"; } }
.glyphicon-warning-sign { &:before { content: "\e107"; } }
.glyphicon-plane { &:before { content: "\e108"; } }
.glyphicon-calendar { &:before { content: "\e109"; } }
.glyphicon-random { &:before { content: "\e110"; } }
.glyphicon-comment { &:before { content: "\e111"; } }
.glyphicon-magnet { &:before { content: "\e112"; } }
.glyphicon-chevron-up { &:before { content: "\e113"; } }
.glyphicon-chevron-down { &:before { content: "\e114"; } }
.glyphicon-retweet { &:before { content: "\e115"; } }
.glyphicon-shopping-cart { &:before { content: "\e116"; } }
.glyphicon-folder-close { &:before { content: "\e117"; } }
.glyphicon-folder-open { &:before { content: "\e118"; } }
.glyphicon-resize-vertical { &:before { content: "\e119"; } }
.glyphicon-resize-horizontal { &:before { content: "\e120"; } }
.glyphicon-hdd { &:before { content: "\e121"; } }
.glyphicon-bullhorn { &:before { content: "\e122"; } }
.glyphicon-bell { &:before { content: "\e123"; } }
.glyphicon-certificate { &:before { content: "\e124"; } }
.glyphicon-thumbs-up { &:before { content: "\e125"; } }
.glyphicon-thumbs-down { &:before { content: "\e126"; } }
.glyphicon-hand-right { &:before { content: "\e127"; } }
.glyphicon-hand-left { &:before { content: "\e128"; } }
.glyphicon-hand-up { &:before { content: "\e129"; } }
.glyphicon-hand-down { &:before { content: "\e130"; } }
.glyphicon-circle-arrow-right { &:before { content: "\e131"; } }
.glyphicon-circle-arrow-left { &:before { content: "\e132"; } }
.glyphicon-circle-arrow-up { &:before { content: "\e133"; } }
.glyphicon-circle-arrow-down { &:before { content: "\e134"; } }
.glyphicon-globe { &:before { content: "\e135"; } }
.glyphicon-wrench { &:before { content: "\e136"; } }
.glyphicon-tasks { &:before { content: "\e137"; } }
.glyphicon-filter { &:before { content: "\e138"; } }
.glyphicon-briefcase { &:before { content: "\e139"; } }
.glyphicon-fullscreen { &:before { content: "\e140"; } }
.glyphicon-dashboard { &:before { content: "\e141"; } }
.glyphicon-paperclip { &:before { content: "\e142"; } }
.glyphicon-heart-empty { &:before { content: "\e143"; } }
.glyphicon-link { &:before { content: "\e144"; } }
.glyphicon-phone { &:before { content: "\e145"; } }
.glyphicon-pushpin { &:before { content: "\e146"; } }
.glyphicon-usd { &:before { content: "\e148"; } }
.glyphicon-gbp { &:before { content: "\e149"; } }
.glyphicon-sort { &:before { content: "\e150"; } }
.glyphicon-sort-by-alphabet { &:before { content: "\e151"; } }
.glyphicon-sort-by-alphabet-alt { &:before { content: "\e152"; } }
.glyphicon-sort-by-order { &:before { content: "\e153"; } }
.glyphicon-sort-by-order-alt { &:before { content: "\e154"; } }
.glyphicon-sort-by-attributes { &:before { content: "\e155"; } }
.glyphicon-sort-by-attributes-alt { &:before { content: "\e156"; } }
.glyphicon-unchecked { &:before { content: "\e157"; } }
.glyphicon-expand { &:before { content: "\e158"; } }
.glyphicon-collapse-down { &:before { content: "\e159"; } }
.glyphicon-collapse-up { &:before { content: "\e160"; } }
.glyphicon-log-in { &:before { content: "\e161"; } }
.glyphicon-flash { &:before { content: "\e162"; } }
.glyphicon-log-out { &:before { content: "\e163"; } }
.glyphicon-new-window { &:before { content: "\e164"; } }
.glyphicon-record { &:before { content: "\e165"; } }
.glyphicon-save { &:before { content: "\e166"; } }
.glyphicon-open { &:before { content: "\e167"; } }
.glyphicon-saved { &:before { content: "\e168"; } }
.glyphicon-import { &:before { content: "\e169"; } }
.glyphicon-export { &:before { content: "\e170"; } }
.glyphicon-send { &:before { content: "\e171"; } }
.glyphicon-floppy-disk { &:before { content: "\e172"; } }
.glyphicon-floppy-saved { &:before { content: "\e173"; } }
.glyphicon-floppy-remove { &:before { content: "\e174"; } }
.glyphicon-floppy-save { &:before { content: "\e175"; } }
.glyphicon-floppy-open { &:before { content: "\e176"; } }
.glyphicon-credit-card { &:before { content: "\e177"; } }
.glyphicon-transfer { &:before { content: "\e178"; } }
.glyphicon-cutlery { &:before { content: "\e179"; } }
.glyphicon-header { &:before { content: "\e180"; } }
.glyphicon-compressed { &:before { content: "\e181"; } }
.glyphicon-earphone { &:before { content: "\e182"; } }
.glyphicon-phone-alt { &:before { content: "\e183"; } }
.glyphicon-tower { &:before { content: "\e184"; } }
.glyphicon-stats { &:before { content: "\e185"; } }
.glyphicon-sd-video { &:before { content: "\e186"; } }
.glyphicon-hd-video { &:before { content: "\e187"; } }
.glyphicon-subtitles { &:before { content: "\e188"; } }
.glyphicon-sound-stereo { &:before { content: "\e189"; } }
.glyphicon-sound-dolby { &:before { content: "\e190"; } }
.glyphicon-sound-5-1 { &:before { content: "\e191"; } }
.glyphicon-sound-6-1 { &:before { content: "\e192"; } }
.glyphicon-sound-7-1 { &:before { content: "\e193"; } }
.glyphicon-copyright-mark { &:before { content: "\e194"; } }
.glyphicon-registration-mark { &:before { content: "\e195"; } }
.glyphicon-cloud-download { &:before { content: "\e197"; } }
.glyphicon-cloud-upload { &:before { content: "\e198"; } }
.glyphicon-tree-conifer { &:before { content: "\e199"; } }
.glyphicon-tree-deciduous { &:before { content: "\e200"; } }
|
2929004360/ruoyi-sign
| 22,679
|
ruoyi-common/src/main/java/com/ruoyi/common/core/text/Convert.java
|
package com.ruoyi.common.core.text;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.math.RoundingMode;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.text.NumberFormat;
import java.util.Set;
import com.ruoyi.common.utils.StringUtils;
import org.apache.commons.lang3.ArrayUtils;
/**
* 类型转换器
*
* @author ruoyi
*/
public class Convert
{
/**
* 转换为字符串<br>
* 如果给定的值为null,或者转换失败,返回默认值<br>
* 转换失败不会报错
*
* @param value 被转换的值
* @param defaultValue 转换错误时的默认值
* @return 结果
*/
public static String toStr(Object value, String defaultValue)
{
if (null == value)
{
return defaultValue;
}
if (value instanceof String)
{
return (String) value;
}
return value.toString();
}
/**
* 转换为字符串<br>
* 如果给定的值为<code>null</code>,或者转换失败,返回默认值<code>null</code><br>
* 转换失败不会报错
*
* @param value 被转换的值
* @return 结果
*/
public static String toStr(Object value)
{
return toStr(value, null);
}
/**
* 转换为字符<br>
* 如果给定的值为null,或者转换失败,返回默认值<br>
* 转换失败不会报错
*
* @param value 被转换的值
* @param defaultValue 转换错误时的默认值
* @return 结果
*/
public static Character toChar(Object value, Character defaultValue)
{
if (null == value)
{
return defaultValue;
}
if (value instanceof Character)
{
return (Character) value;
}
final String valueStr = toStr(value, null);
return StringUtils.isEmpty(valueStr) ? defaultValue : valueStr.charAt(0);
}
/**
* 转换为字符<br>
* 如果给定的值为<code>null</code>,或者转换失败,返回默认值<code>null</code><br>
* 转换失败不会报错
*
* @param value 被转换的值
* @return 结果
*/
public static Character toChar(Object value)
{
return toChar(value, null);
}
/**
* 转换为byte<br>
* 如果给定的值为<code>null</code>,或者转换失败,返回默认值<br>
* 转换失败不会报错
*
* @param value 被转换的值
* @param defaultValue 转换错误时的默认值
* @return 结果
*/
public static Byte toByte(Object value, Byte defaultValue)
{
if (value == null)
{
return defaultValue;
}
if (value instanceof Byte)
{
return (Byte) value;
}
if (value instanceof Number)
{
return ((Number) value).byteValue();
}
final String valueStr = toStr(value, null);
if (StringUtils.isEmpty(valueStr))
{
return defaultValue;
}
try
{
return Byte.parseByte(valueStr);
}
catch (Exception e)
{
return defaultValue;
}
}
/**
* 转换为byte<br>
* 如果给定的值为<code>null</code>,或者转换失败,返回默认值<code>null</code><br>
* 转换失败不会报错
*
* @param value 被转换的值
* @return 结果
*/
public static Byte toByte(Object value)
{
return toByte(value, null);
}
/**
* 转换为Short<br>
* 如果给定的值为<code>null</code>,或者转换失败,返回默认值<br>
* 转换失败不会报错
*
* @param value 被转换的值
* @param defaultValue 转换错误时的默认值
* @return 结果
*/
public static Short toShort(Object value, Short defaultValue)
{
if (value == null)
{
return defaultValue;
}
if (value instanceof Short)
{
return (Short) value;
}
if (value instanceof Number)
{
return ((Number) value).shortValue();
}
final String valueStr = toStr(value, null);
if (StringUtils.isEmpty(valueStr))
{
return defaultValue;
}
try
{
return Short.parseShort(valueStr.trim());
}
catch (Exception e)
{
return defaultValue;
}
}
/**
* 转换为Short<br>
* 如果给定的值为<code>null</code>,或者转换失败,返回默认值<code>null</code><br>
* 转换失败不会报错
*
* @param value 被转换的值
* @return 结果
*/
public static Short toShort(Object value)
{
return toShort(value, null);
}
/**
* 转换为Number<br>
* 如果给定的值为空,或者转换失败,返回默认值<br>
* 转换失败不会报错
*
* @param value 被转换的值
* @param defaultValue 转换错误时的默认值
* @return 结果
*/
public static Number toNumber(Object value, Number defaultValue)
{
if (value == null)
{
return defaultValue;
}
if (value instanceof Number)
{
return (Number) value;
}
final String valueStr = toStr(value, null);
if (StringUtils.isEmpty(valueStr))
{
return defaultValue;
}
try
{
return NumberFormat.getInstance().parse(valueStr);
}
catch (Exception e)
{
return defaultValue;
}
}
/**
* 转换为Number<br>
* 如果给定的值为空,或者转换失败,返回默认值<code>null</code><br>
* 转换失败不会报错
*
* @param value 被转换的值
* @return 结果
*/
public static Number toNumber(Object value)
{
return toNumber(value, null);
}
/**
* 转换为int<br>
* 如果给定的值为空,或者转换失败,返回默认值<br>
* 转换失败不会报错
*
* @param value 被转换的值
* @param defaultValue 转换错误时的默认值
* @return 结果
*/
public static Integer toInt(Object value, Integer defaultValue)
{
if (value == null)
{
return defaultValue;
}
if (value instanceof Integer)
{
return (Integer) value;
}
if (value instanceof Number)
{
return ((Number) value).intValue();
}
final String valueStr = toStr(value, null);
if (StringUtils.isEmpty(valueStr))
{
return defaultValue;
}
try
{
return Integer.parseInt(valueStr.trim());
}
catch (Exception e)
{
return defaultValue;
}
}
/**
* 转换为int<br>
* 如果给定的值为<code>null</code>,或者转换失败,返回默认值<code>null</code><br>
* 转换失败不会报错
*
* @param value 被转换的值
* @return 结果
*/
public static Integer toInt(Object value)
{
return toInt(value, null);
}
/**
* 转换为Integer数组<br>
*
* @param str 被转换的值
* @return 结果
*/
public static Integer[] toIntArray(String str)
{
return toIntArray(",", str);
}
/**
* 转换为Long数组<br>
*
* @param str 被转换的值
* @return 结果
*/
public static Long[] toLongArray(String str)
{
return toLongArray(",", str);
}
/**
* 转换为Integer数组<br>
*
* @param split 分隔符
* @param split 被转换的值
* @return 结果
*/
public static Integer[] toIntArray(String split, String str)
{
if (StringUtils.isEmpty(str))
{
return new Integer[] {};
}
String[] arr = str.split(split);
final Integer[] ints = new Integer[arr.length];
for (int i = 0; i < arr.length; i++)
{
final Integer v = toInt(arr[i], 0);
ints[i] = v;
}
return ints;
}
/**
* 转换为Long数组<br>
*
* @param split 分隔符
* @param str 被转换的值
* @return 结果
*/
public static Long[] toLongArray(String split, String str)
{
if (StringUtils.isEmpty(str))
{
return new Long[] {};
}
String[] arr = str.split(split);
final Long[] longs = new Long[arr.length];
for (int i = 0; i < arr.length; i++)
{
final Long v = toLong(arr[i], null);
longs[i] = v;
}
return longs;
}
/**
* 转换为String数组<br>
*
* @param str 被转换的值
* @return 结果
*/
public static String[] toStrArray(String str)
{
if (StringUtils.isEmpty(str))
{
return new String[] {};
}
return toStrArray(",", str);
}
/**
* 转换为String数组<br>
*
* @param split 分隔符
* @param split 被转换的值
* @return 结果
*/
public static String[] toStrArray(String split, String str)
{
return str.split(split);
}
/**
 * Convert to Long.
 * Returns the default value when the input is null/empty or parsing fails;
 * parsing errors are never thrown.
 *
 * @param value        the value to convert
 * @param defaultValue the value returned on conversion failure
 * @return the converted value
 */
public static Long toLong(Object value, Long defaultValue)
{
    if (value == null)
    {
        return defaultValue;
    }
    if (value instanceof Long)
    {
        return (Long) value;
    }
    if (value instanceof Number)
    {
        return ((Number) value).longValue();
    }
    final String valueStr = toStr(value, null);
    if (StringUtils.isEmpty(valueStr))
    {
        return defaultValue;
    }
    try
    {
        // Parse via BigDecimal so scientific notation (e.g. "1E3") is accepted.
        return new BigDecimal(valueStr.trim()).longValue();
    }
    catch (Exception e)
    {
        return defaultValue;
    }
}
/**
 * Convert to Long.
 * Returns null when the input is null or parsing fails; never throws.
 *
 * @param value the value to convert
 * @return the converted value, or null
 */
public static Long toLong(Object value)
{
    return toLong(value, null);
}
/**
 * Convert to Double.
 * Returns the default value when the input is null/empty or parsing fails;
 * parsing errors are never thrown.
 *
 * @param value        the value to convert
 * @param defaultValue the value returned on conversion failure
 * @return the converted value
 */
public static Double toDouble(Object value, Double defaultValue)
{
    if (value == null)
    {
        return defaultValue;
    }
    if (value instanceof Double)
    {
        return (Double) value;
    }
    if (value instanceof Number)
    {
        return ((Number) value).doubleValue();
    }
    final String valueStr = toStr(value, null);
    if (StringUtils.isEmpty(valueStr))
    {
        return defaultValue;
    }
    try
    {
        // Parse via BigDecimal so scientific notation is accepted.
        return new BigDecimal(valueStr.trim()).doubleValue();
    }
    catch (Exception e)
    {
        return defaultValue;
    }
}
/**
 * Convert to Double.
 * Returns null when the input is null/empty or parsing fails; never throws.
 *
 * @param value the value to convert
 * @return the converted value, or null
 */
public static Double toDouble(Object value)
{
    return toDouble(value, null);
}
/**
 * Convert to Float.
 * Returns the default value when the input is null/empty or parsing fails;
 * parsing errors are never thrown.
 *
 * @param value        the value to convert
 * @param defaultValue the value returned on conversion failure
 * @return the converted value
 */
public static Float toFloat(Object value, Float defaultValue)
{
    if (value == null)
    {
        return defaultValue;
    }
    if (value instanceof Float)
    {
        return (Float) value;
    }
    if (value instanceof Number)
    {
        return ((Number) value).floatValue();
    }
    final String valueStr = toStr(value, null);
    if (StringUtils.isEmpty(valueStr))
    {
        return defaultValue;
    }
    try
    {
        return Float.parseFloat(valueStr.trim());
    }
    catch (Exception e)
    {
        return defaultValue;
    }
}
/**
 * Convert to Float.
 * Returns null when the input is null/empty or parsing fails; never throws.
 *
 * @param value the value to convert
 * @return the converted value, or null
 */
public static Float toFloat(Object value)
{
    return toFloat(value, null);
}
/**
 * Convert to Boolean.
 * Recognized strings (trimmed, case-insensitive): true/yes/ok/1 -> true,
 * false/no/0 -> false. Anything else yields the default value; never throws.
 *
 * @param value        the value to convert
 * @param defaultValue the value returned on conversion failure
 * @return the converted value
 */
public static Boolean toBool(Object value, Boolean defaultValue)
{
    if (value == null)
    {
        return defaultValue;
    }
    if (value instanceof Boolean)
    {
        return (Boolean) value;
    }
    String valueStr = toStr(value, null);
    if (StringUtils.isEmpty(valueStr))
    {
        return defaultValue;
    }
    valueStr = valueStr.trim().toLowerCase();
    switch (valueStr)
    {
        case "true":
        case "yes":
        case "ok":
        case "1":
            return true;
        case "false":
        case "no":
        case "0":
            return false;
        default:
            return defaultValue;
    }
}
/**
 * Convert to Boolean.
 * Returns null when the input is null/empty or unrecognized; never throws.
 *
 * @param value the value to convert
 * @return the converted value, or null
 */
public static Boolean toBool(Object value)
{
    return toBool(value, null);
}
/**
 * Convert to an Enum constant of the given class.
 * Returns the default value when the input is null/empty or no constant
 * with that name exists; never throws.
 *
 * @param clazz        the Enum class
 * @param value        the value to convert (an instance of the enum, or a name)
 * @param defaultValue the value returned on conversion failure
 * @return the Enum constant
 */
public static <E extends Enum<E>> E toEnum(Class<E> clazz, Object value, E defaultValue)
{
    if (value == null)
    {
        return defaultValue;
    }
    if (clazz.isAssignableFrom(value.getClass()))
    {
        // Already the right enum type; cast is safe after the runtime check.
        @SuppressWarnings("unchecked")
        E myE = (E) value;
        return myE;
    }
    final String valueStr = toStr(value, null);
    if (StringUtils.isEmpty(valueStr))
    {
        return defaultValue;
    }
    try
    {
        return Enum.valueOf(clazz, valueStr);
    }
    catch (Exception e)
    {
        return defaultValue;
    }
}
/**
 * Convert to an Enum constant of the given class.
 * Returns null when the input is null/empty or no constant matches.
 *
 * @param clazz the Enum class
 * @param value the value to convert
 * @return the Enum constant, or null
 */
public static <E extends Enum<E>> E toEnum(Class<E> clazz, Object value)
{
    return toEnum(clazz, value, null);
}
/**
 * Convert to BigInteger.
 * Returns the default value when the input is null/empty or parsing fails;
 * parsing errors are never thrown.
 *
 * @param value        the value to convert
 * @param defaultValue the value returned on conversion failure
 * @return the converted value
 */
public static BigInteger toBigInteger(Object value, BigInteger defaultValue)
{
    if (value == null)
    {
        return defaultValue;
    }
    if (value instanceof BigInteger)
    {
        return (BigInteger) value;
    }
    if (value instanceof Long)
    {
        return BigInteger.valueOf((Long) value);
    }
    final String valueStr = toStr(value, null);
    if (StringUtils.isEmpty(valueStr))
    {
        return defaultValue;
    }
    try
    {
        return new BigInteger(valueStr);
    }
    catch (Exception e)
    {
        return defaultValue;
    }
}
/**
 * Convert to BigInteger.
 * Returns null when the input is null/empty or parsing fails; never throws.
 *
 * @param value the value to convert
 * @return the converted value, or null
 */
public static BigInteger toBigInteger(Object value)
{
    return toBigInteger(value, null);
}
/**
 * Convert to BigDecimal.
 * Returns the default value when the input is null/empty or parsing fails;
 * parsing errors are never thrown.
 *
 * @param value        the value to convert
 * @param defaultValue the value returned on conversion failure
 * @return the converted value
 */
public static BigDecimal toBigDecimal(Object value, BigDecimal defaultValue)
{
    if (value == null)
    {
        return defaultValue;
    }
    if (value instanceof BigDecimal)
    {
        return (BigDecimal) value;
    }
    if (value instanceof Long)
    {
        return new BigDecimal((Long) value);
    }
    if (value instanceof Double)
    {
        // valueOf uses the canonical string form, avoiding binary-float artifacts.
        return BigDecimal.valueOf((Double) value);
    }
    if (value instanceof Integer)
    {
        return new BigDecimal((Integer) value);
    }
    final String valueStr = toStr(value, null);
    if (StringUtils.isEmpty(valueStr))
    {
        return defaultValue;
    }
    try
    {
        return new BigDecimal(valueStr);
    }
    catch (Exception e)
    {
        return defaultValue;
    }
}
/**
 * Convert to BigDecimal.
 * Returns null when the input is null/empty or parsing fails; never throws.
 *
 * @param value the value to convert
 * @return the converted value, or null
 */
public static BigDecimal toBigDecimal(Object value)
{
    return toBigDecimal(value, null);
}
/**
 * Convert an object to a String using UTF-8.
 * byte[]/Byte[]/ByteBuffer values are decoded; other objects use toString().
 *
 * @param obj the object
 * @return the string, or null when obj is null
 */
public static String utf8Str(Object obj)
{
    return str(obj, CharsetKit.CHARSET_UTF_8);
}
/**
 * Convert an object to a String using the named charset.
 * byte[]/Byte[]/ByteBuffer values are decoded; other objects use toString().
 *
 * @param obj         the object
 * @param charsetName the charset name (must be valid — Charset.forName throws otherwise)
 * @return the string, or null when obj is null
 */
public static String str(Object obj, String charsetName)
{
    return str(obj, Charset.forName(charsetName));
}
/**
 * Convert an object to a String.
 * byte[]/Byte[]/ByteBuffer values are decoded with the given charset;
 * any other object falls back to toString().
 *
 * @param obj     the object
 * @param charset the charset used for byte decoding
 * @return the string, or null when obj is null
 */
public static String str(Object obj, Charset charset)
{
    if (null == obj)
    {
        return null;
    }
    if (obj instanceof String)
    {
        return (String) obj;
    }
    else if (obj instanceof byte[])
    {
        return str((byte[]) obj, charset);
    }
    else if (obj instanceof Byte[])
    {
        // Unbox Byte[] to byte[] before decoding.
        byte[] bytes = ArrayUtils.toPrimitive((Byte[]) obj);
        return str(bytes, charset);
    }
    else if (obj instanceof ByteBuffer)
    {
        return str((ByteBuffer) obj, charset);
    }
    return obj.toString();
}
/**
 * Decode a byte array into a String.
 *
 * @param bytes   the bytes to decode
 * @param charset the charset name; null/empty falls back to the platform default
 * @return the decoded string, or null when bytes is null
 */
public static String str(byte[] bytes, String charset)
{
    return str(bytes, StringUtils.isEmpty(charset) ? Charset.defaultCharset() : Charset.forName(charset));
}
/**
 * Decode a byte array into a String.
 *
 * @param data    the bytes to decode; may be null
 * @param charset the charset; when null the platform default is used
 * @return the decoded string, or null when data is null
 */
public static String str(byte[] data, Charset charset)
{
    if (data == null)
    {
        return null;
    }
    return null == charset ? new String(data) : new String(data, charset);
}
/**
 * Decode a ByteBuffer into a String.
 *
 * @param data    the buffer to decode; may be null
 * @param charset the charset name; null/empty falls back to the platform default
 * @return the decoded string, or null when data is null
 */
public static String str(ByteBuffer data, String charset)
{
    if (data == null)
    {
        return null;
    }
    // Charset.forName(null) would throw; fall back to the platform default
    // instead, matching the documented contract and str(byte[], String).
    return str(data, StringUtils.isEmpty(charset) ? Charset.defaultCharset() : Charset.forName(charset));
}
/**
 * Decode a ByteBuffer into a String.
 *
 * @param data    the buffer to decode
 * @param charset the charset; when null the platform default is used
 * @return the decoded string
 */
public static String str(ByteBuffer data, Charset charset)
{
    final Charset cs = (null == charset) ? Charset.defaultCharset() : charset;
    return cs.decode(data).toString();
}
// ----------------------------------------------------------------------- full-width / half-width conversion
/**
 * Convert half-width (ASCII) characters to full-width form.
 *
 * @param input the string to convert
 * @return the full-width string
 */
public static String toSBC(String input)
{
    return toSBC(input, null);
}
/**
 * Convert half-width (ASCII) characters to full-width form.
 *
 * @param input         the string to convert
 * @param notConvertSet characters to leave untouched; may be null
 * @return the full-width string
 */
public static String toSBC(String input, Set<Character> notConvertSet)
{
    final char[] chars = input.toCharArray();
    for (int idx = 0; idx < chars.length; idx++)
    {
        final char ch = chars[idx];
        if (notConvertSet != null && notConvertSet.contains(ch))
        {
            // Excluded from conversion.
            continue;
        }
        if (ch == ' ')
        {
            chars[idx] = '\u3000'; // ideographic (full-width) space
        }
        else if (ch < '\177')
        {
            chars[idx] = (char) (ch + 65248); // shift ASCII into the full-width block
        }
    }
    return new String(chars);
}
/**
 * Convert full-width characters to half-width (ASCII) form.
 *
 * @param input the string to convert
 * @return the half-width string
 */
public static String toDBC(String input)
{
    return toDBC(input, null);
}
/**
 * Convert full-width characters to half-width (ASCII) form.
 *
 * @param text          the text to convert
 * @param notConvertSet characters to leave untouched; may be null
 * @return the half-width string
 */
public static String toDBC(String text, Set<Character> notConvertSet)
{
    final char[] chars = text.toCharArray();
    for (int idx = 0; idx < chars.length; idx++)
    {
        final char ch = chars[idx];
        if (notConvertSet != null && notConvertSet.contains(ch))
        {
            // Excluded from conversion.
            continue;
        }
        if (ch == '\u3000')
        {
            chars[idx] = ' '; // ideographic space -> ASCII space
        }
        else if (ch > '\uFF00' && ch < '\uFF5F')
        {
            chars[idx] = (char) (ch - 65248); // shift back into the ASCII range
        }
    }
    return new String(chars);
}
/**
 * Convert a numeric amount to Chinese uppercase (financial) form.
 * Builds the full spelling first, then collapses redundant "零" runs
 * with regex replacements.
 *
 * @param n the amount (negative values get a leading "负")
 * @return the Chinese uppercase amount string
 */
public static String digitUppercase(double n)
{
    String[] fraction = { "角", "分" };
    String[] digit = { "零", "壹", "贰", "叁", "肆", "伍", "陆", "柒", "捌", "玖" };
    String[][] unit = { { "元", "万", "亿" }, { "", "拾", "佰", "仟" } };
    String head = n < 0 ? "负" : "";
    n = Math.abs(n);
    String s = "";
    for (int i = 0; i < fraction.length; i++)
    {
        // Scale via BigDecimal to avoid double precision loss at the cents.
        BigDecimal nNum = new BigDecimal(n);
        BigDecimal decimal = new BigDecimal(10);
        BigDecimal scale = nNum.multiply(decimal).setScale(2, RoundingMode.HALF_EVEN);
        double d = scale.doubleValue();
        s += (digit[(int) (Math.floor(d * Math.pow(10, i)) % 10)] + fraction[i]).replaceAll("(零.)+", "");
    }
    if (s.length() < 1)
    {
        s = "整";
    }
    int integerPart = (int) Math.floor(n);
    for (int i = 0; i < unit[0].length && integerPart > 0; i++)
    {
        String p = "";
        // NOTE(review): the inner guard tests n > 0 (constant inside this loop)
        // rather than integerPart > 0, so each 4-digit group is always fully
        // emitted and later trimmed by the regex below — confirm intentional.
        for (int j = 0; j < unit[1].length && n > 0; j++)
        {
            p = digit[integerPart % 10] + unit[1][j] + p;
            integerPart = integerPart / 10;
        }
        s = p.replaceAll("(零.)*零$", "").replaceAll("^$", "零") + unit[0][i] + s;
    }
    return head + s.replaceAll("(零.)*零元", "元").replaceFirst("(零.)+", "").replaceAll("(零.)+", "零").replaceAll("^整$", "零元整");
}
}
|
281677160/openwrt-package
| 4,785
|
luci-app-ssr-plus/shadowsocks-libev/patches/101-Fix-mishandling-of-incoming-socket-buffer.-It-must-b.patch
|
From 8be7a7cb00b9540e9be05d409191b0bc1ba424f0 Mon Sep 17 00:00:00 2001
From: notsure2 <[email protected]>
Date: Mon, 11 Dec 2023 09:15:47 +0200
Subject: [PATCH] Fix mishandling of incoming socket buffer. It must be set on
the listening socket not the accepted socket.
---
src/local.c | 16 ++++++++--------
src/redir.c | 16 ++++++++--------
src/server.c | 16 ++++++++--------
src/tunnel.c | 16 ++++++++--------
4 files changed, 32 insertions(+), 32 deletions(-)
--- a/src/local.c
+++ b/src/local.c
@@ -205,6 +205,14 @@ create_and_bind(const char *addr, const
}
}
+ if (tcp_incoming_sndbuf > 0) {
+ setsockopt(listen_sock, SOL_SOCKET, SO_SNDBUF, &tcp_incoming_sndbuf, sizeof(int));
+ }
+
+ if (tcp_incoming_rcvbuf > 0) {
+ setsockopt(listen_sock, SOL_SOCKET, SO_RCVBUF, &tcp_incoming_rcvbuf, sizeof(int));
+ }
+
s = bind(listen_sock, rp->ai_addr, rp->ai_addrlen);
if (s == 0) {
/* We managed to bind successfully! */
@@ -1406,14 +1414,6 @@ accept_cb(EV_P_ ev_io *w, int revents)
setsockopt(serverfd, SOL_SOCKET, SO_NOSIGPIPE, &opt, sizeof(opt));
#endif
- if (tcp_incoming_sndbuf > 0) {
- setsockopt(serverfd, SOL_SOCKET, SO_SNDBUF, &tcp_incoming_sndbuf, sizeof(int));
- }
-
- if (tcp_incoming_rcvbuf > 0) {
- setsockopt(serverfd, SOL_SOCKET, SO_RCVBUF, &tcp_incoming_rcvbuf, sizeof(int));
- }
-
server_t *server = new_server(serverfd);
server->listener = listener;
--- a/src/redir.c
+++ b/src/redir.c
@@ -201,6 +201,14 @@ create_and_bind(const char *addr, const
LOGI("tcp tproxy mode enabled");
}
+ if (tcp_incoming_sndbuf > 0) {
+ setsockopt(listen_sock, SOL_SOCKET, SO_SNDBUF, &tcp_incoming_sndbuf, sizeof(int));
+ }
+
+ if (tcp_incoming_rcvbuf > 0) {
+ setsockopt(listen_sock, SOL_SOCKET, SO_RCVBUF, &tcp_incoming_rcvbuf, sizeof(int));
+ }
+
s = bind(listen_sock, rp->ai_addr, rp->ai_addrlen);
if (s == 0) {
/* We managed to bind successfully! */
@@ -759,14 +767,6 @@ accept_cb(EV_P_ ev_io *w, int revents)
setsockopt(serverfd, SOL_SOCKET, SO_NOSIGPIPE, &opt, sizeof(opt));
#endif
- if (tcp_incoming_sndbuf > 0) {
- setsockopt(serverfd, SOL_SOCKET, SO_SNDBUF, &tcp_incoming_sndbuf, sizeof(int));
- }
-
- if (tcp_incoming_rcvbuf > 0) {
- setsockopt(serverfd, SOL_SOCKET, SO_RCVBUF, &tcp_incoming_rcvbuf, sizeof(int));
- }
-
int index = rand() % listener->remote_num;
struct sockaddr *remote_addr = listener->remote_addr[index];
--- a/src/server.c
+++ b/src/server.c
@@ -620,6 +620,14 @@ create_and_bind(const char *host, const
}
}
+ if (tcp_incoming_sndbuf > 0) {
+ setsockopt(listen_sock, SOL_SOCKET, SO_SNDBUF, &tcp_incoming_sndbuf, sizeof(int));
+ }
+
+ if (tcp_incoming_rcvbuf > 0) {
+ setsockopt(listen_sock, SOL_SOCKET, SO_RCVBUF, &tcp_incoming_rcvbuf, sizeof(int));
+ }
+
// Enable out-of-tree mptcp
if (mptcp == 1) {
int i = 0;
@@ -1769,14 +1777,6 @@ accept_cb(EV_P_ ev_io *w, int revents)
setsockopt(serverfd, SOL_SOCKET, SO_NOSIGPIPE, &opt, sizeof(opt));
#endif
- if (tcp_incoming_sndbuf > 0) {
- setsockopt(serverfd, SOL_SOCKET, SO_SNDBUF, &tcp_incoming_sndbuf, sizeof(int));
- }
-
- if (tcp_incoming_rcvbuf > 0) {
- setsockopt(serverfd, SOL_SOCKET, SO_RCVBUF, &tcp_incoming_rcvbuf, sizeof(int));
- }
-
setnonblocking(serverfd);
server_t *server = new_server(serverfd, listener);
--- a/src/tunnel.c
+++ b/src/tunnel.c
@@ -166,6 +166,14 @@ create_and_bind(const char *addr, const
}
}
+ if (tcp_incoming_sndbuf > 0) {
+ setsockopt(listen_sock, SOL_SOCKET, SO_SNDBUF, &tcp_incoming_sndbuf, sizeof(int));
+ }
+
+ if (tcp_incoming_rcvbuf > 0) {
+ setsockopt(listen_sock, SOL_SOCKET, SO_RCVBUF, &tcp_incoming_rcvbuf, sizeof(int));
+ }
+
s = bind(listen_sock, rp->ai_addr, rp->ai_addrlen);
if (s == 0) {
/* We managed to bind successfully! */
@@ -725,14 +733,6 @@ accept_cb(EV_P_ ev_io *w, int revents)
setsockopt(serverfd, SOL_SOCKET, SO_NOSIGPIPE, &opt, sizeof(opt));
#endif
- if (tcp_incoming_sndbuf > 0) {
- setsockopt(serverfd, SOL_SOCKET, SO_SNDBUF, &tcp_incoming_sndbuf, sizeof(int));
- }
-
- if (tcp_incoming_rcvbuf > 0) {
- setsockopt(serverfd, SOL_SOCKET, SO_RCVBUF, &tcp_incoming_rcvbuf, sizeof(int));
- }
-
int index = rand() % listener->remote_num;
struct sockaddr *remote_addr = listener->remote_addr[index];
|
28harishkumar/blog
| 5,624
|
resources/assets/less/bootstrap/button-groups.less
|
//
// Button groups
// --------------------------------------------------

// Make the div behave like a button
.btn-group,
.btn-group-vertical {
  position: relative;
  display: inline-block;
  vertical-align: middle; // match .btn alignment given font-size hack above
  > .btn {
    position: relative;
    float: left;
    // Bring the "active" button to the front
    &:hover,
    &:focus,
    &:active,
    &.active {
      z-index: 2;
    }
  }
}

// Prevent double borders when buttons are next to each other
.btn-group {
  .btn + .btn,
  .btn + .btn-group,
  .btn-group + .btn,
  .btn-group + .btn-group {
    margin-left: -1px;
  }
}

// Optional: Group multiple button groups together for a toolbar
.btn-toolbar {
  margin-left: -5px; // Offset the first child's margin
  &:extend(.clearfix all);
  .btn-group,
  .input-group {
    float: left;
  }
  > .btn,
  > .btn-group,
  > .input-group {
    margin-left: 5px;
  }
}

// Strip rounding from middle buttons so the group reads as one control
.btn-group > .btn:not(:first-child):not(:last-child):not(.dropdown-toggle) {
  border-radius: 0;
}

// Set corners individual because sometimes a single button can be in a .btn-group and we need :first-child and :last-child to both match
.btn-group > .btn:first-child {
  margin-left: 0;
  &:not(:last-child):not(.dropdown-toggle) {
    .border-right-radius(0);
  }
}
// Need .dropdown-toggle since :last-child doesn't apply given a .dropdown-menu immediately after it
.btn-group > .btn:last-child:not(:first-child),
.btn-group > .dropdown-toggle:not(:first-child) {
  .border-left-radius(0);
}

// Custom edits for including btn-groups within btn-groups (useful for including dropdown buttons within a btn-group)
.btn-group > .btn-group {
  float: left;
}
.btn-group > .btn-group:not(:first-child):not(:last-child) > .btn {
  border-radius: 0;
}
.btn-group > .btn-group:first-child {
  > .btn:last-child,
  > .dropdown-toggle {
    .border-right-radius(0);
  }
}
.btn-group > .btn-group:last-child > .btn:first-child {
  .border-left-radius(0);
}

// On active and open, don't show outline
.btn-group .dropdown-toggle:active,
.btn-group.open .dropdown-toggle {
  outline: 0;
}

// Sizing
//
// Remix the default button sizing classes into new ones for easier manipulation.
.btn-group-xs > .btn { &:extend(.btn-xs); }
.btn-group-sm > .btn { &:extend(.btn-sm); }
.btn-group-lg > .btn { &:extend(.btn-lg); }

// Split button dropdowns
// ----------------------

// Give the line between buttons some depth
.btn-group > .btn + .dropdown-toggle {
  padding-left: 8px;
  padding-right: 8px;
}
.btn-group > .btn-lg + .dropdown-toggle {
  padding-left: 12px;
  padding-right: 12px;
}

// The clickable button for toggling the menu
// Remove the gradient and set the same inset shadow as the :active state
.btn-group.open .dropdown-toggle {
  .box-shadow(inset 0 3px 5px rgba(0,0,0,.125));
  // Show no shadow for `.btn-link` since it has no other button styles.
  &.btn-link {
    .box-shadow(none);
  }
}

// Reposition the caret
.btn .caret {
  margin-left: 0;
}
// Carets in other button sizes
.btn-lg .caret {
  border-width: @caret-width-large @caret-width-large 0;
  border-bottom-width: 0;
}
// Upside down carets for .dropup
.dropup .btn-lg .caret {
  border-width: 0 @caret-width-large @caret-width-large;
}

// Vertical button groups
// ----------------------
.btn-group-vertical {
  > .btn,
  > .btn-group,
  > .btn-group > .btn {
    display: block;
    float: none;
    width: 100%;
    max-width: 100%;
  }
  // Clear floats so dropdown menus can be properly placed
  > .btn-group {
    &:extend(.clearfix all);
    > .btn {
      float: none;
    }
  }
  // Collapse the shared border between stacked buttons
  > .btn + .btn,
  > .btn + .btn-group,
  > .btn-group + .btn,
  > .btn-group + .btn-group {
    margin-top: -1px;
    margin-left: 0;
  }
}
.btn-group-vertical > .btn {
  &:not(:first-child):not(:last-child) {
    border-radius: 0;
  }
  &:first-child:not(:last-child) {
    border-top-right-radius: @border-radius-base;
    .border-bottom-radius(0);
  }
  &:last-child:not(:first-child) {
    border-bottom-left-radius: @border-radius-base;
    .border-top-radius(0);
  }
}
.btn-group-vertical > .btn-group:not(:first-child):not(:last-child) > .btn {
  border-radius: 0;
}
.btn-group-vertical > .btn-group:first-child:not(:last-child) {
  > .btn:last-child,
  > .dropdown-toggle {
    .border-bottom-radius(0);
  }
}
.btn-group-vertical > .btn-group:last-child:not(:first-child) > .btn:first-child {
  .border-top-radius(0);
}

// Justified button groups
// ----------------------
.btn-group-justified {
  display: table;
  width: 100%;
  table-layout: fixed;
  border-collapse: separate;
  > .btn,
  > .btn-group {
    float: none;
    display: table-cell;
    width: 1%;
  }
  > .btn-group .btn {
    width: 100%;
  }
  > .btn-group .dropdown-menu {
    left: auto;
  }
}

// Checkbox and radio options
//
// In order to support the browser's form validation feedback, powered by the
// `required` attribute, we have to "hide" the inputs via `clip`. We cannot use
// `display: none;` or `visibility: hidden;` as that also hides the popover.
// Simply visually hiding the inputs via `opacity` would leave them clickable in
// certain cases which is prevented by using `clip` and `pointer-events`.
// This way, we ensure a DOM element is visible to position the popover from.
//
// See https://github.com/twbs/bootstrap/pull/12794 and
// https://github.com/twbs/bootstrap/pull/14559 for more information.
[data-toggle="buttons"] {
  > .btn,
  > .btn-group > .btn {
    input[type="radio"],
    input[type="checkbox"] {
      position: absolute;
      clip: rect(0,0,0,0);
      pointer-events: none;
    }
  }
}
|
28harishkumar/blog
| 3,022
|
resources/assets/less/bootstrap/list-group.less
|
//
// List groups
// --------------------------------------------------

// Base class
//
// Easily usable on <ul>, <ol>, or <div>.
.list-group {
  // No need to set list-style: none; since .list-group-item is block level
  margin-bottom: 20px;
  padding-left: 0; // reset padding because ul and ol
}

// Individual list items
//
// Use on `li`s or `div`s within the `.list-group` parent.
.list-group-item {
  position: relative;
  display: block;
  padding: 10px 15px;
  // Place the border on the list items and negative margin up for better styling
  margin-bottom: -1px;
  background-color: @list-group-bg;
  border: 1px solid @list-group-border;
  // Round the first and last items
  &:first-child {
    .border-top-radius(@list-group-border-radius);
  }
  &:last-child {
    margin-bottom: 0;
    .border-bottom-radius(@list-group-border-radius);
  }
}

// Linked list items
//
// Use anchor elements instead of `li`s or `div`s to create linked list items.
// Includes an extra `.active` modifier class for showing selected items.
a.list-group-item {
  color: @list-group-link-color;
  .list-group-item-heading {
    color: @list-group-link-heading-color;
  }
  // Hover state
  &:hover,
  &:focus {
    text-decoration: none;
    color: @list-group-link-hover-color;
    background-color: @list-group-hover-bg;
  }
}

.list-group-item {
  // Disabled state
  &.disabled,
  &.disabled:hover,
  &.disabled:focus {
    background-color: @list-group-disabled-bg;
    color: @list-group-disabled-color;
    cursor: @cursor-disabled;
    // Force color to inherit for custom content
    .list-group-item-heading {
      color: inherit;
    }
    .list-group-item-text {
      color: @list-group-disabled-text-color;
    }
  }
  // Active class on item itself, not parent
  &.active,
  &.active:hover,
  &.active:focus {
    z-index: 2; // Place active items above their siblings for proper border styling
    color: @list-group-active-color;
    background-color: @list-group-active-bg;
    border-color: @list-group-active-border;
    // Force color to inherit for custom content
    .list-group-item-heading,
    .list-group-item-heading > small,
    .list-group-item-heading > .small {
      color: inherit;
    }
    .list-group-item-text {
      color: @list-group-active-text-color;
    }
  }
}

// Contextual variants
//
// Add modifier classes to change text and background color on individual items.
// Organizationally, this must come after the `:hover` states.
.list-group-item-variant(success; @state-success-bg; @state-success-text);
.list-group-item-variant(info; @state-info-bg; @state-info-text);
.list-group-item-variant(warning; @state-warning-bg; @state-warning-text);
.list-group-item-variant(danger; @state-danger-bg; @state-danger-text);

// Custom content options
//
// Extra classes for creating well-formatted content within `.list-group-item`s.
.list-group-item-heading {
  margin-top: 0;
  margin-bottom: 5px;
}
.list-group-item-text {
  margin-bottom: 0;
  line-height: 1.3;
}
|
2977094657/BilibiliHistoryFetcher
| 64,633
|
scripts/scheduler_db_enhanced.py
|
import calendar
import json
import os
import sqlite3
import threading
from datetime import datetime, timedelta
from typing import List, Dict, Optional
import yaml
from loguru import logger
from scripts.scheduler_db import SchedulerDB
from scripts.utils import setup_logger
# 确保日志系统已初始化
setup_logger()
class EnhancedSchedulerDB(SchedulerDB):
    """Scheduler database manager extended with main/sub task relationships."""
    _instance = None  # process-wide singleton, guarded by _lock
    _lock = threading.Lock()
@classmethod
def get_instance(cls) -> 'EnhancedSchedulerDB':
    """Return the process-wide singleton, creating it on first use.

    Double-checked locking keeps concurrent first calls from creating
    more than one instance.
    """
    if not cls._instance:
        with cls._lock:
            if not cls._instance:
                cls._instance = cls()
    return cls._instance
def __init__(self):
    """Open the scheduler database and, on first run, create the schema.

    Safe to call repeatedly: an ``_initialized`` sentinel makes later
    constructions no-ops (the class is used as a singleton).
    """
    if hasattr(self, '_initialized'):
        return

    # Ensure the output directory exists before connecting.
    self.db_dir = os.path.join('output', 'database')
    os.makedirs(self.db_dir, exist_ok=True)
    self.db_path = os.path.join(self.db_dir, 'scheduler.db')

    # check_same_thread=False matches the parent class: the singleton
    # connection may be used from scheduler worker threads.
    # NOTE: SQLite has no "timezone" PRAGMA — the previous
    # `PRAGMA timezone='+08:00'` was silently ignored, so local-time
    # handling relies on datetime('now', 'localtime') inside the SQL.
    self.conn = sqlite3.connect(self.db_path, check_same_thread=False)
    self.conn.row_factory = sqlite3.Row

    # Initialize schema and seed data only when the database is brand new.
    cursor = self.conn.cursor()
    cursor.execute("SELECT name FROM sqlite_master WHERE type='table'")
    if not cursor.fetchall():
        self._create_tables()
        self._import_config_data()

    self._initialized = True
def _drop_all_tables(self):
    """Drop every user table, keeping SQLite's internal bookkeeping tables."""
    cursor = self.conn.cursor()
    cursor.execute("SELECT name FROM sqlite_master WHERE type='table'")
    table_names = [row[0] for row in cursor.fetchall()]
    for name in table_names:
        # sqlite_sequence is maintained by SQLite for AUTOINCREMENT columns.
        if name not in ['sqlite_sequence']:
            cursor.execute(f"DROP TABLE IF EXISTS {name}")
    self.conn.commit()
def _create_tables(self):
    """Create the main/sub task schema (idempotent via IF NOT EXISTS)."""
    cursor = self.conn.cursor()
    # Main tasks: definition plus schedule settings.
    cursor.execute('''
        CREATE TABLE IF NOT EXISTS main_tasks (
            task_id TEXT PRIMARY KEY,
            name TEXT NOT NULL,
            endpoint TEXT NOT NULL,
            method TEXT DEFAULT 'GET',
            params TEXT,
            schedule_type TEXT NOT NULL,
            schedule_time TEXT,
            schedule_delay INTEGER,
            interval_value INTEGER,
            interval_unit TEXT,
            enabled INTEGER DEFAULT 1,
            task_type TEXT DEFAULT 'main',
            created_at TIMESTAMP DEFAULT (datetime('now', 'localtime')),
            last_modified TIMESTAMP DEFAULT (datetime('now', 'localtime'))
        )
    ''')
    # Sub-tasks: ordered children of a main task.
    cursor.execute('''
        CREATE TABLE IF NOT EXISTS sub_tasks (
            task_id TEXT PRIMARY KEY,
            parent_id TEXT NOT NULL,
            name TEXT NOT NULL,
            sequence_number INTEGER NOT NULL,
            endpoint TEXT NOT NULL,
            method TEXT DEFAULT 'GET',
            params TEXT,
            schedule_type TEXT DEFAULT 'daily',
            enabled INTEGER DEFAULT 1,
            created_at TIMESTAMP DEFAULT (datetime('now', 'localtime')),
            last_modified TIMESTAMP DEFAULT (datetime('now', 'localtime')),
            FOREIGN KEY (parent_id) REFERENCES main_tasks(task_id) ON DELETE CASCADE
        )
    ''')
    # Latest status per main task.
    cursor.execute('''
        CREATE TABLE IF NOT EXISTS task_status (
            task_id TEXT PRIMARY KEY,
            last_run_time TEXT,
            next_run_time TEXT,
            last_status TEXT,
            total_runs INTEGER DEFAULT 0,
            success_runs INTEGER DEFAULT 0,
            fail_runs INTEGER DEFAULT 0,
            avg_duration REAL DEFAULT 0,
            last_error TEXT,
            tags TEXT,
            success_rate REAL DEFAULT 0,
            FOREIGN KEY (task_id) REFERENCES main_tasks(task_id) ON DELETE CASCADE
        )
    ''')
    # Latest status per sub-task.
    cursor.execute('''
        CREATE TABLE IF NOT EXISTS sub_task_status (
            task_id TEXT PRIMARY KEY,
            last_run_time TEXT,
            next_run_time TEXT,
            last_status TEXT,
            total_runs INTEGER DEFAULT 0,
            success_runs INTEGER DEFAULT 0,
            fail_runs INTEGER DEFAULT 0,
            avg_duration REAL DEFAULT 0,
            last_error TEXT,
            tags TEXT,
            success_rate REAL DEFAULT 0,
            FOREIGN KEY (task_id) REFERENCES sub_tasks(task_id) ON DELETE CASCADE
        )
    ''')
    # Execution history for main tasks.
    cursor.execute('''
        CREATE TABLE IF NOT EXISTS task_executions (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            task_id TEXT NOT NULL,
            start_time TEXT NOT NULL,
            end_time TEXT,
            duration REAL,
            status TEXT NOT NULL,
            error_message TEXT,
            output TEXT,
            triggered_by TEXT,
            next_run_time TEXT,
            created_at TIMESTAMP DEFAULT (datetime('now', 'localtime')),
            FOREIGN KEY (task_id) REFERENCES main_tasks(task_id) ON DELETE CASCADE
        )
    ''')
    # Execution history for sub-tasks.
    cursor.execute('''
        CREATE TABLE IF NOT EXISTS sub_task_executions (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            task_id TEXT NOT NULL,
            start_time TEXT NOT NULL,
            end_time TEXT,
            duration REAL,
            status TEXT NOT NULL,
            error_message TEXT,
            output TEXT,
            triggered_by TEXT,
            next_run_time TEXT,
            created_at TIMESTAMP DEFAULT (datetime('now', 'localtime')),
            FOREIGN KEY (task_id) REFERENCES sub_tasks(task_id) ON DELETE CASCADE
        )
    ''')
    # Task dependency edges — deliberately without foreign keys so edges
    # may reference either main or sub tasks.
    cursor.execute('''
        CREATE TABLE IF NOT EXISTS task_dependencies (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            task_id TEXT NOT NULL,
            depends_on TEXT NOT NULL,
            created_at TIMESTAMP DEFAULT (datetime('now', 'localtime')),
            UNIQUE(task_id, depends_on)
        )
    ''')
    self.conn.commit()
def _import_config_data(self):
    """Seed the freshly created tables from scheduler_config.yaml.

    Tasks without a ``requires`` entry become main tasks; the rest become
    sub-tasks attached to the root of their dependency chain. Any error
    rolls the whole import back.
    """
    try:
        from scripts.utils import get_config_path
        config_path = get_config_path('scheduler_config.yaml')
        if not os.path.exists(config_path):
            print(f"配置文件不存在: {config_path}")
            return
        with open(config_path, 'r', encoding='utf-8') as f:
            config = yaml.safe_load(f)
        if not config or 'tasks' not in config:
            print("配置文件中没有任务数据")
            return
        cursor = self.conn.cursor()
        # Split tasks: entries without dependencies are main tasks.
        main_tasks = {}
        sub_tasks = {}
        for task_id, task_data in config['tasks'].items():
            if not task_data.get('requires'):  # no dependency -> main task
                main_tasks[task_id] = task_data
            else:
                sub_tasks[task_id] = task_data
        print(f"找到 {len(main_tasks)} 个主任务和 {len(sub_tasks)} 个子任务")
        # Import main tasks.
        for task_id, task_data in main_tasks.items():
            schedule_info = task_data.get('schedule', {})
            # Interval-type tasks carry extra fields.
            interval_value = None
            interval_unit = None
            schedule_type = schedule_info.get('type', 'daily')
            if schedule_type == 'interval':
                # Prefer interval_value/interval_unit; fall back to value/unit.
                interval_value = schedule_info.get('interval_value', schedule_info.get('value'))
                interval_unit = schedule_info.get('interval_unit', schedule_info.get('unit'))
                print(f"任务 {task_id}: 间隔值 = {interval_value}, 间隔单位 = {interval_unit}")
            # Insert the main task row.
            cursor.execute("""
                INSERT INTO main_tasks (
                    task_id, name, endpoint, method, params, schedule_type,
                    schedule_time, schedule_delay, interval_value, interval_unit,
                    enabled, task_type, last_modified
                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            """, (
                task_id,
                task_data.get('name', task_id),
                task_data.get('endpoint', ''),
                task_data.get('method', 'GET'),
                json.dumps(task_data.get('params', {})),
                schedule_type,
                schedule_info.get('time'),
                schedule_info.get('delay'),
                interval_value,
                interval_unit,
                task_data.get('enabled', 1),
                'main',
                datetime.now().isoformat()
            ))
            # Initialize its status row.
            cursor.execute('''
                INSERT INTO task_status (task_id, tags)
                VALUES (?, ?)
            ''', (task_id, json.dumps(task_data.get('tags', []))))
        # Import sub-tasks.
        sequence_counter = {}  # per-parent sequence numbering
        for task_id, task_data in sub_tasks.items():
            # Resolve the root (main) task of the dependency chain.
            parent_id = self._find_root_task(task_id, config['tasks'])
            if not parent_id:
                print(f"警告: 无法找到任务 {task_id} 的父任务")
                continue
            # Start numbering at 1 for each parent.
            if parent_id not in sequence_counter:
                sequence_counter[parent_id] = 1
            schedule_info = task_data.get('schedule', {})
            sub_task = {
                'task_id': task_id,
                'parent_id': parent_id,
                'name': task_data.get('name', task_id),
                'sequence_number': sequence_counter[parent_id],
                'endpoint': task_data.get('endpoint', ''),
                'method': task_data.get('method', 'GET'),
                'params': json.dumps(task_data.get('params', {})),
                'schedule_type': schedule_info.get('type', 'daily'),  # defaults to daily
                'enabled': 1
            }
            # Insert the sub-task row.
            cursor.execute('''
                INSERT INTO sub_tasks (
                    task_id, parent_id, name, sequence_number,
                    endpoint, method, params, schedule_type, enabled
                ) VALUES (
                    :task_id, :parent_id, :name, :sequence_number,
                    :endpoint, :method, :params, :schedule_type, :enabled
                )
            ''', sub_task)
            # Initialize its status row.
            cursor.execute('''
                INSERT INTO sub_task_status (task_id, tags)
                VALUES (?, ?)
            ''', (task_id, json.dumps(task_data.get('tags', []))))
            # Advance the per-parent sequence counter.
            sequence_counter[parent_id] += 1
            # Record explicit dependency edges.
            for depends_on in task_data.get('requires', []):
                cursor.execute('''
                    INSERT INTO task_dependencies (task_id, depends_on)
                    VALUES (?, ?)
                ''', (task_id, depends_on))
        self.conn.commit()
        print("成功导入配置数据")
    except Exception as e:
        print(f"导入配置数据时出错: {str(e)}")
        self.conn.rollback()
def _find_root_task(self, task_id: str, tasks: dict) -> str:
    """Follow the ``requires`` chain upward and return the root (main) task id.

    Only the first dependency at each level is followed, mirroring how the
    config encodes a linear task chain; unknown ids resolve to themselves.
    """
    entry = tasks.get(task_id)
    if entry and entry.get('requires'):
        return self._find_root_task(entry['requires'][0], tasks)
    return task_id
def close(self):
    """Close the underlying SQLite connection, if one was ever opened."""
    conn = getattr(self, 'conn', None)
    if conn is not None:
        conn.close()
# =================== Main task management ===================
def get_all_main_tasks(self) -> List[Dict]:
    """Return every main task joined with its status row and sub-tasks.

    JSON columns (``params``, ``tags``) are decoded; undecodable or legacy
    values fall back to ``{}`` / ``[]`` so callers always see the right shape.
    """
    cursor = self.conn.cursor()
    cursor.execute('''
        SELECT m.*,
               ts.last_run_time, ts.next_run_time, ts.last_status,
               ts.total_runs, ts.success_runs, ts.fail_runs,
               ts.avg_duration, ts.last_error, ts.tags,
               ts.success_rate,
               m.created_at as created_at_local,
               m.last_modified as last_modified_local
        FROM main_tasks m
        LEFT JOIN task_status ts ON m.task_id = ts.task_id
    ''')
    result = []
    for row in cursor.fetchall():
        task_data = dict(row)

        # Decode JSON columns. Narrowed excepts replace the old bare
        # `except:` which also swallowed SystemExit/KeyboardInterrupt.
        if task_data.get('params'):
            try:
                task_data['params'] = json.loads(task_data['params'])
            except (ValueError, TypeError):
                task_data['params'] = {}
        if task_data.get('tags'):
            try:
                task_data['tags'] = json.loads(task_data['tags'])
            except (ValueError, TypeError):
                task_data['tags'] = []

        # Prefer the localtime aliases over the raw column values.
        task_data['created_at'] = task_data.pop('created_at_local')
        task_data['last_modified'] = task_data.pop('last_modified_local')

        # Rows from older schemas may lack task_type.
        if 'task_type' not in task_data:
            task_data['task_type'] = 'main'

        # Attach the ordered sub-task chain.
        task_data['sub_tasks'] = self.get_sub_tasks(task_data['task_id'])
        result.append(task_data)
    return result
def get_main_task_by_id(self, task_id: str) -> Optional[Dict]:
"""获取指定ID的主任务"""
cursor = self.conn.cursor()
cursor.execute('''
SELECT m.*, ts.last_run_time, ts.next_run_time, ts.last_status,
ts.total_runs, ts.success_runs, ts.fail_runs,
ts.avg_duration, ts.last_error, ts.success_rate,
m.created_at as created_at_local,
m.last_modified as last_modified_local
FROM main_tasks m
LEFT JOIN task_status ts ON m.task_id = ts.task_id
WHERE m.task_id = ?
''', (task_id,))
row = cursor.fetchone()
if not row:
return None
task_data = dict(row)
# 处理JSON字段
if task_data.get('params'):
try:
task_data['params'] = json.loads(task_data['params'])
except:
task_data['params'] = {}
if task_data.get('tags'):
try:
task_data['tags'] = json.loads(task_data['tags'])
except:
task_data['tags'] = []
# 使用本地时间替换原始时间
task_data['created_at'] = task_data.pop('created_at_local')
task_data['last_modified'] = task_data.pop('last_modified_local')
return task_data
    def create_main_task(self, task_id: str, task_data: Dict) -> bool:
        """Create a new main task and its initial task_status row.

        Args:
            task_id: Unique identifier; creation fails if it already exists.
            task_data: Task configuration (name, endpoint, method, params,
                schedule_type / schedule_time / schedule_delay, interval
                fields, enabled flag, tags).

        Returns:
            True on success; False when the ID is taken or a database error
            occurs (the transaction is rolled back).
        """
        try:
            cursor = self.conn.cursor()
            # Reject duplicate task IDs up front.
            cursor.execute("SELECT COUNT(*) FROM main_tasks WHERE task_id = ?", (task_id,))
            if cursor.fetchone()[0] > 0:
                logger.error(f"任务ID '{task_id}' 已存在")
                return False
            # Prepare the task payload.
            params = task_data.get('params', {})
            # Send-email tasks need non-empty params; supply the defaults.
            if task_data.get('endpoint') == '/log/send-email' and not params:
                params = {
                    "content": None,
                    "mode": "simple",
                    "subject": "B站历史记录日报 - {current_time}"
                }
                logger.info(f"为发送邮件任务 '{task_id}' 自动添加默认参数")
            params_json = json.dumps(params) if params else None
            tags = json.dumps(task_data.get('tags', [])) if task_data.get('tags') else '[]'
            # Interval-type tasks carry extra fields; the legacy spellings
            # 'interval' / 'unit' are accepted as fallbacks.
            interval_value = None
            interval_unit = None
            if task_data.get('schedule_type') == 'interval':
                interval_value = task_data.get('interval_value', task_data.get('interval'))
                interval_unit = task_data.get('interval_unit', task_data.get('unit'))
                logger.info(f"设置间隔执行任务: 每 {interval_value} {interval_unit}")
            # Insert the main task row.
            cursor.execute("""
                INSERT INTO main_tasks (
                    task_id, name, endpoint, method, params, schedule_type,
                    schedule_time, schedule_delay, interval_value, interval_unit,
                    enabled, task_type, last_modified
                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            """, (
                task_id,
                task_data.get('name', task_id),
                task_data.get('endpoint', ''),
                task_data.get('method', 'GET'),
                params_json,
                task_data.get('schedule_type', 'daily'),
                task_data.get('schedule_time'),
                task_data.get('schedule_delay'),
                interval_value,
                interval_unit,
                task_data.get('enabled', 1),
                'main',
                datetime.now().isoformat()
            ))
            # Initialise the status row (only tags are known at creation).
            cursor.execute('''
                INSERT INTO task_status (task_id, tags)
                VALUES (?, ?)
            ''', (task_id, tags))
            self.conn.commit()
            logger.info(f"成功创建主任务 '{task_id}'")
            return True
        except Exception as e:
            self.conn.rollback()
            logger.error(f"创建主任务失败: {str(e)}")
            return False
def update_main_task(self, task_id: str, task_data: Dict) -> bool:
"""更新主任务信息"""
try:
cursor = self.conn.cursor()
# 检查任务是否存在且为主任务
cursor.execute("SELECT COUNT(*) FROM main_tasks WHERE task_id = ?", (task_id,))
if cursor.fetchone()[0] == 0:
logger.error(f"主任务 '{task_id}' 不存在")
return False
# 准备更新字段
fields = []
values = []
for key, value in task_data.items():
if key in ['name', 'endpoint', 'method', 'schedule_type', 'schedule_time',
'schedule_delay', 'enabled']:
fields.append(f"{key} = ?")
values.append(value)
elif key == 'interval_value' or key == 'interval':
fields.append("interval_value = ?")
values.append(value)
elif key == 'interval_unit' or key == 'unit':
fields.append("interval_unit = ?")
values.append(value)
elif key == 'params':
# 如果是发送邮件任务,确保params包含必要的内容
if 'endpoint' in task_data and task_data['endpoint'] == '/log/send-email':
if not value:
value = {
"content": None,
"mode": "simple",
"subject": "B站历史记录日报 - {current_time}"
}
logger.info(f"为发送邮件任务 '{task_id}' 自动添加默认参数")
fields.append("params = ?")
values.append(json.dumps(value))
elif key == 'tags':
# 更新任务状态表中的标签
try:
cursor.execute("""
UPDATE task_status
SET tags = ?
WHERE task_id = ?
""", (json.dumps(value), task_id))
except Exception as e:
logger.error(f"更新任务标签失败: {str(e)}")
# 添加最后修改时间
fields.append("last_modified = ?")
values.append(datetime.now().isoformat())
# 添加任务ID
values.append(task_id)
if fields:
# 构建更新SQL
sql = f"UPDATE main_tasks SET {', '.join(fields)} WHERE task_id = ?"
cursor.execute(sql, values)
self.conn.commit()
logger.info(f"成功更新主任务 '{task_id}'")
return True
else:
logger.warning(f"没有提供有效的更新字段")
return False
except Exception as e:
self.conn.rollback()
logger.error(f"更新主任务失败: {str(e)}")
return False
    def delete_main_task(self, task_id: str) -> bool:
        """Delete a main task together with all of its sub-tasks.

        Removes, in order: each child's dependencies, status rows and
        execution history, the children themselves, then the main task's
        own dependencies, status, history, and finally the main_tasks row.

        Returns:
            True on success; False when the task does not exist or any
            statement fails (everything is rolled back).
        """
        try:
            cursor = self.conn.cursor()
            # The target must exist as a main task.
            cursor.execute("SELECT COUNT(*) FROM main_tasks WHERE task_id = ?", (task_id,))
            if cursor.fetchone()[0] == 0:
                logger.error(f"主任务 '{task_id}' 不存在")
                return False
            # Run the whole cascade atomically.
            self.conn.execute("BEGIN TRANSACTION")
            # Collect the IDs of all children first.
            cursor.execute("SELECT task_id FROM sub_tasks WHERE parent_id = ?", (task_id,))
            subtask_ids = [row[0] for row in cursor.fetchall()]
            # Drop dependencies referencing each child (either direction).
            for subtask_id in subtask_ids:
                cursor.execute("DELETE FROM task_dependencies WHERE task_id = ? OR depends_on = ?",
                            (subtask_id, subtask_id))
            # Drop each child's status row.
            for subtask_id in subtask_ids:
                cursor.execute("DELETE FROM sub_task_status WHERE task_id = ?", (subtask_id,))
            # Drop each child's execution history.
            for subtask_id in subtask_ids:
                cursor.execute("DELETE FROM sub_task_executions WHERE task_id = ?", (subtask_id,))
            # Drop the children themselves.
            cursor.execute("DELETE FROM sub_tasks WHERE parent_id = ?", (task_id,))
            # Drop dependencies referencing the main task (either direction).
            cursor.execute("DELETE FROM task_dependencies WHERE task_id = ? OR depends_on = ?",
                        (task_id, task_id))
            # Drop the main task's status row.
            cursor.execute("DELETE FROM task_status WHERE task_id = ?", (task_id,))
            # Drop the main task's execution history.
            cursor.execute("DELETE FROM task_executions WHERE task_id = ?", (task_id,))
            # Finally drop the main task itself.
            cursor.execute("DELETE FROM main_tasks WHERE task_id = ?", (task_id,))
            self.conn.commit()
            logger.info(f"成功删除主任务 '{task_id}' 及其子任务")
            return True
        except Exception as e:
            self.conn.rollback()
            logger.error(f"删除主任务失败: {str(e)}")
            return False
    # =================== Sub-task management ===================
def get_sub_tasks(self, parent_id: str) -> List[Dict]:
"""获取指定主任务的所有子任务"""
cursor = self.conn.cursor()
cursor.execute('''
SELECT s.*,
sts.last_run_time, sts.next_run_time, sts.last_status,
sts.total_runs, sts.success_runs, sts.fail_runs,
sts.avg_duration, sts.last_error, sts.tags,
sts.success_rate,
s.created_at as created_at_local,
s.last_modified as last_modified_local,
td.depends_on
FROM sub_tasks s
LEFT JOIN sub_task_status sts ON s.task_id = sts.task_id
LEFT JOIN task_dependencies td ON s.task_id = td.task_id
WHERE s.parent_id = ?
ORDER BY s.sequence_number
''', (parent_id,))
rows = cursor.fetchall()
result = []
for row in rows:
task_data = dict(row)
# 处理JSON字段
if task_data.get('params'):
try:
task_data['params'] = json.loads(task_data['params'])
except:
task_data['params'] = {}
if task_data.get('tags'):
try:
task_data['tags'] = json.loads(task_data['tags'])
except:
task_data['tags'] = []
# 使用本地时间替换原始时间
task_data['created_at'] = task_data.pop('created_at_local')
task_data['last_modified'] = task_data.pop('last_modified_local')
# 获取依赖任务信息
cursor.execute('''
SELECT td.depends_on, COALESCE(mt.name, st.name) as depends_on_name
FROM task_dependencies td
LEFT JOIN main_tasks mt ON td.depends_on = mt.task_id
LEFT JOIN sub_tasks st ON td.depends_on = st.task_id
WHERE td.task_id = ?
''', (task_data['task_id'],))
dependencies = cursor.fetchall()
if dependencies:
task_data['depends_on'] = {
'task_id': dependencies[0][0],
'name': dependencies[0][1]
}
result.append(task_data)
return result
def get_subtask_by_id(self, task_id: str) -> Optional[Dict]:
"""获取指定ID的子任务"""
cursor = self.conn.cursor()
cursor.execute('''
SELECT s.*,
sts.last_run_time, sts.next_run_time, sts.last_status,
sts.total_runs, sts.success_runs, sts.fail_runs,
sts.avg_duration, sts.last_error, sts.tags,
sts.success_rate,
s.created_at as created_at_local,
s.last_modified as last_modified_local,
td.depends_on
FROM sub_tasks s
LEFT JOIN sub_task_status sts ON s.task_id = sts.task_id
LEFT JOIN task_dependencies td ON s.task_id = td.task_id
WHERE s.task_id = ?
''', (task_id,))
row = cursor.fetchone()
if not row:
return None
task_data = dict(row)
# 处理JSON字段
if task_data.get('params'):
try:
task_data['params'] = json.loads(task_data['params'])
except:
task_data['params'] = {}
if task_data.get('tags'):
try:
task_data['tags'] = json.loads(task_data['tags'])
except:
task_data['tags'] = []
# 使用本地时间替换原始时间
task_data['created_at'] = task_data.pop('created_at_local')
task_data['last_modified'] = task_data.pop('last_modified_local')
# 获取依赖任务信息
cursor.execute('''
SELECT td.depends_on, COALESCE(mt.name, st.name) as depends_on_name
FROM task_dependencies td
LEFT JOIN main_tasks mt ON td.depends_on = mt.task_id
LEFT JOIN sub_tasks st ON td.depends_on = st.task_id
WHERE td.task_id = ?
''', (task_id,))
dependencies = cursor.fetchall()
if dependencies:
task_data['depends_on'] = {
'task_id': dependencies[0][0],
'name': dependencies[0][1]
}
return task_data
def get_sub_task(self, parent_id: str, task_id: str) -> Optional[Dict]:
"""获取指定主任务下的特定子任务"""
cursor = self.conn.cursor()
cursor.execute('''
SELECT s.*,
sts.last_run_time, sts.next_run_time, sts.last_status,
sts.total_runs, sts.success_runs, sts.fail_runs,
sts.avg_duration, sts.last_error, sts.tags,
sts.success_rate,
s.created_at as created_at_local,
s.last_modified as last_modified_local,
td.depends_on
FROM sub_tasks s
LEFT JOIN sub_task_status sts ON s.task_id = sts.task_id
LEFT JOIN task_dependencies td ON s.task_id = td.task_id
WHERE s.task_id = ? AND s.parent_id = ?
''', (task_id, parent_id))
row = cursor.fetchone()
if not row:
return None
task_data = dict(row)
# 处理JSON字段
if task_data.get('params'):
try:
task_data['params'] = json.loads(task_data['params'])
except:
task_data['params'] = {}
if task_data.get('tags'):
try:
task_data['tags'] = json.loads(task_data['tags'])
except:
task_data['tags'] = []
# 使用本地时间替换原始时间
task_data['created_at'] = task_data.pop('created_at_local')
task_data['last_modified'] = task_data.pop('last_modified_local')
# 获取依赖任务信息
cursor.execute('''
SELECT td.depends_on, COALESCE(mt.name, st.name) as depends_on_name
FROM task_dependencies td
LEFT JOIN main_tasks mt ON td.depends_on = mt.task_id
LEFT JOIN sub_tasks st ON td.depends_on = st.task_id
WHERE td.task_id = ?
''', (task_id,))
dependencies = cursor.fetchall()
if dependencies:
task_data['depends_on'] = {
'task_id': dependencies[0][0],
'name': dependencies[0][1]
}
return task_data
    def create_sub_task(self, parent_id: str, task_data: Dict) -> bool:
        """Create a sub-task under an existing main task.

        The new task is appended at the end of the parent's sequence; an
        optional ``depends_on`` dict ({'task_id': ...}) is recorded in
        task_dependencies.

        Args:
            parent_id: ID of the owning main task (must exist).
            task_data: Sub-task config; must include 'task_id'.

        Returns:
            True on success; False when the parent is missing, the task_id
            is absent or taken, or a DB error occurs (rolled back).
        """
        try:
            cursor = self.conn.cursor()
            logger.info(f"开始创建子任务,父任务ID: {parent_id}")
            logger.info(f"任务数据: {json.dumps(task_data, ensure_ascii=False)}")
            # The parent must exist as a main task.
            cursor.execute("SELECT COUNT(*) FROM main_tasks WHERE task_id = ?", (parent_id,))
            if cursor.fetchone()[0] == 0:
                logger.error(f"父任务 '{parent_id}' 不存在")
                return False
            # New tasks go to the end of the parent's sequence.
            cursor.execute("""
                SELECT COALESCE(MAX(sequence_number), 0)
                FROM sub_tasks
                WHERE parent_id = ?
            """, (parent_id,))
            max_sequence = cursor.fetchone()[0]
            logger.info(f"当前最大序号: {max_sequence}")
            # A task_id must be supplied by the caller.
            task_id = task_data.get('task_id')
            if not task_id:
                logger.error("未提供子任务ID")
                return False
            # Reject duplicate sub-task IDs.
            cursor.execute("""
                SELECT COUNT(*) FROM sub_tasks WHERE task_id = ?
            """, (task_id,))
            if cursor.fetchone()[0] > 0:
                logger.error(f"子任务ID '{task_id}' 已存在")
                return False
            params = task_data.get('params', {})
            # Send-email tasks need non-empty params; supply the defaults.
            if task_data.get('endpoint') == '/log/send-email' and not params:
                params = {
                    "content": None,
                    "mode": "simple",
                    "subject": "B站历史记录日报 - {current_time}"
                }
                logger.info(f"为发送邮件子任务 '{task_id}' 自动添加默认参数")
            params_json = json.dumps(params) if params else None
            tags = json.dumps(task_data.get('tags', [])) if task_data.get('tags') else '[]'
            # Insert the sub-task row.
            logger.info(f"开始插入子任务记录: {task_id}")
            cursor.execute("""
                INSERT INTO sub_tasks (
                    task_id, parent_id, name, sequence_number,
                    endpoint, method, params, schedule_type, enabled
                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
            """, (
                task_id,
                parent_id,
                task_data.get('name', task_id),
                max_sequence + 1,
                task_data.get('endpoint', ''),
                task_data.get('method', 'GET'),
                params_json,
                task_data.get('schedule_type', 'daily'),
                task_data.get('enabled', 1)
            ))
            logger.info("子任务记录插入成功")
            # Initialise the status row.
            logger.info(f"开始初始化子任务状态: {task_id}")
            cursor.execute('''
                INSERT INTO sub_task_status (task_id, tags)
                VALUES (?, ?)
            ''', (task_id, tags))
            logger.info("子任务状态初始化成功")
            # Record the optional dependency link.
            if 'depends_on' in task_data:
                logger.info(f"发现依赖关系配置: {json.dumps(task_data.get('depends_on', {}), ensure_ascii=False)}")
                if task_data['depends_on'] and isinstance(task_data['depends_on'], dict):
                    depends_on_id = task_data['depends_on'].get('task_id')
                    logger.info(f"依赖任务ID: {depends_on_id}")
                    if depends_on_id:
                        try:
                            logger.info(f"开始插入依赖关系: {task_id} -> {depends_on_id}")
                            cursor.execute("""
                                INSERT INTO task_dependencies (task_id, depends_on)
                                VALUES (?, ?)
                            """, (task_id, depends_on_id))
                            logger.info("依赖关系插入成功")
                        except Exception as e:
                            logger.error(f"插入依赖关系时出错: {str(e)}")
                            raise
                else:
                    logger.warning("依赖关系数据格式不正确")
            else:
                logger.info("没有依赖关系需要处理")
            self.conn.commit()
            logger.info(f"成功创建子任务 '{task_id}'")
            return True
        except Exception as e:
            self.conn.rollback()
            logger.error(f"创建子任务失败: {str(e)}")
            return False
    def update_subtask(self, task_id: str, task_data: Dict) -> bool:
        """Update selected fields of an existing sub-task.

        Recognised keys: name/endpoint/method/schedule_type/enabled,
        ``params``, ``tags`` (stored on sub_task_status) and
        ``depends_on`` (replaces the task_dependencies row).
        ``last_modified`` is always refreshed.

        Returns:
            True when the UPDATE ran, False when the sub-task is missing or
            a DB error occurred (rolled back).
        """
        try:
            cursor = self.conn.cursor()
            # The target must exist as a sub-task.
            cursor.execute("SELECT COUNT(*) FROM sub_tasks WHERE task_id = ?", (task_id,))
            if cursor.fetchone()[0] == 0:
                logger.error(f"子任务 '{task_id}' 不存在")
                return False
            # Collect SET clauses for the recognised keys.
            fields = []
            values = []
            for key, value in task_data.items():
                if key in ['name', 'endpoint', 'method', 'schedule_type', 'enabled']:
                    fields.append(f"{key} = ?")
                    values.append(value)
                elif key == 'params':
                    # Send-email tasks must always carry usable params.
                    if 'endpoint' in task_data and task_data['endpoint'] == '/log/send-email':
                        if not value:
                            value = {
                                "content": None,
                                "mode": "simple",
                                "subject": "B站历史记录日报 - {current_time}"
                            }
                            logger.info(f"为发送邮件子任务 '{task_id}' 自动添加默认参数")
                    fields.append("params = ?")
                    values.append(json.dumps(value))
                elif key == 'tags':
                    # Tags live on the status row, not on sub_tasks.
                    try:
                        cursor.execute("""
                            UPDATE sub_task_status
                            SET tags = ?
                            WHERE task_id = ?
                        """, (json.dumps(value), task_id))
                    except Exception as e:
                        logger.error(f"更新子任务标签失败: {str(e)}")
            # Always bump the modification timestamp.
            fields.append("last_modified = ?")
            values.append(datetime.now().isoformat())
            # Replace the dependency link when one was supplied.
            if 'depends_on' in task_data:
                # Remove the previous dependency, if any.
                cursor.execute("DELETE FROM task_dependencies WHERE task_id = ?", (task_id,))
                # Record the new one.
                if task_data['depends_on'] and isinstance(task_data['depends_on'], dict):
                    depends_on_id = task_data['depends_on'].get('task_id')
                    if depends_on_id:
                        try:
                            cursor.execute("""
                                INSERT INTO task_dependencies (task_id, depends_on)
                                VALUES (?, ?)
                            """, (task_id, depends_on_id))
                        except Exception as e:
                            logger.error(f"更新依赖关系失败: {str(e)}")
            # WHERE parameter.
            values.append(task_id)
            if fields:
                # Build and run the UPDATE.
                sql = f"UPDATE sub_tasks SET {', '.join(fields)} WHERE task_id = ?"
                cursor.execute(sql, values)
                self.conn.commit()
                logger.info(f"成功更新子任务 '{task_id}'")
                return True
            else:
                logger.warning(f"没有提供有效的更新字段")
                return False
        except Exception as e:
            self.conn.rollback()
            logger.error(f"更新子任务失败: {str(e)}")
            return False
def delete_subtask(self, task_id: str, parent_id: str = None) -> bool:
"""删除子任务,可选指定父任务ID以确保只删除特定主任务下的子任务"""
try:
cursor = self.conn.cursor()
# 检查任务是否存在且为子任务
if parent_id:
cursor.execute("SELECT COUNT(*) FROM sub_tasks WHERE task_id = ? AND parent_id = ?", (task_id, parent_id))
else:
cursor.execute("SELECT COUNT(*) FROM sub_tasks WHERE task_id = ?", (task_id,))
if cursor.fetchone()[0] == 0:
logger.error(f"子任务 '{task_id}' 不存在" + (f" 或不属于主任务 '{parent_id}'" if parent_id else ""))
return False
# 开启事务
self.conn.execute("BEGIN TRANSACTION")
# 删除依赖关系
cursor.execute("DELETE FROM task_dependencies WHERE task_id = ? OR depends_on = ?",
(task_id, task_id))
# 删除子任务状态
cursor.execute("DELETE FROM sub_task_status WHERE task_id = ?", (task_id,))
# 删除子任务执行历史记录
cursor.execute("DELETE FROM sub_task_executions WHERE task_id = ?", (task_id,))
# 删除子任务
if parent_id:
cursor.execute("DELETE FROM sub_tasks WHERE task_id = ? AND parent_id = ?", (task_id, parent_id))
else:
cursor.execute("DELETE FROM sub_tasks WHERE task_id = ?", (task_id,))
# 重新排序剩余子任务
# 如果提供了parent_id,只重新排序该主任务下的子任务
if parent_id:
cursor.execute("""
SELECT task_id, sequence_number
FROM sub_tasks
WHERE parent_id = ?
ORDER BY sequence_number
""", (parent_id,))
subtasks = cursor.fetchall()
# 更新序号
for i, task in enumerate(subtasks, 1):
if task[1] != i: # 如果序号不匹配才更新
cursor.execute("""
UPDATE sub_tasks SET sequence_number = ? WHERE task_id = ?
""", (i, task[0]))
else:
# 按父任务分组重新排序所有子任务
cursor.execute("""
SELECT task_id, parent_id, sequence_number
FROM sub_tasks
ORDER BY parent_id, sequence_number
""")
subtasks = cursor.fetchall()
# 按父任务分组
subtasks_by_parent = {}
for subtask in subtasks:
if subtask[1] not in subtasks_by_parent:
subtasks_by_parent[subtask[1]] = []
subtasks_by_parent[subtask[1]].append(subtask)
# 更新序号
for parent_id, tasks in subtasks_by_parent.items():
for i, task in enumerate(tasks, 1):
if task[2] != i: # 如果序号不匹配才更新
cursor.execute("""
UPDATE sub_tasks SET sequence_number = ? WHERE task_id = ?
""", (i, task[0]))
self.conn.commit()
logger.info(f"成功删除子任务 '{task_id}'" + (f" (主任务: {parent_id})" if parent_id else ""))
return True
except Exception as e:
self.conn.rollback()
logger.error(f"删除子任务失败: {str(e)}")
return False
def delete_sub_task(self, parent_id: str, task_id: str) -> bool:
"""删除指定主任务下的特定子任务,确保ID相同时不会错删除主任务"""
return self.delete_subtask(task_id, parent_id)
def reorder_subtasks(self, parent_id: str, task_order: List[str]) -> bool:
"""重新排序子任务"""
try:
cursor = self.conn.cursor()
# 检查主任务是否存在
cursor.execute("SELECT COUNT(*) FROM main_tasks WHERE task_id = ?", (parent_id,))
if cursor.fetchone()[0] == 0:
logger.error(f"主任务 '{parent_id}' 不存在")
return False
# 获取所有子任务
cursor.execute("""
SELECT task_id FROM sub_tasks WHERE parent_id = ?
""", (parent_id,))
existing_subtasks = [row[0] for row in cursor.fetchall()]
# 验证输入的任务列表
if set(task_order) != set(existing_subtasks):
logger.error(f"提供的任务列表与实际子任务不匹配")
return False
# 更新序号
for i, task_id in enumerate(task_order, 1):
cursor.execute("""
UPDATE sub_tasks SET sequence_number = ? WHERE task_id = ?
""", (i, task_id))
self.conn.commit()
logger.info(f"成功重排主任务 '{parent_id}' 的子任务顺序")
return True
except Exception as e:
self.conn.rollback()
logger.error(f"重排子任务顺序失败: {str(e)}")
return False
def is_main_task(self, task_id: str) -> bool:
"""检查指定任务是否为主任务"""
cursor = self.conn.cursor()
cursor.execute("SELECT COUNT(*) FROM main_tasks WHERE task_id = ?", (task_id,))
row = cursor.fetchone()
return row and row[0] > 0
def get_task_execution_history_enhanced(
self,
task_id: str,
include_subtasks: bool = True,
conditions: dict = None,
page: int = 1,
page_size: int = 20
) -> Dict:
"""获取任务的执行历史(增强版)"""
cursor = self.conn.cursor()
params = []
where_clauses = []
# 基础查询
if self.is_main_task(task_id):
# 主任务查询
base_query = """
SELECT te.*, mt.name as task_name, 'main' as task_type, NULL as parent_id
FROM task_executions te
LEFT JOIN main_tasks mt ON te.task_id = mt.task_id
"""
where_clauses.append("te.task_id = ?")
params.append(task_id)
if include_subtasks:
# 添加子任务历史
base_query = """
SELECT te.*, mt.name as task_name, 'main' as task_type, NULL as parent_id
FROM task_executions te
LEFT JOIN main_tasks mt ON te.task_id = mt.task_id
WHERE te.task_id = ?
UNION ALL
SELECT ste.*, st.name as task_name, 'sub' as task_type, st.parent_id
FROM sub_task_executions ste
LEFT JOIN sub_tasks st ON ste.task_id = st.task_id
WHERE st.parent_id = ?
"""
params = [task_id, task_id] # 重置参数列表
else:
# 子任务查询
base_query = """
SELECT ste.*, st.name as task_name, 'sub' as task_type, st.parent_id
FROM sub_task_executions ste
LEFT JOIN sub_tasks st ON ste.task_id = st.task_id
"""
where_clauses.append("ste.task_id = ?")
params.append(task_id)
# 添加条件过滤
if conditions:
if conditions.get('status'):
where_clauses.append("status = ?")
params.append(conditions['status'])
if conditions.get('start_date'):
where_clauses.append("start_time >= ?")
params.append(conditions['start_date'])
if conditions.get('end_date'):
where_clauses.append("start_time <= ?")
params.append(conditions['end_date'])
# 构建完整查询
if not include_subtasks or not self.is_main_task(task_id):
# 如果不包含子任务或者是子任务查询,使用 WHERE 子句
where_clause = " AND ".join(where_clauses) if where_clauses else "1=1"
final_query = f"{base_query} WHERE {where_clause}"
else:
# 如果是包含子任务的主任务查询,base_query 已经包含了完整的查询条件
final_query = base_query
# 获取总记录数
count_query = f"SELECT COUNT(*) FROM ({final_query})"
cursor.execute(count_query, params)
total_count = cursor.fetchone()[0]
# 添加分页和排序
final_query = f"""
{final_query}
ORDER BY start_time DESC
LIMIT ? OFFSET ?
"""
params.extend([page_size, (page - 1) * page_size])
# 执行查询
cursor.execute(final_query, params)
rows = cursor.fetchall()
# 处理结果
records = []
for row in rows:
record = dict(row)
# 处理输出字段(如果存在)
if record.get('output'):
try:
record['output'] = json.loads(record['output'])
except:
pass
records.append(record)
return {
'records': records,
'total': total_count
}
def get_task_dependencies(self, task_id: str) -> List[str]:
"""获取任务的依赖项"""
cursor = self.conn.cursor()
cursor.execute("""
SELECT depends_on FROM task_dependencies WHERE task_id = ?
""", (task_id,))
return [row[0] for row in cursor.fetchall()]
def record_task_execution_enhanced(self,
task_id: str,
start_time: str,
end_time: str = None,
duration: float = None,
status: str = "success",
error_message: str = None,
triggered_by: str = None,
output: str = None,
parent_execution_id: int = None,
next_run_time: str = None) -> int:
"""记录任务执行(增强版)"""
try:
cursor = self.conn.cursor()
# 如果提供了开始时间和结束时间,但没有提供持续时间,尝试计算
if start_time and end_time and duration is None:
try:
start_dt = datetime.fromisoformat(start_time)
end_dt = datetime.fromisoformat(end_time)
duration = (end_dt - start_dt).total_seconds()
except Exception as e:
logger.warning(f"计算任务持续时间失败: {str(e)}")
# 确定任务类型(主任务或子任务)
is_sub_task = not self.is_main_task(task_id)
# 获取任务配置以确定下次执行时间
task_config = None
if is_sub_task:
task_config = self.get_subtask_by_id(task_id)
else:
task_config = self.get_main_task_by_id(task_id)
# 如果没有提供 next_run_time,则计算它
if next_run_time is None:
schedule_type = task_config.get('schedule_type') if task_config else None
# 只有主任务时才计算下次执行时间
if not is_sub_task:
if schedule_type == 'daily':
schedule_time = task_config.get('schedule_time')
if schedule_time:
try:
current_dt = datetime.fromisoformat(start_time)
schedule_parts = schedule_time.split(':')
next_dt = current_dt.replace(
hour=int(schedule_parts[0]),
minute=int(schedule_parts[1]),
second=0,
microsecond=0
)
# 如果当前时间已经过了今天的调度时间,设置为明天
if current_dt >= next_dt:
next_dt = next_dt + timedelta(days=1)
next_run_time = next_dt.strftime('%Y-%m-%d %H:%M:%S')
logger.info(f"计算得到下次执行时间: {next_run_time}")
except Exception as e:
logger.error(f"计算下次执行时间失败: {str(e)}")
next_run_time = None
elif schedule_type == 'interval':
# 处理间隔任务
interval_value = task_config.get('interval_value')
interval_unit = task_config.get('interval_unit')
if interval_value and interval_unit:
try:
current_dt = datetime.fromisoformat(start_time if end_time is None else end_time)
# 根据间隔值和单位计算下次执行时间
if interval_unit == 'minutes':
next_dt = current_dt + timedelta(minutes=interval_value)
elif interval_unit == 'hours':
next_dt = current_dt + timedelta(hours=interval_value)
elif interval_unit == 'days':
next_dt = current_dt + timedelta(days=interval_value)
elif interval_unit == 'weeks':
next_dt = current_dt + timedelta(weeks=interval_value)
elif interval_unit == 'months':
# 手动计算月份
year = current_dt.year
month = current_dt.month + interval_value
# 处理月份溢出
while month > 12:
month -= 12
year += 1
# 处理月份天数问题(例如,1月31日 + 1个月)
day = min(current_dt.day, calendar.monthrange(year, month)[1])
next_dt = current_dt.replace(year=year, month=month, day=day)
elif interval_unit == 'years':
# 处理闰年问题
year = current_dt.year + interval_value
month = current_dt.month
day = min(current_dt.day, calendar.monthrange(year, month)[1])
next_dt = current_dt.replace(year=year, day=day)
else:
logger.warning(f"不支持的间隔单位: {interval_unit}")
next_dt = None
if next_dt:
next_run_time = next_dt.strftime('%Y-%m-%d %H:%M:%S')
logger.info(f"计算得到间隔任务下次执行时间: {next_run_time},间隔: {interval_value} {interval_unit}")
except Exception as e:
logger.error(f"计算间隔任务下次执行时间失败: {str(e)}")
next_run_time = None
# 根据任务类型选择表
table_name = "sub_task_executions" if is_sub_task else "task_executions"
status_table = "sub_task_status" if is_sub_task else "task_status"
# 构建插入语句
fields = ["task_id", "start_time", "end_time", "duration",
"status", "error_message", "triggered_by", "output"]
values = [task_id, start_time, end_time, duration,
status, error_message, triggered_by, output]
if next_run_time is not None:
fields.append("next_run_time")
values.append(next_run_time)
# 如果提供了父执行ID,添加到字段列表
if parent_execution_id is not None:
fields.append("parent_execution_id")
values.append(parent_execution_id)
# 构建SQL语句
placeholders = ["?" for _ in values]
sql = f"""
INSERT INTO {table_name}
({', '.join(fields)})
VALUES ({', '.join(placeholders)})
"""
# 执行插入
cursor.execute(sql, values)
execution_id = cursor.lastrowid
# 更新任务状态
# 先获取当前状态以计算成功率和平均执行时间
cursor.execute(f'''
SELECT total_runs, success_runs, fail_runs, avg_duration
FROM {status_table}
WHERE task_id = ?
''', (task_id,))
current_stats = cursor.fetchone()
if current_stats:
total_runs = current_stats[0] + 1
success_runs = current_stats[1] + (1 if status == 'success' else 0)
fail_runs = current_stats[2] + (1 if status != 'success' else 0)
success_rate = (success_runs / total_runs) * 100 if total_runs > 0 else 0
# 计算新的平均执行时间
current_avg_duration = current_stats[3] or 0
if duration is not None:
avg_duration = ((current_avg_duration * (total_runs - 1)) + duration) / total_runs
else:
avg_duration = current_avg_duration
cursor.execute(f'''
UPDATE {status_table}
SET last_run_time = ?,
next_run_time = ?,
last_status = ?,
last_error = ?,
total_runs = ?,
success_runs = ?,
fail_runs = ?,
success_rate = ?,
avg_duration = ?
WHERE task_id = ?
''', (
start_time,
next_run_time,
status,
error_message,
total_runs,
success_runs,
fail_runs,
success_rate,
avg_duration,
task_id
))
else:
# 如果没有状态记录,创建一个新的
cursor.execute(f'''
INSERT INTO {status_table}
(task_id, last_run_time, next_run_time, last_status, last_error,
total_runs, success_runs, fail_runs, success_rate, avg_duration)
VALUES (?, ?, ?, ?, ?, 1, ?, ?, ?, ?)
''', (
task_id,
start_time,
next_run_time,
status,
error_message,
1 if status == 'success' else 0,
1 if status != 'success' else 0,
100 if status == 'success' else 0,
duration or 0
))
self.conn.commit()
return execution_id
except Exception as e:
self.conn.rollback()
logger.error(f"记录任务执行失败: {str(e)}")
return -1
def _calculate_next_run_time(self, task_data: Dict) -> Optional[datetime]:
"""计算下次执行时间"""
schedule_type = task_data.get('schedule_type')
if not schedule_type:
return None
now = datetime.now()
if schedule_type == 'daily':
schedule_time = task_data.get('schedule_time')
if not schedule_time:
return None
hour, minute = map(int, schedule_time.split(':'))
next_run = now.replace(hour=hour, minute=minute, second=0, microsecond=0)
if next_run <= now:
next_run += timedelta(days=1)
elif schedule_type == 'interval':
# 优先使用interval_value和interval_unit字段,如果不存在则尝试使用interval和unit字段
interval_value = task_data.get('interval_value', task_data.get('interval'))
interval_unit = task_data.get('interval_unit', task_data.get('unit'))
if not interval_value:
logger.error(f"间隔任务缺少间隔值: {task_data}")
return None
if not interval_unit:
logger.error(f"间隔任务缺少间隔单位: {task_data}")
return None
logger.info(f"计算间隔任务下次执行时间: 间隔值={interval_value}, 单位={interval_unit}")
if interval_value and interval_unit:
try:
now = datetime.now()
# 根据间隔值和单位计算下次执行时间
if interval_unit == 'minutes':
next_dt = now + timedelta(minutes=interval_value)
elif interval_unit == 'hours':
next_dt = now + timedelta(hours=interval_value)
elif interval_unit == 'days':
next_dt = now + timedelta(days=interval_value)
elif interval_unit == 'weeks':
next_dt = now + timedelta(weeks=interval_value)
elif interval_unit == 'months':
# 手动计算月份
year = now.year
month = now.month + interval_value
# 处理月份溢出
while month > 12:
month -= 12
year += 1
# 处理月份天数问题
day = min(now.day, calendar.monthrange(year, month)[1])
next_dt = now.replace(year=year, month=month, day=day)
elif interval_unit == 'years':
# 处理闰年问题
year = now.year + interval_value
month = now.month
day = min(now.day, calendar.monthrange(year, month)[1])
next_dt = now.replace(year=year, day=day)
else:
logger.warning(f"不支持的间隔单位: {interval_unit}")
return None
next_run_time = next_dt.strftime('%Y-%m-%d %H:%M:%S')
logger.info(f"计算得到间隔任务下次执行时间: {next_run_time},间隔: {interval_value} {interval_unit}")
return next_dt
except Exception as e:
logger.error(f"计算间隔任务下次执行时间失败: {str(e)}")
return None
else:
logger.error(f"间隔任务缺少必要参数: interval_value={interval_value}, interval_unit={interval_unit}")
return None
elif schedule_type == 'once':
delay = task_data.get('delay', 0)
next_run = now + timedelta(seconds=delay)
else:
return None
return next_run
def update_next_execution_time(self, task_id: str, next_run_time: Optional[str] = None):
"""
更新任务的下次执行时间
Args:
task_id: 任务ID
next_run_time: 下次执行时间,如果为None则自动计算
Returns:
bool: 更新是否成功
"""
try:
# 确定任务类型(主任务或子任务)
is_sub_task = not self.is_main_task(task_id)
# 如果是子任务,不处理下次执行时间
if is_sub_task:
logger.warning(f"子任务 {task_id} 不支持更新下次执行时间")
return False
# 获取任务配置
task_config = self.get_main_task_by_id(task_id)
if not task_config:
logger.error(f"任务 {task_id} 不存在")
return False
# 如果没有提供 next_run_time,则计算它
if next_run_time is None:
schedule_type = task_config.get('schedule_type')
if schedule_type == 'daily':
schedule_time = task_config.get('schedule_time')
if schedule_time:
try:
now = datetime.now()
schedule_parts = schedule_time.split(':')
next_dt = now.replace(
hour=int(schedule_parts[0]),
minute=int(schedule_parts[1]),
second=0,
microsecond=0
)
# 如果当前时间已经过了今天的调度时间,设置为明天
if now >= next_dt:
next_dt = next_dt + timedelta(days=1)
next_run_time = next_dt.strftime('%Y-%m-%d %H:%M:%S')
logger.info(f"计算得到下次执行时间: {next_run_time}")
except Exception as e:
logger.error(f"计算下次执行时间失败: {str(e)}")
return False
elif schedule_type == 'interval':
# 处理间隔任务
interval_value = task_config.get('interval_value')
interval_unit = task_config.get('interval_unit')
# 如果interval_value和interval_unit不存在,尝试使用旧的字段名
if interval_value is None:
interval_value = task_config.get('interval')
logger.info(f"使用旧的interval字段: {interval_value}")
if interval_unit is None:
interval_unit = task_config.get('unit')
logger.info(f"使用旧的unit字段: {interval_unit}")
logger.info(f"准备计算间隔任务时间: 任务={task_id}, 间隔值={interval_value}, 单位={interval_unit}")
if interval_value and interval_unit:
try:
now = datetime.now()
# 根据间隔值和单位计算下次执行时间
if interval_unit == 'minutes':
next_dt = now + timedelta(minutes=interval_value)
elif interval_unit == 'hours':
next_dt = now + timedelta(hours=interval_value)
elif interval_unit == 'days':
next_dt = now + timedelta(days=interval_value)
elif interval_unit == 'weeks':
next_dt = now + timedelta(weeks=interval_value)
elif interval_unit == 'months':
# 手动计算月份
year = now.year
month = now.month + interval_value
# 处理月份溢出
while month > 12:
month -= 12
year += 1
# 处理月份天数问题
day = min(now.day, calendar.monthrange(year, month)[1])
next_dt = now.replace(year=year, month=month, day=day)
elif interval_unit == 'years':
# 处理闰年问题
year = now.year + interval_value
month = now.month
day = min(now.day, calendar.monthrange(year, month)[1])
next_dt = now.replace(year=year, day=day)
else:
logger.warning(f"不支持的间隔单位: {interval_unit}")
return False
next_run_time = next_dt.strftime('%Y-%m-%d %H:%M:%S')
logger.info(f"计算得到间隔任务下次执行时间: {next_run_time},间隔: {interval_value} {interval_unit}")
except Exception as e:
logger.error(f"计算间隔任务下次执行时间失败: {str(e)}")
return False
else:
logger.error(f"间隔任务缺少必要参数: interval_value={interval_value}, interval_unit={interval_unit}")
return False
else:
logger.warning(f"任务 {task_id} 的计划类型 {schedule_type} 不支持自动计算下次执行时间")
return False
# 更新数据库中的下次执行时间
cursor = self.conn.cursor()
cursor.execute('''
UPDATE task_status
SET next_run_time = ?
WHERE task_id = ?
''', (next_run_time, task_id))
# 如果没有状态记录,创建一个新的
if cursor.rowcount == 0:
cursor.execute('''
INSERT INTO task_status
(task_id, next_run_time)
VALUES (?, ?)
''', (task_id, next_run_time))
self.conn.commit()
logger.info(f"成功更新任务 {task_id} 的下次执行时间: {next_run_time}")
return True
except Exception as e:
self.conn.rollback()
logger.error(f"更新任务下次执行时间失败: {str(e)}")
return False
def load_config(self):
    """Load the scheduler configuration from ``scheduler_config.yaml``.

    On success the parsed configuration is stored on ``self.config``.

    Returns:
        bool: True when the file exists and contains a ``tasks`` section;
        False otherwise (missing file, unreadable YAML, or no tasks).
    """
    try:
        # Fix: the module header does not import yaml, so referencing it here
        # raised NameError. Import at function scope, matching the existing
        # function-scope import style of this method.
        import yaml

        from scripts.utils import get_config_path
        config_path = get_config_path('scheduler_config.yaml')
        if not os.path.exists(config_path):
            return False
        with open(config_path, 'r', encoding='utf-8') as f:
            config = yaml.safe_load(f)
        if 'tasks' not in config:
            return False
        self.config = config
        return True
    except Exception as e:
        logger.error(f"加载配置文件失败: {e}")
        return False
|
2977094657/BilibiliHistoryFetcher
| 5,807
|
scripts/scheduler.py
|
import logging
import time
from datetime import datetime
from pathlib import Path
import requests
import schedule
import yaml
# Configure root logging once at import time: INFO level, timestamped format,
# mirrored to both scheduler.log (UTF-8) and the console.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
    handlers=[
        logging.FileHandler('scheduler.log', encoding='utf-8'),
        logging.StreamHandler()
    ]
)
class TaskScheduler:
    """Drives scheduled HTTP calls against the local API from a YAML config.

    Tasks are described in ``scheduler_config.yaml`` (see
    ``create_default_config`` for the expected shape) and executed by
    requesting the configured endpoint on ``self.base_url``.
    """

    def __init__(self, config_path=None):
        """Create a scheduler.

        Args:
            config_path: Path to the YAML config file; when None it is
                resolved via ``scripts.utils.get_config_path``.
        """
        if config_path is None:
            from scripts.utils import get_config_path
            config_path = get_config_path('scheduler_config.yaml')
        self.config_path = config_path
        # Fix: defaults are assigned BEFORE load_config(). The original set
        # them afterwards, silently discarding the loaded base_url/tasks on
        # every start.
        self.base_url = "http://localhost:8899"
        self.tasks = {}
        self.task_chains = {}
        self.load_config()
        self._init_task_status()
        self.start_time = datetime.now()
        logging.info(f"调度器初始化完成,启动时间: {self.start_time}")

    def _init_task_status(self):
        """Create empty runtime-status entries for every configured task.

        Fix: the original ``__init__`` called this method without ever
        defining it, so every construction raised AttributeError.
        """
        self.task_status = {
            name: {'last_run': None, 'last_result': None}
            for name in self.tasks
        }

    def load_config(self):
        """Load ``base_url`` and the task table from ``self.config_path``.

        Errors are logged and leave the previously-set values in place.
        """
        try:
            with open(self.config_path, 'r', encoding='utf-8') as f:
                config = yaml.safe_load(f)
            self.base_url = config.get('base_url', self.base_url)
            self.tasks = config.get('tasks', {})
            logging.info(f"成功加载配置文件: {self.config_path}")
            logging.info(f"已配置的任务: {list(self.tasks.keys())}")
        except Exception as e:
            logging.error(f"加载配置文件失败: {e}")

    def execute_task(self, task_name):
        """Execute one configured task by calling its HTTP endpoint.

        Args:
            task_name: Key into ``self.tasks``; unknown names are logged
                and ignored.
        """
        if task_name not in self.tasks:
            logging.error(f"未找到任务: {task_name}")
            return
        task = self.tasks[task_name]
        url = f"{self.base_url}{task['endpoint']}"
        method = task.get('method', 'GET').upper()
        params = task.get('params', {})
        try:
            logging.info(f"开始执行任务: {task_name}")
            logging.info(f"请求: {method} {url}")
            logging.info(f"参数: {params}")
            # GET sends params as query string; POST sends them as JSON body.
            if method == 'GET':
                response = requests.get(url, params=params)
            elif method == 'POST':
                response = requests.post(url, json=params)
            else:
                logging.error(f"不支持的HTTP方法: {method}")
                return
            if response.status_code == 200:
                logging.info(f"任务 {task_name} 执行成功")
                logging.info(f"响应: {response.json()}")
            else:
                logging.error(f"任务 {task_name} 执行失败: {response.status_code}")
                logging.error(f"错误信息: {response.text}")
        except Exception as e:
            logging.error(f"执行任务 {task_name} 时发生错误: {e}")

    def schedule_tasks(self):
        """Register every configured task with the ``schedule`` library."""
        for task_name, task in self.tasks.items():
            if 'schedule' not in task:
                continue
            schedule_info = task['schedule']
            schedule_type = schedule_info.get('type')
            if schedule_type == 'daily':
                time_str = schedule_info.get('time', '00:00')
                schedule.every().day.at(time_str).do(self.execute_task, task_name)
                logging.info(f"已设置每日 {time_str} 执行任务: {task_name}")
            elif schedule_type == 'interval':
                interval = schedule_info.get('interval', 1)
                unit = schedule_info.get('unit', 'hours')
                if unit == 'minutes':
                    schedule.every(interval).minutes.do(self.execute_task, task_name)
                elif unit == 'hours':
                    schedule.every(interval).hours.do(self.execute_task, task_name)
                elif unit == 'days':
                    schedule.every(interval).days.do(self.execute_task, task_name)
                elif unit == 'weeks':
                    schedule.every(interval).weeks.do(self.execute_task, task_name)
                else:
                    # Fix: the `schedule` library exposes no .months/.years
                    # units; the original calls raised AttributeError at
                    # setup time. Log and skip instead of crashing.
                    logging.error(f"不支持的时间单位: {unit},任务 {task_name} 未调度")
                    continue
                logging.info(f"已设置每 {interval} {unit} 执行任务: {task_name}")

    def run(self):
        """Blocking loop: run due jobs once per second; back off on errors."""
        self.schedule_tasks()
        logging.info("调度器已启动,等待执行任务...")
        while True:
            try:
                schedule.run_pending()
                time.sleep(1)
            except Exception as e:
                logging.error(f"调度器运行错误: {e}")
                time.sleep(60)  # wait one minute after an error, then resume
def create_default_config():
    """Write a starter ``scheduler_config.yaml`` when absent; return its path."""
    tasks = {
        'fetch_history': {
            'name': '获取历史记录',
            'endpoint': '/fetch/bili-history',
            'method': 'GET',
            'params': {},
            'schedule': {'type': 'daily', 'time': '00:00'},
        },
        'clean_data': {
            'name': '清理数据',
            'endpoint': '/clean/data',
            'method': 'POST',
            'params': {},
            'schedule': {'type': 'interval', 'interval': 12, 'unit': 'hours'},
        },
    }
    default_config = {'base_url': 'http://localhost:8000', 'tasks': tasks}

    cfg_dir = Path('config')
    cfg_dir.mkdir(exist_ok=True)
    cfg_file = cfg_dir / 'scheduler_config.yaml'
    # Never overwrite an existing configuration.
    if not cfg_file.exists():
        with open(cfg_file, 'w', encoding='utf-8') as fh:
            yaml.dump(default_config, fh, allow_unicode=True, sort_keys=False)
        logging.info(f"已创建默认配置文件: {cfg_file}")
    return cfg_file
# Allow running this module directly as a standalone scheduler process.
if __name__ == '__main__':
    # Make sure a configuration file exists before starting.
    config_path = create_default_config()
    # Create and run the scheduler (blocks forever).
    scheduler = TaskScheduler(config_path)
    scheduler.run()
|
2977094657/BilibiliHistoryFetcher
| 16,111
|
scripts/import_sqlite.py
|
import json
import logging
import os
import sqlite3
import threading
import time
from datetime import datetime
from config.sql_statements_sqlite import CREATE_TABLE_DEFAULT, CREATE_INDEXES, INSERT_DATA
from scripts.utils import load_config, get_base_path, get_output_path
# Load the application configuration once at import time (supplies db_file).
config = load_config()

# Module-level logger; all functions below report through it.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
# True when this module is executed as a script (affects path resolution).
IS_SCRIPT_RUN = True

# NOTE(review): this local definition shadows the get_base_path imported from
# scripts.utils above — presumably intentional for standalone runs; confirm.
def get_base_path():
    return os.path.dirname(os.path.dirname(os.path.abspath(__file__))) if IS_SCRIPT_RUN else os.getcwd()
# 定义读取分类信息的函数
def load_categories(config_path=None):
if config_path is None:
base_path = get_base_path()
config_path = os.path.join(base_path, 'config', 'categories.json')
"""从 categories.json 文件中加载分类信息"""
try:
with open(config_path, 'r', encoding='utf-8') as f:
categories = json.load(f)
duplicated_tags = set(categories.get('duplicated_tags', []))
unique_tag_to_main = categories.get('unique_tag_to_main', {})
logger.info(f"成功加载分类信息:{config_path}")
return duplicated_tags, unique_tag_to_main
except Exception as e:
logger.error(f"加载分类信息时发生错误: {e}")
return set(), {}
# Load category data once at import; consumed by import_data_from_json.
duplicated_tags, unique_tag_to_main = load_categories()
class SnowflakeIDGenerator:
    """Thread-safe Snowflake-style unique ID generator.

    Layout (64 bits): 41-bit millisecond timestamp since the custom epoch,
    then 5-bit datacenter id, 5-bit machine id, and a 12-bit per-millisecond
    sequence — the standard Snowflake bit layout.

    Fix: the original masked ``machine_id`` but never folded it into the
    generated ID (only datacenter + sequence occupied the low 22 bits), so
    two machines in the same datacenter could emit colliding IDs.
    """

    def __init__(self, machine_id=1, datacenter_id=1):
        self.lock = threading.Lock()  # serializes get_id() across threads
        # 5 bits each, matching the standard Snowflake layout.
        self.machine_id = machine_id & 0x1F
        self.datacenter_id = datacenter_id & 0x1F
        self.sequence = 0
        self.last_timestamp = -1
        self.epoch = 1609459200000  # 2021-01-01 00:00:00 UTC, in milliseconds

    def _current_millis(self):
        """Current wall-clock time in milliseconds."""
        return int(time.time() * 1000)

    def get_id(self):
        """Return a new unique, monotonically increasing 64-bit ID.

        Raises:
            Exception: when the system clock moved backwards.
        """
        with self.lock:
            timestamp = self._current_millis()
            if timestamp < self.last_timestamp:
                raise Exception("时钟向后移动。拒绝生成 id。")
            if timestamp == self.last_timestamp:
                # Same millisecond: bump the 12-bit sequence; on overflow,
                # busy-wait for the next millisecond.
                self.sequence = (self.sequence + 1) & 0xFFF
                if self.sequence == 0:
                    while timestamp <= self.last_timestamp:
                        timestamp = self._current_millis()
            else:
                self.sequence = 0
            self.last_timestamp = timestamp
            return (
                ((timestamp - self.epoch) << 22)
                | (self.datacenter_id << 17)
                | (self.machine_id << 12)
                | self.sequence
            )
# Shared module-wide generator used for primary keys on inserted rows.
id_generator = SnowflakeIDGenerator(machine_id=1, datacenter_id=1)
def get_years():
    """Return ``(current_year, previous_year)`` based on the local clock."""
    this_year = datetime.now().year
    return this_year, this_year - 1
def create_connection(db_file):
    """Open a SQLite connection to *db_file*, creating parent dirs if needed.

    Returns:
        The connection object, or None when sqlite3 reports an error.
    """
    try:
        os.makedirs(os.path.dirname(db_file), exist_ok=True)
        connection = sqlite3.connect(db_file)
        logger.info(f"成功连接到SQLite数据库: {db_file}")
        return connection
    except sqlite3.Error as e:
        logger.error(f"连接数据库时发生错误: {e}")
        return None
def table_exists(conn, table_name):
    """Return True when *table_name* is present in sqlite_master."""
    cur = conn.cursor()
    cur.execute("""
        SELECT count(name) FROM sqlite_master
        WHERE type='table' AND name=?
    """, (table_name,))
    (match_count,) = cur.fetchone()
    return match_count > 0
def create_table(conn, table_name):
    """Create *table_name* plus its indexes from the project SQL templates."""
    cur = conn.cursor()
    # Both the table DDL and the index DDL come from
    # config.sql_statements_sqlite, parameterized by table name.
    cur.execute(CREATE_TABLE_DEFAULT.format(table=table_name))
    for index_stmt in CREATE_INDEXES:
        cur.execute(index_stmt.format(table=table_name))
    conn.commit()
    logger.info(f"成功创建表 {table_name} 及其索引")
def batch_insert_data(conn, table_name, data_batch):
    """Insert *data_batch* rows into *table_name*.

    Returns:
        int: number of rows written, or 0 after a rollback on error.
    """
    cur = conn.cursor()
    # 34 bound columns, matching the project INSERT template.
    insert_sql = INSERT_DATA.format(
        table=table_name,
        placeholders=','.join('?' * 34),
    )
    try:
        cur.executemany(insert_sql, data_batch)
        conn.commit()
        return len(data_batch)
    except sqlite3.Error as e:
        logger.error(f"插入数据时发生错误: {e}")
        conn.rollback()
        return 0
def get_last_import_time():
    """Return the unix timestamp of the previous import, or 0 when unknown."""
    try:
        marker = os.path.join(get_output_path(), 'last_import.json')
        if not os.path.exists(marker):
            logger.info("未找到last_import.json文件,将导入所有数据")
            return 0
        with open(marker, 'r', encoding='utf-8') as fh:
            return json.load(fh).get('last_import_time', 0)
    except Exception as e:
        logger.error(f"读取上次导入时间失败: {e}")
        return 0
def import_data_from_json(conn, table_name, file_path, last_import_time=0, batch_size=1000, sync_deleted=False):
    """Import watch-history records from one JSON file into per-year tables.

    Records are routed to ``{table_name}_{year}`` based on each record's
    ``view_at`` timestamp. Records are skipped when they are older than
    *last_import_time*, already present (same bvid + view_at), or locally
    deleted (unless *sync_deleted* is True).

    Args:
        conn: Open SQLite connection.
        table_name: Base table name; the year is appended per record.
        file_path: JSON file containing a list of history items.
        last_import_time: Unix timestamp; only newer records are imported.
        batch_size: Rows buffered per year before a bulk insert.
        sync_deleted: When True, locally deleted records are re-imported.

    Returns:
        int: number of rows inserted (0 on read/database failure).
    """
    try:
        # Try several encodings; source files may come from different
        # platforms/tools.
        data = None
        for encoding in ['utf-8', 'gbk', 'utf-8-sig']:
            try:
                with open(file_path, 'r', encoding=encoding) as f:
                    data = json.load(f)
                break
            except (UnicodeDecodeError, json.JSONDecodeError):
                continue
        if data is None:
            logger.error(f"无法读取文件 {file_path}:所有编码尝试都失败")
            return 0
        total_inserted = 0
        # Records are grouped into per-year buckets before batch insertion.
        data_by_year = {}
        # NOTE(review): has_new_records is set below but never read in this
        # function — confirm whether it was meant to gate anything.
        has_new_records = False
        # Collect (bvid, view_at) pairs already stored for this and last
        # year; used as an in-memory de-duplication set.
        cursor = conn.cursor()
        existing_records = set()
        for year in range(datetime.now().year - 1, datetime.now().year + 1):
            table = f"bilibili_history_{year}"
            if table_exists(conn, table):
                cursor.execute(f"SELECT bvid, view_at FROM {table}")
                existing_records.update((bvid, view_at) for bvid, view_at in cursor.fetchall())
        # Load records the user deleted locally so they are not re-imported.
        deleted_records = set()
        if not sync_deleted:
            # Make sure the deletion-tracking table exists first.
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS deleted_history (
                    id INTEGER PRIMARY KEY,
                    bvid TEXT NOT NULL,
                    view_at INTEGER NOT NULL,
                    delete_time INTEGER NOT NULL,
                    UNIQUE(bvid, view_at)
                )
            """)
            cursor.execute("SELECT bvid, view_at FROM deleted_history")
            deleted_records = set((bvid, view_at) for bvid, view_at in cursor.fetchall())
            logger.info(f"已加载 {len(deleted_records)} 条已删除的记录")
        # Walk every record, filtering by timestamp / duplicates / deletions.
        for item in data:
            # A record without a view time cannot be placed in a year table.
            view_at = item.get('view_at', 0)
            if view_at == 0:
                continue
            # Incremental import: only records newer than the previous run.
            if last_import_time > 0 and view_at <= last_import_time:
                logger.debug(f"跳过旧记录: {item.get('title')} - {datetime.fromtimestamp(view_at)}")
                continue
            # Skip records whose (bvid, view_at) pair is already stored.
            history = item.get('history', {})
            bvid = history.get('bvid', '')
            if (bvid, view_at) in existing_records:
                logger.debug(f"跳过重复记录: {item.get('title')} - {datetime.fromtimestamp(view_at)}")
                continue
            # Skip records the user deleted, unless explicitly re-syncing.
            if not sync_deleted and (bvid, view_at) in deleted_records:
                logger.debug(f"跳过已删除的记录: {item.get('title')} - {datetime.fromtimestamp(view_at)}")
                continue
            has_new_records = True
            year = datetime.fromtimestamp(view_at).year
            if year not in data_by_year:
                data_by_year[year] = []
            # Resolve the main category from tag_name for normal videos
            # ("archive" business); ambiguous/unknown tags get '待定' (TBD).
            main_category = None
            business = history.get('business', '')
            tag_name = item.get('tag_name', '').strip()
            if business == 'archive':
                if tag_name in unique_tag_to_main:
                    main_category = unique_tag_to_main[tag_name]
                elif tag_name in duplicated_tags:
                    main_category = '待定'
                else:
                    main_category = '待定'
            # duration/progress live on the top-level item, not on `history`.
            duration = item.get('duration', 0)   # total video length
            progress = item.get('progress', 0)   # watched progress
            record = (
                id_generator.get_id(),
                item.get('title', ''),
                item.get('long_title', ''),
                item.get('cover', ''),
                json.dumps(item.get('covers', [])),
                item.get('uri', ''),
                history.get('oid', 0),
                history.get('epid', 0),
                bvid,
                history.get('page', 1),
                history.get('cid', 0),
                history.get('part', ''),
                business,
                history.get('dt', 0),
                history.get('videos', 0),
                item.get('author_name', ''),
                item.get('author_face', ''),
                item.get('author_mid', 0),
                view_at,
                progress,  # watched progress taken from the top-level item
                item.get('badge', ''),
                item.get('show_title', ''),
                duration,  # total duration taken from the top-level item
                item.get('current', ''),
                item.get('total', 0),
                item.get('new_desc', ''),
                item.get('is_finish', 0),
                item.get('is_fav', 0),
                history.get('kid', 0),
                tag_name,
                item.get('live_status', 0),
                main_category,
                '',  # empty remark by default
                0  # default remark time of 0
            )
            data_by_year[year].append(record)
            # Remember as seen so duplicates within this file are skipped too.
            existing_records.add((bvid, view_at))
            # Flush a year bucket as soon as it reaches the batch size.
            if len(data_by_year[year]) >= batch_size:
                year_table = f"{table_name}_{year}"
                if not table_exists(conn, year_table):
                    create_table(conn, year_table)
                inserted = batch_insert_data(conn, year_table, data_by_year[year])
                total_inserted += inserted
                data_by_year[year] = []
        # Flush any remaining partial batches.
        for year, records in data_by_year.items():
            if records:
                year_table = f"{table_name}_{year}"
                if not table_exists(conn, year_table):
                    create_table(conn, year_table)
                inserted = batch_insert_data(conn, year_table, records)
                total_inserted += inserted
        return total_inserted
    except sqlite3.Error as e:
        logger.error(f"导入数据时发生错误: {e}")
        return 0
def save_last_import_record(file_path, timestamp):
    """Persist the most recent import (file, unix time, date) to last_import.json."""
    record = {
        "last_import_file": file_path,
        "last_import_time": timestamp,
        "last_import_date": datetime.fromtimestamp(timestamp).strftime("%Y-%m-%d"),
    }
    with open(get_output_path('last_import.json'), 'w', encoding='utf-8') as fh:
        json.dump(record, fh, ensure_ascii=False, indent=4)
    logger.debug(f"已更新导入记录: {record}")
def get_last_import_record():
    """Read last_import.json; return its dict, or None when the file is absent."""
    record_file = get_output_path('last_import.json')
    if not os.path.exists(record_file):
        return None
    with open(record_file, 'r', encoding='utf-8') as fh:
        return json.load(fh)
def import_all_history_files(sync_deleted=False):
    """Import every JSON history file under output/history_by_date.

    Files are visited newest-first (year/month/day directory layout); the
    walk stops early once a file's newest record predates the previous
    import. After a successful run the last-import marker is advanced.

    Args:
        sync_deleted: Forwarded to import_data_from_json; when True,
            locally deleted records are re-imported.

    Returns:
        dict: {"status": "success"|"error", "message": str, ...} with
        ``inserted_count`` on success.
    """
    current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    logger.info(f"========== 运行时间: {current_time} ==========")
    logger.info(f"当前工作目录: {os.getcwd()}")
    # Resolve data folder and database file via the project path helpers.
    full_data_folder = get_output_path('history_by_date')
    full_db_file = get_output_path(config['db_file'])
    logger.info(f"\n=== 路径信息 ===")
    logger.info(f"数据文件夹: {full_data_folder}")
    logger.info(f"数据库文件: {full_db_file}")
    if not os.path.exists(full_data_folder):
        message = f"本地文件夹 '{full_data_folder}' 不存在,无法加载数据。"
        logger.error(message)
        return {"status": "error", "message": message}
    # Previous import marker: only newer records are imported when present.
    last_record = get_last_import_record()
    last_import_time = last_record['last_import_time'] if last_record else 0
    if last_record:
        logger.info(f"上次导入记录:")
        logger.info(f"- 文件: {last_record['last_import_file']}")
        logger.info(f"- 时间: {last_import_time}")
        logger.info(f"- 日期: {last_record['last_import_date']}")
    else:
        logger.info("未找到导入记录,将导入所有数据")
    # Per-file insert counters, for the summary at the end.
    file_insert_counts = {}
    logger.info(f"开始遍历并导入文件夹 '{full_data_folder}' 中的数据...")
    conn = create_connection(full_db_file)
    if conn is None:
        message = f"无法连接到数据库 {full_db_file}。"
        logger.error(message)
        return {"status": "error", "message": message}
    try:
        total_files = 0
        total_records = 0
        latest_timestamp = 0  # newest view_at seen across all files
        latest_file = None    # file that contained that newest record
        # Collect all JSON files, newest year/month/day first, so the walk
        # can stop as soon as it reaches already-imported data.
        all_json_files = []
        for year in sorted(os.listdir(full_data_folder), reverse=True):
            year_path = os.path.join(full_data_folder, year)
            if os.path.isdir(year_path) and year.isdigit():
                for month in sorted(os.listdir(year_path), reverse=True):
                    month_path = os.path.join(year_path, month)
                    if os.path.isdir(month_path) and month.isdigit():
                        for day_file in sorted(os.listdir(month_path), reverse=True):
                            if day_file.endswith('.json'):
                                day_path = os.path.join(month_path, day_file)
                                all_json_files.append(day_path)
        for day_path in all_json_files:
            logger.info(f"\n处理文件: {day_path}")
            # Peek at the newest record in the file to decide whether to
            # continue the (newest-first) walk at all.
            try:
                with open(day_path, 'r', encoding='utf-8') as f:
                    data = json.load(f)
                    if data:
                        newest_view_at = max(item.get('view_at', 0) for item in data)
                        logger.info(f"文件中最新记录时间: {datetime.fromtimestamp(newest_view_at)}")
                        # Track the globally newest record for the marker update.
                        if newest_view_at > latest_timestamp:
                            latest_timestamp = newest_view_at
                            latest_file = day_path
                        # Only compare when a previous import marker exists.
                        if last_import_time > 0 and newest_view_at <= last_import_time:
                            logger.info(f"跳过文件 {day_path} 及后续文件: 所有记录都早于上次导入时间")
                            break
            except Exception as e:
                logger.error(f"读取文件 {day_path} 时出错: {e}")
                continue
            inserted_count = import_data_from_json(conn, "bilibili_history", day_path, last_import_time, sync_deleted=sync_deleted)
            if inserted_count > 0:
                total_files += 1
                total_records += inserted_count
                file_insert_counts[day_path] = inserted_count
                logger.info(f"成功插入 {inserted_count} 条记录")
        # After processing, advance the marker to the newest timestamp seen.
        if total_records > 0 and latest_timestamp > 0:
            save_last_import_record(latest_file, latest_timestamp)
            logger.info(f"更新导入记录为最新时间戳: {datetime.fromtimestamp(latest_timestamp)}")
        # Import summary.
        logger.info("\n=== 导入统计 ===")
        logger.info(f"处理文件总数: {total_files}")
        logger.info(f"插入记录总数: {total_records}")
        if file_insert_counts:
            logger.info("\n各文件插入详情:")
            for file_path, count in file_insert_counts.items():
                logger.info(f"- {os.path.basename(file_path)}: {count} 条记录")
        else:
            logger.info("\n没有新记录需要插入")
        logger.info("================\n")
        message = f"数据导入完成,共插入 {total_records} 条记录。"
        return {"status": "success", "message": message, "inserted_count": total_records}
    except sqlite3.Error as e:
        error_msg = f"数据库错误: {str(e)}"
        logger.error(f"=== 错误 ===\n{error_msg}\n===========")
        return {"status": "error", "message": error_msg}
    finally:
        # Always release the connection, even on error paths.
        if conn:
            conn.close()
# Allow the importer to run standalone from the command line.
if __name__ == '__main__':
    result = import_all_history_files()
    if result["status"] == "success":
        print(result["message"])
    else:
        print(f"错误: {result['message']}")
|
2929004360/ruoyi-sign
| 5,476
|
ruoyi-common/src/main/java/com/ruoyi/common/core/controller/BaseController.java
|
package com.ruoyi.common.core.controller;
import java.beans.PropertyEditorSupport;
import java.io.IOException;
import java.util.Date;
import java.util.List;
import com.ruoyi.common.core.domain.entity.SysUser;
import com.ruoyi.common.exception.DemoModeException;
import com.ruoyi.common.utils.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.bind.WebDataBinder;
import org.springframework.web.bind.annotation.InitBinder;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import com.ruoyi.common.constant.HttpStatus;
import com.ruoyi.common.core.domain.AjaxResult;
import com.ruoyi.common.core.domain.model.LoginUser;
import com.ruoyi.common.core.page.PageDomain;
import com.ruoyi.common.core.page.TableDataInfo;
import com.ruoyi.common.core.page.TableSupport;
import com.ruoyi.common.utils.sql.SqlUtil;
import org.springframework.web.bind.annotation.ModelAttribute;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
* web层通用数据处理
*
* @author ruoyi
*/
public class BaseController
{
    protected final Logger logger = LoggerFactory.getLogger(this.getClass());

    /**
     * Demo-mode guard run before every handler method: non-admin users (or
     * requests where the current user cannot be resolved) are passed through
     * verificationRequest, which may reject the call.
     */
    @ModelAttribute
    public void init(HttpServletRequest httpServletRequest, HttpServletResponse response) throws IOException {
        try {
            if (!SysUser.isAdmin(SecurityUtils.getUserId())) {
                this.verificationRequest(httpServletRequest, response);
            }
        } catch (Exception e) {
            // Resolving the current user failed (e.g. not logged in): apply
            // the same demo-mode restrictions.
            this.verificationRequest(httpServletRequest, response);
        }
    }

    /**
     * Validate the request for demo mode: blocks code-generation and export
     * URLs outright, allows /demo and /tool/gen, and rejects all mutating
     * (POST/PUT/DELETE) requests. (Original comment "校验请假" appears to be
     * a typo for "校验请求" — validate request.)
     */
    private void verificationRequest(HttpServletRequest httpServletRequest, HttpServletResponse response) {
        // NOTE(review): reads the URI via ServletUtils.getRequest() instead of
        // the httpServletRequest parameter — confirm this is intentional.
        String url = ServletUtils.getRequest().getRequestURI();
        // URLs that are always blocked
        if (StringUtils.isNotEmpty(url) && (url.indexOf("/genCode") >= 0 || url.indexOf("/export") >= 0)) {
            throw new DemoModeException();
        }
        // URLs that are always allowed
        if (StringUtils.isNotEmpty(url) && (url.contains("/demo") || url.contains("/tool/gen"))) {
            return;
        }
        // Mutating requests are rejected in demo mode
        if ("DELETE".equals(httpServletRequest.getMethod()) || "POST".equals(httpServletRequest.getMethod())
                || "PUT".equals(httpServletRequest.getMethod())) {
            throw new DemoModeException();
        }
    }

    /**
     * Automatically convert date-format strings sent by the front end into
     * Date objects.
     */
    @InitBinder
    public void initBinder(WebDataBinder binder)
    {
        // Date type conversion
        binder.registerCustomEditor(Date.class, new PropertyEditorSupport()
        {
            @Override
            public void setAsText(String text)
            {
                setValue(DateUtils.parseDate(text));
            }
        });
    }

    /**
     * Start pagination for the current request.
     */
    protected void startPage()
    {
        PageUtils.startPage();
    }

    /**
     * Apply the request's orderBy (SQL-escaped) to the page helper.
     */
    protected void startOrderBy()
    {
        PageDomain pageDomain = TableSupport.buildPageRequest();
        if (StringUtils.isNotEmpty(pageDomain.getOrderBy()))
        {
            String orderBy = SqlUtil.escapeOrderBySql(pageDomain.getOrderBy());
            PageHelper.orderBy(orderBy);
        }
    }

    /**
     * Clear the pagination thread-local state.
     */
    protected void clearPage()
    {
        PageUtils.clearPage();
    }

    /**
     * Wrap a paged query result into a TableDataInfo response.
     */
    @SuppressWarnings({ "rawtypes", "unchecked" })
    protected TableDataInfo getDataTable(List<?> list)
    {
        TableDataInfo rspData = new TableDataInfo();
        rspData.setCode(HttpStatus.SUCCESS);
        rspData.setMsg("查询成功");
        rspData.setRows(list);
        rspData.setTotal(new PageInfo(list).getTotal());
        return rspData;
    }

    /**
     * Return a success result.
     */
    public AjaxResult success()
    {
        return AjaxResult.success();
    }

    /**
     * Return a failure result.
     */
    public AjaxResult error()
    {
        return AjaxResult.error();
    }

    /**
     * Return a success result with a message.
     */
    public AjaxResult success(String message)
    {
        return AjaxResult.success(message);
    }

    /**
     * Return a success result with a data payload.
     */
    public AjaxResult success(Object data)
    {
        return AjaxResult.success(data);
    }

    /**
     * Return a failure result with a message.
     */
    public AjaxResult error(String message)
    {
        return AjaxResult.error(message);
    }

    /**
     * Return a warning result with a message.
     */
    public AjaxResult warn(String message)
    {
        return AjaxResult.warn(message);
    }

    /**
     * Build a response from an affected-row count.
     *
     * @param rows affected rows
     * @return success when rows > 0, error otherwise
     */
    protected AjaxResult toAjax(int rows)
    {
        return rows > 0 ? AjaxResult.success() : AjaxResult.error();
    }

    /**
     * Build a response from a boolean outcome.
     *
     * @param result operation outcome
     * @return success when true, error otherwise
     */
    protected AjaxResult toAjax(boolean result)
    {
        return result ? success() : error();
    }

    /**
     * Build a redirect view name for the given URL.
     */
    public String redirect(String url)
    {
        return StringUtils.format("redirect:{}", url);
    }

    /**
     * Get the cached login-user information.
     */
    public LoginUser getLoginUser()
    {
        return SecurityUtils.getLoginUser();
    }

    /**
     * Get the logged-in user's id.
     */
    public Long getUserId()
    {
        return getLoginUser().getUserId();
    }

    /**
     * Get the logged-in user's department id.
     */
    public Long getDeptId()
    {
        return getLoginUser().getDeptId();
    }

    /**
     * Get the logged-in user's username.
     */
    public String getUsername()
    {
        return getLoginUser().getUsername();
    }
}
|
2929004360/ruoyi-sign
| 2,235
|
ruoyi-common/src/main/java/com/ruoyi/common/core/redis/RedisLock.java
|
package com.ruoyi.common.core.redis;
import org.redisson.api.RLock;
import org.redisson.api.RedissonClient;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import java.util.concurrent.TimeUnit;
/**
* redis锁工具类
*
* @author ruoyi
*/
@Component
public class RedisLock
{
    @Autowired
    private RedissonClient redissonClient;

    /**
     * Get the lock handle for the given key.
     *
     * @param lockKey lock key
     * @return lock handle
     */
    public RLock getRLock(String lockKey)
    {
        return redissonClient.getLock(lockKey);
    }

    /**
     * Acquire the lock, blocking until it is available.
     *
     * @param lockKey lock key
     * @return the held lock handle
     */
    public RLock lock(String lockKey)
    {
        RLock lock = getRLock(lockKey);
        lock.lock();
        return lock;
    }

    /**
     * Try to acquire the lock without waiting.
     *
     * @param lockKey lock key
     * @param leaseTime seconds after which the lock auto-releases
     * @return true when the lock was acquired, false otherwise
     */
    public Boolean tryLock(String lockKey, long leaseTime)
    {
        return tryLock(lockKey, 0, leaseTime, TimeUnit.SECONDS);
    }

    /**
     * Try to acquire the lock without waiting.
     *
     * @param lockKey lock key
     * @param leaseTime time after which the lock auto-releases
     * @param unit time unit for leaseTime
     * @return true when the lock was acquired, false otherwise
     */
    public Boolean tryLock(String lockKey, long leaseTime, TimeUnit unit)
    {
        return tryLock(lockKey, 0, leaseTime, unit);
    }

    /**
     * Try to acquire the lock, waiting at most waitTime.
     *
     * @param lockKey lock key
     * @param waitTime maximum time to wait for the lock
     * @param leaseTime time after which the lock auto-releases
     * @param unit time unit for waitTime and leaseTime
     * @return true when the lock was acquired, false otherwise
     */
    public Boolean tryLock(String lockKey, long waitTime, long leaseTime, TimeUnit unit)
    {
        RLock rLock = getRLock(lockKey);
        boolean tryLock = false;
        try
        {
            tryLock = rLock.tryLock(waitTime, leaseTime, unit);
        }
        catch (InterruptedException e)
        {
            // Fix: restore the interrupt flag instead of swallowing it, so
            // callers and thread pools can still observe the interruption.
            Thread.currentThread().interrupt();
            return false;
        }
        return tryLock;
    }

    /**
     * Release the lock identified by the given key.
     *
     * @param lockKey lock key
     */
    public void unlock(String lockKey)
    {
        RLock lock = getRLock(lockKey);
        lock.unlock();
    }

    /**
     * Release a previously acquired lock handle.
     *
     * @param lock lock handle
     */
    public void unlock(RLock lock)
    {
        lock.unlock();
    }
}
|
281677160/openwrt-package
| 1,993
|
luci-app-ssr-plus/shadowsocks-libev/files/ss-rules/set.uc
|
{%
// Renders the nftables sets used by the ss-rules firewall include.
// Template context: o_src_bypass, o_src_forward, o_src_checkdst,
// o_dst_bypass, o_dst_forward, o_remote_servers and the *_file variables
// are assumed to be injected by the caller rendering this template — confirm.
let fs = require("fs");

// Built-in IPv4 ranges that are never redirected (RFC1918, link-local,
// multicast, benchmarking, documentation, etc.).
let o_dst_bypass4_ = "
0.0.0.0/8
10.0.0.0/8
100.64.0.0/10
127.0.0.0/8
169.254.0.0/16
172.16.0.0/12
192.0.0.0/24
192.0.2.0/24
192.31.196.0/24
192.52.193.0/24
192.88.99.0/24
192.168.0.0/16
192.175.48.0/24
198.18.0.0/15
198.51.100.0/24
203.0.113.0/24
224.0.0.0/4
240.0.0.0/4
";
// Built-in IPv6 bypass ranges (loopback, mapped, link-local, ULA, etc.).
let o_dst_bypass6_ = "
::1/128
::/128
::ffff:0:0/96
64:ff9b:1::/48
100::/64
fe80::/10
2001::/23
fc00::/7
";
// Combined built-in bypass list (v4 + v6).
let o_dst_bypass_ = o_dst_bypass_ = o_dst_bypass4_ + " " + o_dst_bypass6_;
// One entry per generated set; `str` holds inline addresses and `file` an
// optional file with additional addresses.
let set_suffix = {
	"src_bypass": {
		str: o_src_bypass,
	},
	"src_forward": {
		str: o_src_forward,
	},
	"src_checkdst": {
		str: o_src_checkdst,
	},
	"dst_bypass": {
		str: o_dst_bypass + " " + o_remote_servers,
		file: o_dst_bypass_file,
	},
	"dst_bypass_": {
		str: o_dst_bypass_,
	},
	"dst_forward": {
		str: o_dst_forward,
		file: o_dst_forward_file,
	},
	"dst_forward_rrst_": {},
};

// Build the per-family set name: "ss_rules_*" for IPv4, "ss_rules6_*" for IPv6.
function set_name(suf, af) {
	if (af == 4) {
		return "ss_rules_"+suf;
	} else {
		return "ss_rules6_"+suf;
	}
}

// Split a whitespace-separated address list into `res`, keeping only the
// entries that match the requested address family (":" marks IPv6).
function set_elements_parse(res, str, af) {
	for (let addr in split(str, /[ \t\n]/)) {
		addr = trim(addr);
		if (!addr) continue;
		if (af == 4 && index(addr, ":") != -1) continue;
		if (af == 6 && index(addr, ":") == -1) continue;
		push(res, addr);
	}
}

// Collect all elements for a suffix: inline string first, then the optional
// file contents.
function set_elements(suf, af) {
	let obj = set_suffix[suf];
	let res = [];
	let addr;
	let str = obj["str"];
	if (str) {
		set_elements_parse(res, str, af);
	}
	let file = obj["file"];
	if (file) {
		let fd = fs.open(file);
		if (fd) {
			str = fd.read("all");
			set_elements_parse(res, str, af);
		}
	}
	return res;
}
%}
{% for (let suf in set_suffix): for (let af in [4, 6]): %}
	set {{ set_name(suf, af) }} {
		type ipv{{af}}_addr;
		flags interval;
		auto-merge;
{% let elems = set_elements(suf, af); if (length(elems)): %}
		elements = {
{% for (let i = 0; i < length(elems); i++): %}
			{{ elems[i] }}{% if (i < length(elems) - 1): %},{% endif %}{% print("\n") %}
{% endfor %}
		}
{% endif %}
	}
{% endfor; endfor %}
|
281677160/openwrt-package
| 3,215
|
luci-app-ssr-plus/shadowsocks-libev/files/ss-rules/chain.uc
|
{%
// Renders the nftables chains for ss-rules; `proto` ("tcp"/"udp") and the
// o_* option variables are assumed to be injected by the caller — confirm.

// Verdict for locally originated traffic, or null to skip the output chain.
function get_local_verdict() {
	let v = o_local_default;
	if (v == "checkdst") {
		return "goto ss_rules_dst_" + proto;
	} else if (v == "forward") {
		return "goto ss_rules_forward_" + proto;
	} else {
		return null;
	}
}

// Default verdict for source addresses not matched by any src_* set.
function get_src_default_verdict() {
	let v = o_src_default;
	if (v == "checkdst") {
		return "goto ss_rules_dst_" + proto;
	} else if (v == "forward") {
		return "goto ss_rules_forward_" + proto;
	} else {
		return "accept";
	}
}

// Default verdict for destinations not matched by any dst_* set.
function get_dst_default_verdict() {
	let v = o_dst_default;
	if (v == "forward") {
		return "goto ss_rules_forward_" + proto;
	} else {
		return "accept";
	}
}

// Parse the whitespace-separated o_ifnames option into a list.
function get_ifnames() {
	let res = [];
	for (let ifname in split(o_ifnames, /[ \t\n]/)) {
		ifname = trim(ifname);
		if (ifname) push(res, ifname);
	}
	return res;
}

// TCP uses REDIRECT via a nat prerouting chain; UDP uses TPROXY via a
// mangle-priority filter chain plus policy-routing setup below.
let type, hook, priority, redir_port;
if (proto == "tcp") {
	type = "nat";
	hook = "prerouting";
	priority = -1;
	redir_port = o_redir_tcp_port;
} else if (proto == "udp") {
	type = "filter";
	hook = "prerouting";
	priority = "mangle";
	redir_port = o_redir_udp_port;
	// TPROXY needs fwmark-1 traffic routed to the local lo table (100);
	// abort rendering if the ip-rule/route setup fails.
	if (system("
		set -o errexit
		iprr() {
			while ip $1 rule del fwmark 1 lookup 100 2>/dev/null; do true; done
			ip $1 rule add fwmark 1 lookup 100
			ip $1 route flush table 100 2>/dev/null || true
			ip $1 route add local default dev lo table 100
		}
		iprr -4
		iprr -6
	") != 0) {
		return ;
	}
} else {
	return;
}
%}
{% if (redir_port): %}
chain ss_rules_pre_{{ proto }} {
	type {{ type }} hook {{ hook }} priority {{ priority }};
	meta l4proto {{ proto }}{%- let ifnames=get_ifnames(); if (length(ifnames)): %} iifname { {{join(", ", ifnames)}} }{% endif %} goto ss_rules_pre_src_{{ proto }};
}
chain ss_rules_pre_src_{{ proto }} {
	ip daddr @ss_rules_dst_bypass_ accept;
	ip6 daddr @ss_rules6_dst_bypass_ accept;
	goto ss_rules_src_{{ proto }};
}
chain ss_rules_src_{{ proto }} {
	ip saddr @ss_rules_src_bypass accept;
	ip saddr @ss_rules_src_forward goto ss_rules_forward_{{ proto }};
	ip saddr @ss_rules_src_checkdst goto ss_rules_dst_{{ proto }};
	ip6 saddr @ss_rules6_src_bypass accept;
	ip6 saddr @ss_rules6_src_forward goto ss_rules_forward_{{ proto }};
	ip6 saddr @ss_rules6_src_checkdst goto ss_rules_dst_{{ proto }};
	{{ get_src_default_verdict() }};
}
chain ss_rules_dst_{{ proto }} {
	ip daddr @ss_rules_dst_bypass accept;
	ip daddr @ss_rules_dst_forward goto ss_rules_forward_{{ proto }};
	ip6 daddr @ss_rules6_dst_bypass accept;
	ip6 daddr @ss_rules6_dst_forward goto ss_rules_forward_{{ proto }};
	{{ get_dst_default_verdict() }};
}
{% if (proto == "tcp"): %}
chain ss_rules_forward_{{ proto }} {
	meta l4proto tcp {{ o_nft_tcp_extra }} redirect to :{{ redir_port }};
}
{% let local_verdict = get_local_verdict(); if (local_verdict): %}
chain ss_rules_local_out {
	type {{ type }} hook output priority -1;
	meta l4proto != tcp accept;
	ip daddr @ss_rules_dst_bypass_ accept;
	ip daddr @ss_rules_dst_bypass accept;
	ip6 daddr @ss_rules6_dst_bypass_ accept;
	ip6 daddr @ss_rules6_dst_bypass accept;
	{{ local_verdict }};
}
{% endif %}
{% elif (proto == "udp"): %}
chain ss_rules_forward_{{ proto }} {
	meta l4proto udp {{ o_nft_udp_extra }} meta mark set 1 tproxy to :{{ redir_port }};
}
{% endif %}
{% endif %}
|
28harishkumar/blog
| 2,656
|
resources/assets/less/bootstrap/scaffolding.less
|
//
// Scaffolding
// --------------------------------------------------
// Reset the box-sizing
//
// Heads up! This reset may cause conflicts with some third-party widgets.
// For recommendations on resolving such conflicts, see
// http://getbootstrap.com/getting-started/#third-box-sizing
// Apply border-box sizing globally (and to generated content) so padding and
// borders are included in declared widths.
* {
  .box-sizing(border-box);
}
*:before,
*:after {
  .box-sizing(border-box);
}

// Body reset
// 10px root font-size makes rem math trivial (1.4rem = 14px).
html {
  font-size: 10px;
  -webkit-tap-highlight-color: rgba(0,0,0,0);
}

body {
  font-family: @font-family-base;
  font-size: @font-size-base;
  line-height: @line-height-base;
  color: @text-color;
  background-color: @body-bg;
}

// Reset fonts for relevant elements
// Form controls do not inherit typography by default.
input,
button,
select,
textarea {
  font-family: inherit;
  font-size: inherit;
  line-height: inherit;
}

// Links
a {
  color: @link-color;
  text-decoration: none;

  &:hover,
  &:focus {
    color: @link-hover-color;
    text-decoration: @link-hover-decoration;
  }

  &:focus {
    .tab-focus();
  }
}

// Figures
//
// We reset this here because previously Normalize had no `figure` margins. This
// ensures we don't break anyone's use of the element.

figure {
  margin: 0;
}

// Images

img {
  vertical-align: middle;
}

// Responsive images (ensure images don't scale beyond their parents)
.img-responsive {
  .img-responsive();
}

// Rounded corners
.img-rounded {
  border-radius: @border-radius-large;
}

// Image thumbnails
//
// Heads up! This is mixin-ed into thumbnails.less for `.thumbnail`.
.img-thumbnail {
  padding: @thumbnail-padding;
  line-height: @line-height-base;
  background-color: @thumbnail-bg;
  border: 1px solid @thumbnail-border;
  border-radius: @thumbnail-border-radius;
  .transition(all .2s ease-in-out);

  // Keep them at most 100% wide
  .img-responsive(inline-block);
}

// Perfect circle
.img-circle {
  border-radius: 50%; // set radius in percents
}

// Horizontal rules

hr {
  margin-top:    @line-height-computed;
  margin-bottom: @line-height-computed;
  border: 0;
  border-top: 1px solid @hr-border;
}

// Only display content to screen readers
//
// See: http://a11yproject.com/posts/how-to-hide-content/

.sr-only {
  position: absolute;
  width: 1px;
  height: 1px;
  margin: -1px;
  padding: 0;
  overflow: hidden;
  clip: rect(0,0,0,0);
  border: 0;
}

// Use in conjunction with .sr-only to only display content when it's focused.
// Useful for "Skip to main content" links; see http://www.w3.org/TR/2013/NOTE-WCAG20-TECHS-20130905/G1
// Credit: HTML5 Boilerplate

.sr-only-focusable {
  &:active,
  &:focus {
    position: static;
    width: auto;
    height: auto;
    margin: 0;
    overflow: visible;
    clip: auto;
  }
}
|
2977094657/BilibiliHistoryFetcher
| 6,626
|
scripts/send_log_email.py
|
import os
import smtplib
from datetime import datetime
from email.header import Header
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from typing import Optional, Dict
from loguru import logger
from scripts.utils import load_config, setup_logger, get_logs_path
# Ensure the shared loguru sink/format is configured before any logging below.
setup_logger()
def get_task_execution_logs() -> str:
    """Return the log lines produced by the most recent scheduled-task run.

    Scans today's log file backwards for the last task-start marker and
    returns everything from that marker to the end of the file.

    Returns:
        str: The matching log text, or a fallback message when the log file
        is missing, empty, or contains no task-start marker.
    """
    log_file = get_logs_path()
    if not os.path.exists(log_file):
        return "今日暂无日志记录"

    with open(log_file, 'r', encoding='utf-8') as fh:
        lines = fh.readlines()
    if not lines:
        return "今日暂无日志记录"

    # Markers that denote the start of a scheduled-task execution.
    markers = (
        "=== 执行任务链:",    # task-chain start
        "=== 执行任务:",      # single-task start
        "=== 调度器触发任务执行"  # scheduler-triggered run
    )

    # Walk backwards to find the most recent marker; return from there on.
    for idx in range(len(lines) - 1, -1, -1):
        if any(marker in lines[idx] for marker in markers):
            return "".join(lines[idx:])

    return "未找到任务执行记录"
async def send_email(subject: str, content: Optional[str] = None, to_email: Optional[str] = None) -> Dict:
    """Send an email over SMTP.

    Args:
        subject: Mail subject; a ``{current_time}`` placeholder, if present,
            is substituted with the current timestamp.
        content: Mail body; when None the latest task-execution log is used.
        to_email: Recipient address; when None the configured default is used.

    Returns:
        Dict: ``{"status": "success"|"error", "message": ...}``.
    """
    logger.info(f"准备发送邮件: {subject}")
    try:
        config = load_config()
        smtp_server = config.get('email', {}).get('smtp_server', 'smtp.qq.com')
        smtp_port = config.get('email', {}).get('smtp_port', 587)
        sender_email = config.get('email', {}).get('sender')
        sender_password = config.get('email', {}).get('password')
        receiver_email = to_email or config.get('email', {}).get('receiver')
        if not all([sender_email, sender_password, receiver_email]):
            logger.error("邮件配置不完整,请检查配置文件")
            raise ValueError("邮件配置不完整,请检查配置文件")
        # Default body: the logs captured during the latest task run.
        if content is None:
            content = get_task_execution_logs()
        # Substitute the time placeholder in the subject.
        current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        subject = subject.format(current_time=current_time)
        # Build the MIME message.
        message = MIMEMultipart()
        message['From'] = Header(sender_email)
        message['To'] = Header(receiver_email)
        message['Subject'] = Header(subject)
        # Attach the plain-text body.
        message.attach(MIMEText(content, 'plain', 'utf-8'))
        # Connect to the SMTP server and send.
        server = None
        email_sent = False
        try:
            # No `with` statement here: we need fine-grained control over the
            # error-handling flow. Works with local relays (e.g. mailrise);
            # TLS support is probed rather than assumed.
            server = smtplib.SMTP(smtp_server, smtp_port, timeout=30)
            # Probe for STARTTLS support.
            use_starttls = False
            try:
                # Standard providers (e.g. QQ Mail) require STARTTLS.
                server.starttls()
                use_starttls = True
                logger.info("STARTTLS连接成功")
            except smtplib.SMTPNotSupportedError:
                # Local relays such as mailrise: fall back to plaintext.
                logger.info("服务器不支持STARTTLS,使用明文连接")
            except Exception as e:
                # Any other STARTTLS failure: also fall back to plaintext.
                logger.warning(f"STARTTLS失败,尝试明文连接: {str(e)}")
            # Adaptive auth: try to log in, then decide whether auth is required.
            try:
                # Most standard SMTP servers need credentials.
                server.login(sender_email, sender_password)
                logger.info("身份认证成功")
            except smtplib.SMTPNotSupportedError:
                # Server explicitly does not support AUTH.
                logger.info("服务器不支持身份认证,跳过认证步骤")
            except smtplib.SMTPAuthenticationError as e:
                # Credential failure (or similar) — abort the send.
                logger.error(f"身份认证失败: {str(e)}")
                raise Exception(f"身份认证失败: {str(e)}")
            except Exception as e:
                # Distinguish "auth required" errors from auth-less servers.
                error_str = str(e).lower()
                if any(keyword in error_str for keyword in ['auth', 'authentication', 'login', 'need ehlo']):
                    # The server demands auth; do not continue unauthenticated.
                    logger.error(f"服务器要求身份认证但认证失败: {str(e)}")
                    raise Exception(f"服务器要求身份认证: {str(e)}")
                else:
                    # Probably a server that needs no auth — proceed anyway.
                    logger.warning(f"认证过程出错,尝试无认证方式: {str(e)}")
            server.send_message(message)
            email_sent = True  # mark the mail as actually delivered
            # Close the connection right after a successful send.
            server.quit()
        except smtplib.SMTPException as e:
            raise Exception(f"SMTP错误: {str(e)}")
        except TimeoutError:
            raise Exception("SMTP服务器连接超时")
        except Exception as e:
            # Best-effort cleanup of the connection before re-raising.
            if server:
                try:
                    server.quit()
                except:
                    # Ignore close failures; the original error matters more.
                    pass
            raise e
        # Reaching here means the mail was sent and the connection closed.
        logger.info(f"邮件发送成功: {subject}")
        return {"status": "success", "message": "邮件发送成功"}
    except Exception as e:
        error_msg = f"邮件发送失败: {str(e)}"
        logger.error(f"邮件发送失败: {str(e)}")
        # A literal "\x00\x00\x00" in the error text usually means the mail
        # was actually delivered despite the reported failure.
        if "\\x00\\x00\\x00" in str(e):
            logger.info("邮件可能已成功发送(出现特殊错误码但通常不影响邮件传递)")
            return {"status": "success", "message": "邮件可能已成功发送(出现特殊错误码但通常不影响邮件传递)"}
        return {"status": "error", "message": error_msg}
def get_today_logs():
    """Return every log line recorded today.

    Reads the current log file in full and splits it into lines.

    Returns:
        list: All of today's log lines; empty when no log file exists.
    """
    path = get_logs_path()

    # Guard clause: nothing logged yet today.
    if not os.path.exists(path):
        return []

    with open(path, 'r', encoding='utf-8') as fh:
        return fh.read().splitlines()
# Manual smoke test: sends the current day's task log as the mail body.
if __name__ == '__main__':
    import asyncio

    async def _run_smoke_test():
        try:
            await send_email(
                subject="测试日志邮件",
                content=None  # None -> body falls back to today's task log
            )
            print("测试邮件发送成功")
        except Exception as exc:
            print(f"测试邮件发送失败: {exc}")

    asyncio.run(_run_smoke_test())
|
28harishkumar/blog
| 4,388
|
resources/assets/less/bootstrap/mixins/gradients.less
|
// Gradients
#gradient {

  // Horizontal gradient, from left to right
  //
  // Creates two color stops, start and end, by specifying a color and position for each color stop.
  // Color stops are not available in IE9 and below.
  .horizontal(@start-color: #555; @end-color: #333; @start-percent: 0%; @end-percent: 100%) {
    background-image: -webkit-linear-gradient(left, @start-color @start-percent, @end-color @end-percent); // Safari 5.1-6, Chrome 10+
    background-image: -o-linear-gradient(left, @start-color @start-percent, @end-color @end-percent); // Opera 12
    background-image: linear-gradient(to right, @start-color @start-percent, @end-color @end-percent); // Standard, IE10, Firefox 16+, Opera 12.10+, Safari 7+, Chrome 26+
    background-repeat: repeat-x;
    filter: e(%("progid:DXImageTransform.Microsoft.gradient(startColorstr='%d', endColorstr='%d', GradientType=1)",argb(@start-color),argb(@end-color))); // IE9 and down
  }

  // Vertical gradient, from top to bottom
  //
  // Creates two color stops, start and end, by specifying a color and position for each color stop.
  // Color stops are not available in IE9 and below.
  .vertical(@start-color: #555; @end-color: #333; @start-percent: 0%; @end-percent: 100%) {
    background-image: -webkit-linear-gradient(top, @start-color @start-percent, @end-color @end-percent); // Safari 5.1-6, Chrome 10+
    background-image: -o-linear-gradient(top, @start-color @start-percent, @end-color @end-percent); // Opera 12
    background-image: linear-gradient(to bottom, @start-color @start-percent, @end-color @end-percent); // Standard, IE10, Firefox 16+, Opera 12.10+, Safari 7+, Chrome 26+
    background-repeat: repeat-x;
    filter: e(%("progid:DXImageTransform.Microsoft.gradient(startColorstr='%d', endColorstr='%d', GradientType=0)",argb(@start-color),argb(@end-color))); // IE9 and down
  }

  // Angled gradient along an arbitrary direction (no IE9 filter fallback).
  .directional(@start-color: #555; @end-color: #333; @deg: 45deg) {
    background-repeat: repeat-x;
    background-image: -webkit-linear-gradient(@deg, @start-color, @end-color); // Safari 5.1-6, Chrome 10+
    background-image: -o-linear-gradient(@deg, @start-color, @end-color); // Opera 12
    background-image: linear-gradient(@deg, @start-color, @end-color); // Standard, IE10, Firefox 16+, Opera 12.10+, Safari 7+, Chrome 26+
  }

  // Three-stop horizontal gradient; @color-stop positions the middle color.
  .horizontal-three-colors(@start-color: #00b3ee; @mid-color: #7a43b6; @color-stop: 50%; @end-color: #c3325f) {
    background-image: -webkit-linear-gradient(left, @start-color, @mid-color @color-stop, @end-color);
    background-image: -o-linear-gradient(left, @start-color, @mid-color @color-stop, @end-color);
    background-image: linear-gradient(to right, @start-color, @mid-color @color-stop, @end-color);
    background-repeat: no-repeat;
    filter: e(%("progid:DXImageTransform.Microsoft.gradient(startColorstr='%d', endColorstr='%d', GradientType=1)",argb(@start-color),argb(@end-color))); // IE9 and down, gets no color-stop at all for proper fallback
  }

  // Three-stop vertical gradient; @color-stop positions the middle color.
  .vertical-three-colors(@start-color: #00b3ee; @mid-color: #7a43b6; @color-stop: 50%; @end-color: #c3325f) {
    background-image: -webkit-linear-gradient(@start-color, @mid-color @color-stop, @end-color);
    background-image: -o-linear-gradient(@start-color, @mid-color @color-stop, @end-color);
    background-image: linear-gradient(@start-color, @mid-color @color-stop, @end-color);
    background-repeat: no-repeat;
    filter: e(%("progid:DXImageTransform.Microsoft.gradient(startColorstr='%d', endColorstr='%d', GradientType=0)",argb(@start-color),argb(@end-color))); // IE9 and down, gets no color-stop at all for proper fallback
  }

  // Circular radial gradient from @inner-color at the center outwards.
  .radial(@inner-color: #555; @outer-color: #333) {
    background-image: -webkit-radial-gradient(circle, @inner-color, @outer-color);
    background-image: radial-gradient(circle, @inner-color, @outer-color);
    background-repeat: no-repeat;
  }

  // Repeating diagonal stripes (used e.g. for striped progress bars).
  .striped(@color: rgba(255,255,255,.15); @angle: 45deg) {
    background-image: -webkit-linear-gradient(@angle, @color 25%, transparent 25%, transparent 50%, @color 50%, @color 75%, transparent 75%, transparent);
    background-image: -o-linear-gradient(@angle, @color 25%, transparent 25%, transparent 50%, @color 50%, @color 75%, transparent 75%, transparent);
    background-image: linear-gradient(@angle, @color 25%, transparent 25%, transparent 50%, @color 50%, @color 75%, transparent 75%, transparent);
  }
}
|
28harishkumar/blog
| 2,784
|
resources/assets/less/bootstrap/mixins/grid-framework.less
|
// Framework grid generation
//
// Used only by Bootstrap to generate the correct number of grid classes given
// any value of `@grid-columns`.

// Emit the shared base styles for every .col-*-1 .. .col-*-N selector by
// recursively accumulating the selector list, then styling it once.
.make-grid-columns() {
  // Common styles for all sizes of grid columns, widths 1-12
  .col(@index) { // initial
    @item: ~".col-xs-@{index}, .col-sm-@{index}, .col-md-@{index}, .col-lg-@{index}";
    .col((@index + 1), @item);
  }
  .col(@index, @list) when (@index =< @grid-columns) { // general; "=<" isn't a typo
    @item: ~".col-xs-@{index}, .col-sm-@{index}, .col-md-@{index}, .col-lg-@{index}";
    .col((@index + 1), ~"@{list}, @{item}");
  }
  .col(@index, @list) when (@index > @grid-columns) { // terminal
    @{list} {
      position: relative;
      // Prevent columns from collapsing when empty
      min-height: 1px;
      // Inner gutter via padding
      padding-left: (@grid-gutter-width / 2);
      padding-right: (@grid-gutter-width / 2);
    }
  }
  .col(1); // kickstart it
}

// Same accumulate-then-style recursion, but only applies `float: left`
// to the columns of a single breakpoint class (@class = xs/sm/md/lg).
.float-grid-columns(@class) {
  .col(@index) { // initial
    @item: ~".col-@{class}-@{index}";
    .col((@index + 1), @item);
  }
  .col(@index, @list) when (@index =< @grid-columns) { // general
    @item: ~".col-@{class}-@{index}";
    .col((@index + 1), ~"@{list}, @{item}");
  }
  .col(@index, @list) when (@index > @grid-columns) { // terminal
    @{list} {
      float: left;
    }
  }
  .col(1); // kickstart it
}

// Guarded overloads: emit one rule per (index, class, type) combination.
// `width` sets the column width; `push`/`pull` offset via left/right;
// `offset` indents via margin-left. The @index = 0 variants reset push/pull.
.calc-grid-column(@index, @class, @type) when (@type = width) and (@index > 0) {
  .col-@{class}-@{index} {
    width: percentage((@index / @grid-columns));
  }
}
.calc-grid-column(@index, @class, @type) when (@type = push) and (@index > 0) {
  .col-@{class}-push-@{index} {
    left: percentage((@index / @grid-columns));
  }
}
.calc-grid-column(@index, @class, @type) when (@type = push) and (@index = 0) {
  .col-@{class}-push-0 {
    left: auto;
  }
}
.calc-grid-column(@index, @class, @type) when (@type = pull) and (@index > 0) {
  .col-@{class}-pull-@{index} {
    right: percentage((@index / @grid-columns));
  }
}
.calc-grid-column(@index, @class, @type) when (@type = pull) and (@index = 0) {
  .col-@{class}-pull-0 {
    right: auto;
  }
}
.calc-grid-column(@index, @class, @type) when (@type = offset) {
  .col-@{class}-offset-@{index} {
    margin-left: percentage((@index / @grid-columns));
  }
}

// Basic looping in LESS
.loop-grid-columns(@index, @class, @type) when (@index >= 0) {
  .calc-grid-column(@index, @class, @type);
  // next iteration
  .loop-grid-columns((@index - 1), @class, @type);
}

// Create grid for specific class
.make-grid(@class) {
  .float-grid-columns(@class);
  .loop-grid-columns(@grid-columns, @class, width);
  .loop-grid-columns(@grid-columns, @class, pull);
  .loop-grid-columns(@grid-columns, @class, push);
  .loop-grid-columns(@grid-columns, @class, offset);
}
|
2977094657/BilibiliHistoryFetcher
| 64,639
|
scripts/bilibili_history.py
|
import json
import logging
import os
import time
import sqlite3
import asyncio
import concurrent.futures
import random
import string
from datetime import datetime, timedelta
import requests
from scripts.utils import load_config, get_base_path, get_output_path
# 导入获取视频详情的函数
from routers.download import get_video_info
config = load_config()
def load_cookie():
    """Read SESSDATA from the configuration file.

    Reloads the configuration on every call so cookie updates take
    effect without a restart. The cookie value is masked in console
    output so the credential never lands in logs verbatim.

    Returns:
        str: The SESSDATA value, or '' when it is missing or empty.
    """
    print("\n=== 读取 Cookie 配置 ===")
    # Reload the config file to pick up the newest SESSDATA.
    # SECURITY: do not dump the whole config here — it may contain passwords.
    current_config = load_config()
    sessdata = current_config.get('SESSDATA', '')
    if not sessdata:
        print("警告: 配置文件中未找到 SESSDATA")
        return ''
    # Strip quotes the user may have pasted around the value.
    sessdata = sessdata.strip('"')
    if not sessdata:
        print("警告: SESSDATA 为空")
        return ''
    # SECURITY: never print the full cookie — show a masked preview only.
    print(f"获取到的 SESSDATA: {sessdata[:6]}***(已脱敏)")
    return sessdata
def find_latest_local_history(base_folder='history_by_date'):
    """Find the date of the newest locally saved history record.

    Walks output/<base_folder>/<year>/<month>/<day>.json, picks the
    numerically largest year/month/day, and reads the last entry's
    `view_at` timestamp from that file.

    Args:
        base_folder: Folder (under the output dir) holding daily JSON files.

    Returns:
        datetime.date | None: Date of the newest record, or None when no
        usable local history exists.
    """
    print("正在查找本地最新的历史记录...")
    full_base_folder = get_output_path(base_folder)  # resolve under output dir
    print(f"\n=== 查找历史记录 ===")
    print(f"查找路径: {full_base_folder}")
    print(f"路径存在: {os.path.exists(full_base_folder)}")
    if not os.path.exists(full_base_folder):
        print("本地历史记录文件夹不存在,将从头开始同步。")
        return None
    latest_date = None
    try:
        latest_year = max([int(year) for year in os.listdir(full_base_folder) if year.isdigit()], default=None)
        if latest_year:
            latest_month = max(
                [int(month) for month in os.listdir(os.path.join(full_base_folder, str(latest_year))) if month.isdigit()],
                default=None
            )
            if latest_month:
                latest_day = max([
                    int(day.split('.')[0]) for day in
                    os.listdir(os.path.join(full_base_folder, str(latest_year), f"{latest_month:02}"))
                    if day.endswith('.json')
                ], default=None)
                if latest_day:
                    latest_file = os.path.join(full_base_folder, str(latest_year), f"{latest_month:02}",
                                               f"{latest_day:02}.json")
                    print(f"找到最新历史记录文件: {latest_file}")
                    with open(latest_file, 'r', encoding='utf-8') as f:
                        data = json.load(f)
                    # BUGFIX: guard against an empty file — the original
                    # indexed data[-1] unconditionally and raised IndexError.
                    if data:
                        latest_date = datetime.fromtimestamp(data[-1]['view_at']).date()
                    else:
                        print("最新历史记录文件为空,忽略。")
    except ValueError:
        print("历史记录目录格式不正确,可能尚未创建任何文件。")
    except (json.JSONDecodeError, KeyError, OSError) as e:
        # BUGFIX: corrupt JSON / missing field / unreadable file previously
        # escaped the except clause; treat all of them as "no local history".
        print(f"读取最新历史记录文件失败: {e}")
    if latest_date:
        print(f"本地最新的观看日期: {latest_date}")
    return latest_date
def save_history(history_data, base_folder='history_by_date'):
    """Append new history entries to per-day JSON files, skipping duplicates.

    Each entry is routed to output/<base_folder>/<YYYY>/<MM>/<DD>.json based
    on its `view_at` timestamp. A (bvid, view_at) pair already present in the
    day's file is not written again.

    Args:
        history_data: List of history entries from the Bilibili API; each has
            a top-level 'view_at' and a nested 'history.bvid'.
        base_folder: Folder (under the output dir) for the daily files.

    Returns:
        dict: Status payload echoing the input data.
    """
    logging.info(f"开始保存{len(history_data)}条新历史记录...")
    full_base_folder = get_output_path(base_folder)
    saved_count = 0
    print(f"\n=== 保存历史记录 ===")
    print(f"保存路径: {full_base_folder}")
    for entry in history_data:
        timestamp = entry['view_at']
        dt_object = datetime.fromtimestamp(timestamp)
        year = dt_object.strftime('%Y')
        month = dt_object.strftime('%m')
        day = dt_object.strftime('%d')
        folder_path = os.path.join(full_base_folder, year, month)
        os.makedirs(folder_path, exist_ok=True)
        file_path = os.path.join(folder_path, f"{day}.json")
        existing_records = set()  # (bvid, view_at) pairs already on disk
        # BUGFIX: initialize before the read attempts. The original left
        # daily_data unbound (NameError) when every encoding attempt failed
        # via `continue` without raising outside the inner loop.
        daily_data = []
        if os.path.exists(file_path):
            try:
                # Try several encodings; older files may not be UTF-8.
                for encoding in ['utf-8', 'gbk', 'utf-8-sig']:
                    try:
                        with open(file_path, 'r', encoding=encoding) as f:
                            daily_data = json.load(f)
                        # (bvid, view_at) uniquely identifies a record.
                        existing_records = {
                            (item['history']['bvid'], item['view_at'])
                            for item in daily_data
                        }
                        break
                    except UnicodeDecodeError:
                        continue
                    except json.JSONDecodeError:
                        continue
            except Exception as e:
                logging.warning(f"警告: 读取文件 {file_path} 失败: {e},将创建新文件")
                daily_data = []
        # Skip entries already present in the day's file.
        current_record = (entry['history']['bvid'], entry['view_at'])
        if current_record not in existing_records:
            daily_data.append(entry)
            existing_records.add(current_record)
            saved_count += 1
        # Always write back as UTF-8.
        with open(file_path, 'w', encoding='utf-8') as f:
            json.dump(daily_data, f, ensure_ascii=False, indent=4)
    logging.info(f"历史记录保存完成,共保存了{saved_count}条新记录。")
    return {"status": "success", "message": f"历史记录获取成功", "data": history_data}
def save_video_details(video_data):
    """Persist a full video-detail payload into the video library database.

    Creates the main `video_details` table and the child tables (pages,
    staff, subtitles, honors) on demand, then inserts a new row or updates
    the existing row keyed on `bvid`. On update, all child rows for the
    video are deleted and re-inserted from the fresh payload.

    Args:
        video_data: dict shaped like the Bilibili video-detail API response.

    Returns:
        bool: True on success, False on any error (changes rolled back).
    """
    try:
        # Resolve the database path under the output directory.
        db_path = get_output_path("video_library.db")
        print(f"视频库数据库路径: {db_path}")
        # Open the connection.
        conn = sqlite3.connect(db_path)
        cursor = conn.cursor()
        # Create the main table if it does not exist yet.
        cursor.execute('''
        CREATE TABLE IF NOT EXISTS video_details (
            id INTEGER PRIMARY KEY,
            bvid TEXT UNIQUE,
            aid INTEGER,
            videos INTEGER,
            tid INTEGER,
            tid_v2 INTEGER,
            tname TEXT,
            tname_v2 TEXT,
            copyright INTEGER,
            pic TEXT,
            title TEXT,
            pubdate INTEGER,
            ctime INTEGER,
            desc TEXT,
            state INTEGER,
            duration INTEGER,
            -- rights信息
            rights_bp INTEGER,
            rights_elec INTEGER,
            rights_download INTEGER,
            rights_movie INTEGER,
            rights_pay INTEGER,
            rights_hd5 INTEGER,
            rights_no_reprint INTEGER,
            rights_autoplay INTEGER,
            rights_ugc_pay INTEGER,
            rights_is_cooperation INTEGER,
            rights_ugc_pay_preview INTEGER,
            rights_no_background INTEGER,
            rights_clean_mode INTEGER,
            rights_is_stein_gate INTEGER,
            rights_is_360 INTEGER,
            rights_no_share INTEGER,
            rights_arc_pay INTEGER,
            rights_free_watch INTEGER,
            -- owner信息
            owner_mid INTEGER,
            owner_name TEXT,
            owner_face TEXT,
            -- stat信息
            stat_view INTEGER,
            stat_danmaku INTEGER,
            stat_reply INTEGER,
            stat_favorite INTEGER,
            stat_coin INTEGER,
            stat_share INTEGER,
            stat_now_rank INTEGER,
            stat_his_rank INTEGER,
            stat_like INTEGER,
            stat_dislike INTEGER,
            -- argue_info
            argue_msg TEXT,
            argue_type INTEGER,
            argue_link TEXT,
            -- 其他信息
            dynamic TEXT,
            cid INTEGER,
            dimension_width INTEGER,
            dimension_height INTEGER,
            dimension_rotate INTEGER,
            teenage_mode INTEGER,
            is_chargeable_season INTEGER,
            is_story INTEGER,
            is_upower_exclusive INTEGER,
            is_upower_play INTEGER,
            is_upower_preview INTEGER,
            enable_vt INTEGER,
            vt_display TEXT,
            is_upower_exclusive_with_qa INTEGER,
            no_cache INTEGER,
            -- 字幕信息
            subtitle_allow_submit INTEGER,
            -- 标签信息
            label_type INTEGER,
            -- 季节信息
            is_season_display INTEGER,
            -- 点赞信息
            like_icon TEXT,
            -- 其他布尔信息
            need_jump_bv INTEGER,
            disable_show_up_info INTEGER,
            is_story_play INTEGER,
            is_view_self INTEGER,
            -- 添加时间
            add_time INTEGER
        )
        ''')
        # Per-part (multi-P) table.
        cursor.execute('''
        CREATE TABLE IF NOT EXISTS video_pages (
            id INTEGER PRIMARY KEY,
            video_bvid TEXT,
            cid INTEGER,
            page INTEGER,
            from_source TEXT,
            part TEXT,
            duration INTEGER,
            vid TEXT,
            weblink TEXT,
            dimension_width INTEGER,
            dimension_height INTEGER,
            dimension_rotate INTEGER,
            first_frame TEXT,
            ctime INTEGER,
            FOREIGN KEY (video_bvid) REFERENCES video_details (bvid)
        )
        ''')
        # Collaborating-creator (staff) table.
        cursor.execute('''
        CREATE TABLE IF NOT EXISTS video_staff (
            id INTEGER PRIMARY KEY,
            video_bvid TEXT,
            mid INTEGER,
            title TEXT,
            name TEXT,
            face TEXT,
            vip_type INTEGER,
            vip_status INTEGER,
            official_role INTEGER,
            official_title TEXT,
            official_desc TEXT,
            follower INTEGER,
            FOREIGN KEY (video_bvid) REFERENCES video_details (bvid)
        )
        ''')
        # Subtitle-track table.
        cursor.execute('''
        CREATE TABLE IF NOT EXISTS video_subtitles (
            id INTEGER PRIMARY KEY,
            video_bvid TEXT,
            subtitle_id TEXT,
            lan TEXT,
            lan_doc TEXT,
            is_lock INTEGER,
            subtitle_url TEXT,
            type INTEGER,
            ai_type INTEGER,
            ai_status INTEGER,
            FOREIGN KEY (video_bvid) REFERENCES video_details (bvid)
        )
        ''')
        # Honors/awards table.
        cursor.execute('''
        CREATE TABLE IF NOT EXISTS video_honors (
            id INTEGER PRIMARY KEY,
            video_bvid TEXT,
            aid INTEGER,
            type INTEGER,
            desc TEXT,
            weekly_recommend_num INTEGER,
            FOREIGN KEY (video_bvid) REFERENCES video_details (bvid)
        )
        ''')
        # Does this video already have a row?
        cursor.execute("SELECT id FROM video_details WHERE bvid = ?", (video_data['bvid'],))
        existing = cursor.fetchone()
        current_time = int(time.time())
        bvid = video_data.get('bvid', '')
        # Flatten the nested API payload into one column->value mapping.
        video_info = {}
        # Basic fields.
        video_info['bvid'] = bvid
        video_info['aid'] = video_data.get('aid', 0)
        video_info['videos'] = video_data.get('videos', 0)
        video_info['tid'] = video_data.get('tid', 0)
        video_info['tid_v2'] = video_data.get('tid_v2', 0)
        video_info['tname'] = video_data.get('tname', '')
        video_info['tname_v2'] = video_data.get('tname_v2', '')
        video_info['copyright'] = video_data.get('copyright', 0)
        video_info['pic'] = video_data.get('pic', '')
        video_info['title'] = video_data.get('title', '')
        video_info['pubdate'] = video_data.get('pubdate', 0)
        video_info['ctime'] = video_data.get('ctime', 0)
        video_info['desc'] = video_data.get('desc', '')
        video_info['state'] = video_data.get('state', 0)
        video_info['duration'] = video_data.get('duration', 0)
        # rights sub-object.
        rights = video_data.get('rights', {})
        video_info['rights_bp'] = rights.get('bp', 0)
        video_info['rights_elec'] = rights.get('elec', 0)
        video_info['rights_download'] = rights.get('download', 0)
        video_info['rights_movie'] = rights.get('movie', 0)
        video_info['rights_pay'] = rights.get('pay', 0)
        video_info['rights_hd5'] = rights.get('hd5', 0)
        video_info['rights_no_reprint'] = rights.get('no_reprint', 0)
        video_info['rights_autoplay'] = rights.get('autoplay', 0)
        video_info['rights_ugc_pay'] = rights.get('ugc_pay', 0)
        video_info['rights_is_cooperation'] = rights.get('is_cooperation', 0)
        video_info['rights_ugc_pay_preview'] = rights.get('ugc_pay_preview', 0)
        video_info['rights_no_background'] = rights.get('no_background', 0)
        video_info['rights_clean_mode'] = rights.get('clean_mode', 0)
        video_info['rights_is_stein_gate'] = rights.get('is_stein_gate', 0)
        video_info['rights_is_360'] = rights.get('is_360', 0)
        video_info['rights_no_share'] = rights.get('no_share', 0)
        video_info['rights_arc_pay'] = rights.get('arc_pay', 0)
        video_info['rights_free_watch'] = rights.get('free_watch', 0)
        # owner sub-object.
        owner = video_data.get('owner', {})
        video_info['owner_mid'] = owner.get('mid', 0)
        video_info['owner_name'] = owner.get('name', '')
        video_info['owner_face'] = owner.get('face', '')
        # stat sub-object.
        stat = video_data.get('stat', {})
        video_info['stat_view'] = stat.get('view', 0)
        video_info['stat_danmaku'] = stat.get('danmaku', 0)
        video_info['stat_reply'] = stat.get('reply', 0)
        video_info['stat_favorite'] = stat.get('favorite', 0)
        video_info['stat_coin'] = stat.get('coin', 0)
        video_info['stat_share'] = stat.get('share', 0)
        video_info['stat_now_rank'] = stat.get('now_rank', 0)
        video_info['stat_his_rank'] = stat.get('his_rank', 0)
        video_info['stat_like'] = stat.get('like', 0)
        video_info['stat_dislike'] = stat.get('dislike', 0)
        # argue_info sub-object.
        argue_info = video_data.get('argue_info', {})
        video_info['argue_msg'] = argue_info.get('argue_msg', '')
        video_info['argue_type'] = argue_info.get('argue_type', 0)
        video_info['argue_link'] = argue_info.get('argue_link', '')
        # dynamic text and main cid.
        video_info['dynamic'] = video_data.get('dynamic', '')
        video_info['cid'] = video_data.get('cid', 0)
        # dimension sub-object.
        dimension = video_data.get('dimension', {})
        video_info['dimension_width'] = dimension.get('width', 0)
        video_info['dimension_height'] = dimension.get('height', 0)
        video_info['dimension_rotate'] = dimension.get('rotate', 0)
        # Misc flags (booleans are stored as 0/1 integers).
        video_info['teenage_mode'] = video_data.get('teenage_mode', 0)
        video_info['is_chargeable_season'] = 1 if video_data.get('is_chargeable_season', False) else 0
        video_info['is_story'] = 1 if video_data.get('is_story', False) else 0
        video_info['is_upower_exclusive'] = 1 if video_data.get('is_upower_exclusive', False) else 0
        video_info['is_upower_play'] = 1 if video_data.get('is_upower_play', False) else 0
        video_info['is_upower_preview'] = 1 if video_data.get('is_upower_preview', False) else 0
        video_info['enable_vt'] = video_data.get('enable_vt', 0)
        video_info['vt_display'] = video_data.get('vt_display', '')
        video_info['is_upower_exclusive_with_qa'] = 1 if video_data.get('is_upower_exclusive_with_qa', False) else 0
        video_info['no_cache'] = 1 if video_data.get('no_cache', False) else 0
        # Subtitle flags (subtitle dict reused below for the track list).
        subtitle = video_data.get('subtitle', {})
        video_info['subtitle_allow_submit'] = 1 if subtitle.get('allow_submit', False) else 0
        # Label info.
        label = video_data.get('label', {})
        video_info['label_type'] = label.get('type', 0)
        # Season info.
        video_info['is_season_display'] = 1 if video_data.get('is_season_display', False) else 0
        # Like icon.
        video_info['like_icon'] = video_data.get('like_icon', '')
        # Other boolean flags.
        video_info['need_jump_bv'] = 1 if video_data.get('need_jump_bv', False) else 0
        video_info['disable_show_up_info'] = 1 if video_data.get('disable_show_up_info', False) else 0
        video_info['is_story_play'] = video_data.get('is_story_play', 0)
        video_info['is_view_self'] = 1 if video_data.get('is_view_self', False) else 0
        # Record when this row was added/refreshed.
        video_info['add_time'] = current_time
        if existing:
            # Build the UPDATE statement dynamically from the mapping.
            update_fields = []
            update_values = []
            for key, value in video_info.items():
                if key != 'bvid':  # never rewrite the key column
                    update_fields.append(f"{key} = ?")
                    update_values.append(value)
            # WHERE-clause value goes last.
            update_values.append(video_info['bvid'])
            # Apply the update.
            cursor.execute(
                f"UPDATE video_details SET {', '.join(update_fields)} WHERE bvid = ?",
                update_values
            )
            print(f"已更新视频信息: {video_info['title']} (BV号: {video_info['bvid']})")
            # Drop child rows so they can be re-inserted from the fresh payload.
            cursor.execute("DELETE FROM video_pages WHERE video_bvid = ?", (bvid,))
            cursor.execute("DELETE FROM video_staff WHERE video_bvid = ?", (bvid,))
            cursor.execute("DELETE FROM video_subtitles WHERE video_bvid = ?", (bvid,))
            cursor.execute("DELETE FROM video_honors WHERE video_bvid = ?", (bvid,))
        else:
            # Build the INSERT statement dynamically from the mapping.
            columns = list(video_info.keys())
            placeholders = ['?'] * len(columns)
            values = [video_info[key] for key in columns]
            # Apply the insert.
            cursor.execute(
                f"INSERT INTO video_details ({', '.join(columns)}) VALUES ({', '.join(placeholders)})",
                values
            )
            print(f"已添加新视频到库: {video_info['title']} (BV号: {video_info['bvid']})")
        # Insert per-part rows.
        pages = video_data.get('pages', [])
        for page in pages:
            page_dimension = page.get('dimension', {})
            cursor.execute('''
            INSERT INTO video_pages (
                video_bvid, cid, page, from_source, part, duration, vid, weblink,
                dimension_width, dimension_height, dimension_rotate, first_frame, ctime
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            ''', (
                bvid,
                page.get('cid', 0),
                page.get('page', 0),
                page.get('from', ''),
                page.get('part', ''),
                page.get('duration', 0),
                page.get('vid', ''),
                page.get('weblink', ''),
                page_dimension.get('width', 0),
                page_dimension.get('height', 0),
                page_dimension.get('rotate', 0),
                page.get('first_frame', ''),
                page.get('ctime', 0)
            ))
        # Insert staff rows.
        staff_list = video_data.get('staff', [])
        for staff in staff_list:
            vip = staff.get('vip', {})
            official = staff.get('official', {})
            cursor.execute('''
            INSERT INTO video_staff (
                video_bvid, mid, title, name, face,
                vip_type, vip_status, official_role, official_title, official_desc, follower
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            ''', (
                bvid,
                staff.get('mid', 0),
                staff.get('title', ''),
                staff.get('name', ''),
                staff.get('face', ''),
                vip.get('type', 0),
                vip.get('status', 0),
                official.get('role', 0),
                official.get('title', ''),
                official.get('desc', ''),
                staff.get('follower', 0)
            ))
        # Insert subtitle-track rows (list taken from the `subtitle` dict above).
        subtitle_list = subtitle.get('list', [])
        for sub in subtitle_list:
            cursor.execute('''
            INSERT INTO video_subtitles (
                video_bvid, subtitle_id, lan, lan_doc, is_lock,
                subtitle_url, type, ai_type, ai_status
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
            ''', (
                bvid,
                sub.get('id_str', ''),
                sub.get('lan', ''),
                sub.get('lan_doc', ''),
                1 if sub.get('is_lock', False) else 0,
                sub.get('subtitle_url', ''),
                sub.get('type', 0),
                sub.get('ai_type', 0),
                sub.get('ai_status', 0)
            ))
        # Insert honor rows.
        honor_reply = video_data.get('honor_reply', {})
        honor_list = honor_reply.get('honor', [])
        for honor in honor_list:
            cursor.execute('''
            INSERT INTO video_honors (
                video_bvid, aid, type, desc, weekly_recommend_num
            ) VALUES (?, ?, ?, ?, ?)
            ''', (
                bvid,
                honor.get('aid', 0),
                honor.get('type', 0),
                honor.get('desc', ''),
                honor.get('weekly_recommend_num', 0)
            ))
        conn.commit()
        return True
    except Exception as e:
        print(f"保存视频详情时出错: {e}")
        import traceback
        print(traceback.format_exc())
        # Roll back the partial transaction if a connection was opened.
        if 'conn' in locals() and conn:
            conn.rollback()
        return False
    finally:
        # Always release the connection.
        if 'conn' in locals() and conn:
            conn.close()
# Helper: check whether a video is already present in the video library.
def is_video_exists(bvid):
    """Check whether a video is already stored in the video library.

    Args:
        bvid: Bilibili BV id.

    Returns:
        bool: True if a row exists, False otherwise (or on any error).
    """
    conn = None
    try:
        db_path = get_output_path("video_library.db")
        conn = sqlite3.connect(db_path)
        cursor = conn.cursor()
        cursor.execute("SELECT id FROM video_details WHERE bvid = ?", (bvid,))
        result = cursor.fetchone()
        return result is not None
    except Exception as e:
        print(f"检查视频是否存在时出错: {e}")
        return False
    finally:
        # BUGFIX: the original leaked the connection when the query raised.
        if conn:
            conn.close()
def create_invalid_videos_table():
    """Create the `invalid_videos` table if it does not exist.

    Returns:
        bool: True on success, False on error.
    """
    conn = None
    try:
        db_path = get_output_path("video_library.db")
        conn = sqlite3.connect(db_path)
        cursor = conn.cursor()
        # One row per broken/unavailable video, keyed on bvid.
        cursor.execute('''
        CREATE TABLE IF NOT EXISTS invalid_videos (
            id INTEGER PRIMARY KEY,
            bvid TEXT UNIQUE,
            error_type TEXT,
            error_code INTEGER,
            error_message TEXT,
            raw_response TEXT,
            first_check_time INTEGER,
            last_check_time INTEGER,
            check_count INTEGER DEFAULT 1
        )
        ''')
        conn.commit()
        print("成功创建或更新失效视频表")
        return True
    except Exception as e:
        print(f"创建失效视频表时出错: {e}")
        return False
    finally:
        # BUGFIX: close the connection even when table creation fails.
        if conn:
            conn.close()
def save_invalid_video(video_result):
    """Record an invalid/unavailable video in the database.

    Inserts a new row or bumps the check counter of an existing one.

    Args:
        video_result: Object exposing bvid / error_type / error_code /
            message / raw_response attributes (duck-typed API error result).

    Returns:
        bool: True on success, False otherwise.
    """
    conn = None
    try:
        # Pull the fields off the result object; bvid is mandatory.
        bvid = getattr(video_result, 'bvid', None)
        if not bvid:
            print("无法保存失效视频记录:缺少BV号")
            return False
        error_type = getattr(video_result, 'error_type', 'unknown')
        error_code = getattr(video_result, 'error_code', None)
        error_message = getattr(video_result, 'message', '')
        raw_response = getattr(video_result, 'raw_response', None)
        # Normalize raw_response to a JSON string for storage.
        if isinstance(raw_response, dict):
            raw_response = json.dumps(raw_response, ensure_ascii=False)
        elif raw_response is None:
            raw_response = ""
        current_time = int(time.time())
        # Make sure the table exists before opening our own connection.
        create_invalid_videos_table()
        db_path = get_output_path("video_library.db")
        conn = sqlite3.connect(db_path)
        cursor = conn.cursor()
        # Is this video already recorded?
        cursor.execute("SELECT check_count, first_check_time FROM invalid_videos WHERE bvid = ?", (bvid,))
        existing = cursor.fetchone()
        if existing:
            # Bump the counter and refresh the error details.
            check_count = existing[0] + 1
            cursor.execute('''
                UPDATE invalid_videos
                SET error_type = ?,
                    error_code = ?,
                    error_message = ?,
                    raw_response = ?,
                    last_check_time = ?,
                    check_count = ?
                WHERE bvid = ?
            ''', (error_type, error_code, error_message, raw_response, current_time, check_count, bvid))
            print(f"更新失效视频记录: {bvid}, 错误类型: {error_type}, 检查次数: {check_count}")
        else:
            # First sighting of this invalid video.
            cursor.execute('''
                INSERT INTO invalid_videos
                (bvid, error_type, error_code, error_message, raw_response, first_check_time, last_check_time)
                VALUES (?, ?, ?, ?, ?, ?, ?)
            ''', (bvid, error_type, error_code, error_message, raw_response, current_time, current_time))
            print(f"添加新失效视频记录: {bvid}, 错误类型: {error_type}")
        conn.commit()
        return True
    except Exception as e:
        print(f"保存失效视频记录时出错: {e}")
        import traceback
        print(traceback.format_exc())
        return False
    finally:
        # BUGFIX: the original leaked the connection on every error path.
        if conn:
            conn.close()
def check_invalid_video(bvid):
    """Look up whether a video is already recorded as invalid.

    Args:
        bvid: Bilibili BV id.

    Returns:
        dict: ``{"is_invalid": False}`` when unknown/on error, otherwise
        ``{"is_invalid": True, "error_type": ..., "last_check_time": ...}``.
    """
    conn = None
    try:
        db_path = get_output_path("video_library.db")
        conn = sqlite3.connect(db_path)
        cursor = conn.cursor()
        cursor.execute("SELECT id, error_type, last_check_time FROM invalid_videos WHERE bvid = ?", (bvid,))
        result = cursor.fetchone()
        if result:
            # Known invalid: report type and last check timestamp.
            return {
                "is_invalid": True,
                "error_type": result[1],
                "last_check_time": result[2]
            }
        return {"is_invalid": False}
    except Exception as e:
        print(f"检查失效视频时出错: {e}")
        return {"is_invalid": False}
    finally:
        # BUGFIX: guarantee the connection is released on every path.
        if conn:
            conn.close()
# 修改get_video_info_sync函数,在JSON解析错误时保存并打印原始响应内容,并确保将这类错误也添加到失效表中
def get_video_info_sync(bvid, sessdata, skip_exists=False, use_sessdata=True):
    """Synchronous video-detail fetcher, intended to be run from worker threads.

    Requests ``/x/web-interface/view`` for *bvid* with randomized headers, a
    random pre-delay and exponential-backoff retries.  Results are returned as
    ad-hoc objects created with ``type()`` — class attributes ``status``,
    ``message``, ``data``, ``bvid`` and, on failure, ``error_type``,
    ``error_code`` and ``raw_response``.

    Args:
        bvid: Bilibili video id (BV...).
        sessdata: SESSDATA cookie value for authenticated requests.
        skip_exists: when True, return ``None`` if the video already exists locally.
        use_sessdata: when False, the SESSDATA cookie is omitted from the request.

    Returns:
        ``None`` when skipped, otherwise a success/error response object as above.
    """
    # If requested, skip videos that are already stored in the local library
    if skip_exists and is_video_exists(bvid):
        print(f"视频 {bvid} 已存在于数据库中,跳过获取")
        return None
    # Short-circuit for videos already recorded as invalid
    invalid_check = check_invalid_video(bvid)
    if invalid_check["is_invalid"]:
        print(f"视频 {bvid} 已知失效,类型: {invalid_check['error_type']},最后检查时间: {datetime.fromtimestamp(invalid_check['last_check_time'])}")
        return type('ErrorResponse', (), {
            'status': 'error',
            'message': f"已知失效视频 (类型: {invalid_check['error_type']})",
            'data': None,
            'bvid': bvid,
            'error_type': invalid_check['error_type'],
            'error_code': None,
            'raw_response': None,
            'is_known_invalid': True
        })
    # Random 0.5-2s delay so the request pattern looks more human
    delay = 0.5 + random.random() * 1.5
    time.sleep(delay)
    # Generate random buvid and related cookie values
    buvid3 = ''.join(random.choices(string.ascii_letters + string.digits, k=32))
    buvid4 = ''.join(random.choices(string.ascii_letters + string.digits, k=32))
    b_nut = str(int(time.time() * 1000))
    # Randomize the User-Agent per call
    user_agents = [
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.159 Safari/537.36',
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:90.0) Gecko/20100101 Firefox/90.0',
        'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1.1 Safari/605.1.15',
        'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.131 Safari/537.36',
        'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.107 Safari/537.36'
    ]
    user_agent = random.choice(user_agents)
    # Build a full browser-like header set
    headers = {
        'User-Agent': user_agent,
        'Referer': f'https://www.bilibili.com/video/{bvid}',
        'Accept': 'application/json, text/plain, */*',
        'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
        'Accept-Encoding': 'gzip, deflate, br',
        'Connection': 'keep-alive',
        'Origin': 'https://www.bilibili.com',
        'Sec-Fetch-Site': 'same-site',
        'Sec-Fetch-Mode': 'cors',
        'Sec-Fetch-Dest': 'empty',
        'Sec-Ch-Ua': '"Chromium";v="92", " Not A;Brand";v="99", "Google Chrome";v="92"',
        'Sec-Ch-Ua-Mobile': '?0',
        'Cookie': f'buvid3={buvid3}; buvid4={buvid4}; b_nut={b_nut}; bsource=search_google; _uuid=D{buvid3}-{b_nut}-{buvid4}'
    }
    # Append SESSDATA to the cookie when present and wanted
    if sessdata and use_sessdata:
        headers['Cookie'] += f'; SESSDATA={sessdata}'
    # Retry loop with exponential backoff
    max_retries = 3
    last_response_text = None
    last_error = None
    for retry in range(max_retries):
        try:
            # Plain synchronous request; avoids nested event-loop problems
            url = f"https://api.bilibili.com/x/web-interface/view?bvid={bvid}"
            response = requests.get(url, headers=headers, timeout=20)
            # Keep the raw response text so errors can log it
            last_response_text = response.text
            # Check the HTTP status code
            if response.status_code == 412:
                print(f"获取视频 {bvid} 的详情被服务器拒绝(412),等待后重试...")
                print(f"原始响应: {last_response_text[:500]}...")  # log part of the body
                # 412 (request banned): back off much more aggressively (4**retry)
                retry_delay = (4 ** retry) + random.uniform(1, 5)
                time.sleep(retry_delay)
                continue
            # Any other non-200 status is returned as an error immediately
            if response.status_code != 200:
                print(f"获取视频 {bvid} 的详情失败,HTTP状态码: {response.status_code}")
                print(f"原始响应: {last_response_text[:500]}...")  # log part of the body
                return type('ErrorResponse', (), {
                    'status': 'error',
                    'message': f'HTTP错误 {response.status_code}: {response.reason}',
                    'data': None,
                    'bvid': bvid,
                    'error_type': 'http_error',
                    'error_code': response.status_code,
                    'raw_response': last_response_text
                })
            # Try to parse the JSON response
            try:
                data = response.json()
            except json.JSONDecodeError as json_err:
                print(f"获取视频 {bvid} 时出现JSON解析错误: {str(json_err)}")
                print(f"原始响应内容: {last_response_text[:500]}...")  # log part for analysis
                # Treat a parse error as an invalid video
                error_response = type('ErrorResponse', (), {
                    'status': 'error',
                    'message': f'JSON解析错误: {str(json_err)}',
                    'data': None,
                    'bvid': bvid,
                    'error_type': 'parse_error',
                    'error_code': None,
                    'raw_response': last_response_text
                })
                # Persist into the invalid-videos table
                save_invalid_video(error_response)
                return error_response
            # Dump the raw payload of invalid videos for analysis
            if data.get('code') != 0:
                print(f"视频 {bvid} 失效,B站返回数据: {json.dumps(data, ensure_ascii=False)}")
            # Map API error codes to typed error responses
            if data.get('code') != 0:
                error_code = data.get('code')
                error_msg = data.get('message', '未知错误')
                # Handle well-known error codes specifically
                if error_code == -404:
                    print(f"获取视频 {bvid} 的详情失败: 视频不存在或已被删除")
                    return type('ErrorResponse', (), {
                        'status': 'error',
                        'message': f'视频不存在或已被删除 (错误码: -404)',
                        'data': None,
                        'bvid': bvid,
                        'error_type': 'not_found',
                        'error_code': error_code,
                        'raw_response': data
                    })
                elif error_code == 62002:
                    print(f"获取视频 {bvid} 的详情失败: 视频已设为私有或被隐藏")
                    return type('ErrorResponse', (), {
                        'status': 'error',
                        'message': f'视频已设为私有或被隐藏 (错误码: 62002)',
                        'data': None,
                        'bvid': bvid,
                        'error_type': 'invisible',
                        'error_code': error_code,
                        'raw_response': data
                    })
                else:
                    print(f"获取视频 {bvid} 的详情失败: API错误 {error_code}: {error_msg}")
                    return type('ErrorResponse', (), {
                        'status': 'error',
                        'message': f'API错误 {error_code}: {error_msg}',
                        'data': None,
                        'bvid': bvid,
                        'error_type': 'api_error',
                        'error_code': error_code,
                        'raw_response': data
                    })
            # Success: wrap the payload
            return type('SuccessResponse', (), {
                'status': 'success',
                'message': '获取视频信息成功',
                'data': data.get('data', {}),
                'bvid': bvid
            })
        except requests.exceptions.RequestException as e:
            # Network/transport failure: exponential backoff then retry
            last_error = str(e)
            retry_delay = (2 ** retry) + random.uniform(0.5, 2)
            print(f"获取视频 {bvid} 时出错: {e},{retry+1}/{max_retries}次重试,等待{retry_delay:.2f}秒")
            if last_response_text:
                print(f"上次响应内容: {last_response_text[:500]}...")
            time.sleep(retry_delay)
        except Exception as e:
            # Any other failure: same backoff then retry
            last_error = str(e)
            retry_delay = (2 ** retry) + random.uniform(0.5, 2)
            print(f"处理视频 {bvid} 时出错: {e},{retry+1}/{max_retries}次重试,等待{retry_delay:.2f}秒")
            if last_response_text:
                print(f"上次响应内容: {last_response_text[:500]}...")
            time.sleep(retry_delay)
    # All retries exhausted: build a generic error response
    error_response = type('ErrorResponse', (), {
        'status': 'error',
        'message': f'获取视频 {bvid} 详情失败,已重试 {max_retries} 次: {last_error}',
        'data': None,
        'bvid': bvid,
        'error_type': 'retry_exceeded',
        'error_code': None,
        'raw_response': last_response_text
    })
    # "Expecting value" is json's parse-failure wording; reclassify as parse_error
    if 'Expecting value' in str(last_error):
        error_response.error_type = 'parse_error'
    # Record the exhausted-retry failure in the invalid-videos table too
    save_invalid_video(error_response)
    return error_response
# 批量保存视频详情,修改以处理失效视频
def batch_save_video_details(video_details_list):
    """Persist a batch of fetched video-detail responses.

    Each element is either ``None`` (a skipped video), an error response or a
    success response produced by ``get_video_info_sync``.  Permanent errors
    are mirrored into the ``invalid_videos`` table; successes go through
    ``save_video_details``.

    Args:
        video_details_list: list of response objects (or ``None``).

    Returns:
        dict with counters ``success``, ``fail``, ``invalid``, ``skipped``
        and the per-category ``error_stats`` breakdown.
    """
    success_count = 0
    fail_count = 0
    skipped_count = 0
    invalid_count = 0
    # Per-category failure counters
    error_stats = {
        "404_not_found": 0,   # video missing or deleted
        "62002_invisible": 0, # video made private/hidden
        "412_banned": 0,      # request rejected/banned by server
        "decode_error": 0,    # decoding failure
        "parse_error": 0,     # JSON parse failure
        "empty_data": 0,      # payload was empty
        "save_error": 0,      # failure while persisting
        "other_error": 0      # anything else
    }
    # Make sure the invalid-videos table exists before recording into it
    create_invalid_videos_table()
    for video_data in video_details_list:
        if video_data is None:
            # Skipped upstream; counts neither as success nor failure
            skipped_count += 1
            continue
        # Handle every non-success response
        if not hasattr(video_data, 'status') or video_data.status != "success":
            fail_count += 1
            # Extract the error message (defensive double check of 'message')
            error_msg = getattr(video_data, 'message', '未知错误') if hasattr(video_data, 'message') else '未知错误'
            # Classify by message substring.
            # NOTE(review): this is a heuristic on the human-readable message;
            # e.g. the '稿件不可见' branch may not match the actual 62002
            # message produced upstream ('视频已设为私有或被隐藏') — the
            # numeric '62002' check is what actually catches it. Consider
            # classifying on error_type instead; verify before changing.
            if '404' in error_msg or '视频不存在' in error_msg:
                error_stats["404_not_found"] += 1
            elif '62002' in error_msg or '稿件不可见' in error_msg:
                error_stats["62002_invisible"] += 1
            elif '412' in error_msg or 'request was banned' in error_msg:
                error_stats["412_banned"] += 1
            elif 'decode' in error_msg or '解码' in error_msg:
                error_stats["decode_error"] += 1
            elif 'parse_error' in error_msg or 'JSON解析错误' in error_msg:
                error_stats["parse_error"] += 1
            else:
                error_stats["other_error"] += 1
            # Permanent failures are mirrored into the invalid-videos table
            if hasattr(video_data, 'error_type'):
                error_type = getattr(video_data, 'error_type')
                # Error types considered permanent (worth remembering)
                permanent_error_types = ['not_found', 'invisible', 'api_error', 'parse_error']
                if error_type in permanent_error_types:
                    # Record the permanent error in the invalid-videos table
                    saved = save_invalid_video(video_data)
                    if saved:
                        invalid_count += 1
                        print(f"已将视频 {getattr(video_data, 'bvid', '未知')} 的错误信息保存到失效表,错误类型: {error_type}")
            print(f"跳过保存视频详情:获取数据失败 - {error_msg}")
            continue
        if not hasattr(video_data, 'data') or not video_data.data:
            fail_count += 1
            error_stats["empty_data"] += 1
            print("跳过保存视频详情:数据为空")
            continue
        # Persist the successful payload
        try:
            bvid = video_data.data.get('bvid', '未知BV号')
            title = video_data.data.get('title', '未知标题')
            result = save_video_details(video_data.data)
            if result:
                success_count += 1
                print(f"成功保存视频: {title} ({bvid})")
            else:
                fail_count += 1
                error_stats["save_error"] += 1
                print(f"保存视频详情失败: {bvid}")
        except Exception as e:
            fail_count += 1
            error_stats["save_error"] += 1
            print(f"保存视频详情时发生异常: {str(e)}")
    # Summary output
    print(f"\n=== 批量保存完成 ===")
    print(f"成功:{success_count},失败:{fail_count},失效视频:{invalid_count},跳过:{skipped_count}")
    # Per-category failure breakdown
    if fail_count > 0:
        print("\n错误类型统计:")
        for error_type, count in error_stats.items():
            if count > 0:
                print(f"- {error_type}: {count}次")
    return {
        "success": success_count,
        "fail": fail_count,
        "invalid": invalid_count,
        "skipped": skipped_count,
        "error_stats": error_stats
    }
async def fetch_and_compare_history(cookie, latest_date, skip_exists=False, process_video_details=False):
    """
    Fetch Bilibili watch history via the cursor API and diff against local data.

    Pagination stops when an entry's ``view_at`` falls at or before midnight
    of *latest_date*, when the cursor resets, or after several consecutive
    empty pages.  Optionally fetches per-video details with a thread pool.

    Args:
        cookie: the SESSDATA value from the user's cookie.
        latest_date: newest local record date (a ``date``), or falsy to fetch
            everything available.
        skip_exists: skip videos whose details already exist locally.
        process_video_details: also fetch video details for every bvid seen.

    Returns:
        list: the new raw history entries (empty list when the cookie is stale).
    """
    print("\n=== API 请求信息 ===")
    print(f"使用的 Cookie: {cookie}")
    url = 'https://api.bilibili.com/x/web-interface/history/cursor'
    # Extra cookie fields the endpoint expects alongside SESSDATA
    headers = {
        'Cookie': f"SESSDATA={cookie}; buvid3=random_string; b_nut=1234567890; buvid4=random_string",
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
        'Referer': 'https://www.bilibili.com',
        'Origin': 'https://www.bilibili.com',
        'Accept': 'application/json, text/plain, */*',
        'Connection': 'keep-alive'
    }
    print(f"请求头: {headers}")
    params = {
        'ps': 30,
        'max': '',
        'view_at': '',
        'business': '',
    }
    # Probe the API once to detect a stale cookie early
    response = requests.get(url, headers=headers, params=params)
    print(f"\n=== API 响应信息 ===")
    print(f"状态码: {response.status_code}")
    try:
        response_data = response.json()
        if response_data.get('code') == -101:
            print("Cookie 已失效,请更新 SESSDATA")
            return []
    except Exception:
        # Fix: narrowed the original bare `except:` (which also swallowed
        # KeyboardInterrupt/SystemExit) to Exception.
        print(f"响应内容: {response.text}")
    all_new_data = []
    all_video_ids = []  # bvids whose details may need fetching
    page_count = 0
    last_view_at = None  # view_at of the last entry seen
    empty_page_count = 0  # consecutive empty pages seen
    max_empty_pages = 3  # give up after this many empty pages in a row
    # Fix: these accumulators were previously referenced in the detail-saving
    # loop below without ever being initialized, raising NameError the first
    # time a batch was saved; they also overwrote instead of accumulating.
    total_success = 0
    total_fail = 0
    error_stats = {}
    if latest_date:
        # Stop once entries are no newer than midnight of the latest local date
        cutoff_timestamp = int(datetime.combine(latest_date, datetime.min.time()).timestamp())
        print(f"设置停止条件:view_at <= {cutoff_timestamp} ({latest_date})")
    else:
        cutoff_timestamp = 0
        print("没有本地数据,抓取所有可用的历史记录。")
    while True:
        page_count += 1
        print(f"发送请求获取数据... (第{page_count}页)")
        response = requests.get(url, headers=headers, params=params)
        if response.status_code == 200:
            try:
                data = response.json()
                if data['code'] != 0:
                    print(f"API请求失败,错误码: {data['code']}, 错误信息: {data['message']}")
                    break
                if 'data' in data and 'list' in data['data']:
                    fetched_list = data['data']['list']
                    print(f"获取到{len(fetched_list)}条记录,进行对比...")
                    # An empty page may mean the end, or just a sparse window
                    if len(fetched_list) == 0:
                        empty_page_count += 1
                        print(f"连续获取到空页面 {empty_page_count}/{max_empty_pages}")
                        if empty_page_count >= max_empty_pages:
                            print(f"连续{max_empty_pages}次获取到空页面,停止请求。")
                            break
                        if 'cursor' in data['data']:
                            new_view_at = data['data']['cursor']['view_at']
                            current_max = data['data']['cursor']['max']
                            # A reset cursor (max of 0 / tiny value) marks the end
                            if current_max == 0 or (last_view_at and current_max < 1000000):
                                print(f"检测到游标重置(max={current_max}),停止请求。")
                                break
                            # A non-advancing cursor means we are looping
                            if last_view_at and new_view_at >= last_view_at:
                                print(f"检测到重复数据(当前游标时间 {new_view_at} >= 最后记录时间 {last_view_at}),停止请求。")
                                break
                            params['max'] = current_max
                            params['view_at'] = new_view_at
                            print(f"获取到空页,尝试继续请求。游标更新:max={params['max']}, view_at={params['view_at']}")
                            continue
                        else:
                            print("没有更多数据,停止请求。")
                            break
                    else:
                        # Non-empty page: reset the empty-page counter
                        empty_page_count = 0
                        # Track the timestamp of the last entry on this page
                        if fetched_list:
                            last_view_at = fetched_list[-1]['view_at']
                            print(f"更新最后记录时间: {last_view_at}")
                        # Collect bvids now; details are fetched after pagination
                        for entry in fetched_list:
                            print(f"标题: {entry['title']}, 观看时间: {datetime.fromtimestamp(entry['view_at'])}")
                            bvid = entry['history'].get('bvid', '')
                            if bvid:
                                all_video_ids.append(bvid)
                        new_entries = []
                        should_stop = False
                        for entry in fetched_list:
                            view_at = entry['view_at']
                            if view_at > cutoff_timestamp:
                                new_entries.append(entry)
                            else:
                                should_stop = True
                        if new_entries:
                            all_new_data.extend(new_entries)
                            print(f"找到{len(new_entries)}条新记录。")
                        if should_stop:
                            print("达到停止条件,停止请求。")
                            break
                        if 'cursor' in data['data']:
                            current_max = data['data']['cursor']['max']
                            params['max'] = current_max
                            params['view_at'] = data['data']['cursor']['view_at']
                            print(f"请求游标更新:max={params['max']}, view_at={params['view_at']}")
                        else:
                            print("未能获取游标信息,停止请求。")
                            break
                        await asyncio.sleep(1)
                else:
                    print("没有更多的数据或数据结构错误。")
                    break
            except json.JSONDecodeError:
                print("JSON Decode Error: 无法解析服务器响应")
                break
        else:
            print(f"请求失败,状态码: {response.status_code}")
            break
    # After pagination, fetch the collected videos' details with threads
    if all_video_ids and process_video_details:
        print(f"\n=== 多线程获取视频详情 ===")
        print(f"总共有 {len(all_video_ids)} 个视频需要获取详情")
        # De-duplicate
        unique_video_ids = list(set(all_video_ids))
        print(f"去重后有 {len(unique_video_ids)} 个不同的视频")
        # Thread pool capped at 30 workers
        max_workers = min(30, len(unique_video_ids))
        print(f"使用 {max_workers} 个线程并行获取视频详情")
        # Process in batches to bound concurrent work
        batch_size = 30
        results = []
        for i in range(0, len(unique_video_ids), batch_size):
            batch = unique_video_ids[i:i+batch_size]
            print(f"处理第 {i//batch_size + 1} 批,共 {len(batch)} 个视频")
            with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
                # Submit each bvid, honouring skip_exists
                future_to_bvid = {
                    executor.submit(get_video_info_sync, bvid, cookie, skip_exists): bvid
                    for bvid in batch
                }
                for future in concurrent.futures.as_completed(future_to_bvid):
                    bvid = future_to_bvid[future]
                    try:
                        result = future.result()
                        if result is None:  # skipped video
                            continue
                        if result.status == "success":
                            results.append(result)
                            print(f"成功获取视频 {bvid} 的详情: {result.data.get('title', '')}")
                        else:
                            print(f"获取视频 {bvid} 的详情失败: {result.message}")
                    except Exception as e:
                        print(f"处理视频 {bvid} 时出错: {e}")
            # Persist this batch
            if results:
                print(f"开始批量保存第 {i//batch_size + 1} 批视频详情...")
                batch_result = batch_save_video_details(results)
                # Fix: accumulate instead of overwriting per batch
                total_success += batch_result["success"]
                total_fail += batch_result["fail"]
                # Merge per-batch error statistics
                if "error_stats" in batch_result:
                    for error_type, count in batch_result["error_stats"].items():
                        if error_type not in error_stats:
                            error_stats[error_type] = 0
                        error_stats[error_type] += count
                print(f"批次完成: 成功 {batch_result['success']},失败 {batch_result['fail']}")
                results = []  # reset for the next batch
            # Brief pause between batches to ease server load
            time.sleep(1)
    elif all_video_ids:
        print(f"\n跳过视频详情获取 (process_video_details={process_video_details})")
        print(f"如需获取视频详情,请使用/fetch/video-details-stats和/fetch/fetch-video-details接口")
    return all_new_data
async def fetch_history(output_dir: str = "history_by_date", skip_exists: bool = False, process_video_details: bool = False) -> dict:
    """Entry point: fetch Bilibili history and optionally store video details.

    Args:
        output_dir: history output directory, relative to the project output root.
        skip_exists: skip videos whose details already exist locally.
        process_video_details: when True, also fetch details for new bvids.

    Returns:
        dict: a ``{"status", "message", ...}`` result; on success it is the
        result of ``save_history`` (optionally extended with
        ``video_details_result``).
    """
    try:
        # Re-load the config to pick up the latest SESSDATA
        current_config = load_config()
        # get_output_path already prefixes the output directory itself
        full_output_dir = get_output_path(output_dir)
        print("\n=== 路径信息 ===")
        print(f"输出目录: {full_output_dir}")
        print(f"目录存在: {os.path.exists(full_output_dir)}")
        cookie = current_config.get('SESSDATA', '')
        if not cookie:
            return {"status": "error", "message": "未找到SESSDATA配置"}
        latest_date = find_latest_local_history(output_dir)  # relative path expected
        # fetch_and_compare_history is async, so await it
        new_history = await fetch_and_compare_history(cookie, latest_date, skip_exists, process_video_details)
        result = {"status": "success", "message": "没有新记录需要更新", "data": {}}
        if new_history:
            save_result = save_history(new_history, output_dir)  # relative path expected
            result = save_result
        # Optionally fetch details for the newly saved records
        if process_video_details and save_result["status"] == "success":
            print("\n=== 开始处理新记录的视频详情 ===")
            # Collect every bvid from the new records.
            # NOTE(review): new_history is iterated with .items() here but is
            # the flat list returned by fetch_and_compare_history — confirm
            # which shape save_history/new_history actually carries.
            new_bvids = []
            for date, records in new_history.items():
                for record in records:
                    if record.get("bvid"):
                        new_bvids.append(record["bvid"])
            print(f"新记录中包含 {len(new_bvids)} 个视频ID")
            # Delegate to the detail-fetching pipeline
            if new_bvids:
                video_details_result = await fetch_video_details_only(specific_videos=new_bvids)
                result["video_details_result"] = video_details_result
                if video_details_result["status"] == "success":
                    print("成功获取新记录的视频详情")
                else:
                    print(f"获取新记录的视频详情时出错: {video_details_result['message']}")
        return result
    except Exception as e:
        logging.error(f"获取历史记录时发生错误: {e}")
        import traceback
        logging.error(traceback.format_exc())
        return {"status": "error", "message": str(e)}
# 新增: 批量获取历史记录中的视频详情但不重新获取历史记录
async def fetch_video_details_only(max_videos: int = 0, specific_videos: list = None, use_sessdata: bool = True) -> dict:
    """
    Fetch video details in bulk for bvids taken from the local history.

    Args:
        max_videos: upper bound on videos processed this run; 0 means unlimited.
        specific_videos: explicit list of bvids to fetch; takes precedence over
            the automatically computed pending list.
        use_sessdata: whether to authenticate requests with SESSDATA.

    Returns:
        dict: ``{"status", "message", "data"}`` with counters, timing,
        per-category error stats and up to 20 failed-video records.
    """
    try:
        # Normalize the limit to a legal integer
        if max_videos is None:
            max_videos = 0
        print("\n=== 开始批量获取视频详情 ===")
        # Load the cookie
        current_config = load_config()
        cookie = current_config.get('SESSDATA', '')
        if not cookie:
            return {"status": "error", "message": "未找到SESSDATA配置"}
        # Without an explicit list, compute the pending videos automatically
        if not specific_videos:
            # Stats include the list of videos still to fetch
            stats = await get_video_details_stats()
            if stats["status"] != "success":
                return stats
            # Pending list from the stats payload
            videos_to_fetch = stats["data"]["pending_videos"]
            total_videos_to_fetch = len(videos_to_fetch)
            if total_videos_to_fetch == 0:
                return {"status": "success", "message": "所有历史记录的视频详情都已获取", "data": {"skipped": True, "processed": 0}}
        else:
            # Use the caller-supplied list as-is
            videos_to_fetch = specific_videos
            total_videos_to_fetch = len(videos_to_fetch)
            print(f"使用指定的视频列表,共 {total_videos_to_fetch} 个视频")
        # Cap the number of videos processed this run
        if max_videos > 0 and len(videos_to_fetch) > max_videos:
            print(f"限制处理的视频数量为 {max_videos} 个(总共 {total_videos_to_fetch} 个待处理)")
            videos_to_fetch = videos_to_fetch[:max_videos]
        print(f"本次将处理 {len(videos_to_fetch)} 个视频")
        # Lower concurrency than the history fetcher to avoid 412 bans
        max_workers = min(15, len(videos_to_fetch))  # at most 15 threads
        total_success = 0
        total_fail = 0
        skipped_invalid_count = 0
        # Per-category failure counters
        error_stats = {
            "404_not_found": 0,   # video missing or deleted
            "62002_invisible": 0, # video made private/hidden
            "412_banned": 0,      # request rejected/banned
            "decode_error": 0,    # decoding failure
            "parse_error": 0,     # JSON parse failure
            "empty_data": 0,      # payload was empty
            "save_error": 0,      # failure while persisting
            "other_error": 0      # anything else
        }
        # Records of failed videos for the response payload
        error_videos = []
        # Smaller batches than the history fetcher
        batch_size = 20  # at most 20 per batch
        # Shuffle so request order looks less mechanical
        random.shuffle(videos_to_fetch)
        start_time = time.time()
        for i in range(0, len(videos_to_fetch), batch_size):
            batch = videos_to_fetch[i:i+batch_size]
            batch_num = i//batch_size + 1
            total_batches = (len(videos_to_fetch)-1)//batch_size + 1
            print(f"处理第 {batch_num}/{total_batches} 批,共 {len(batch)} 个视频")
            results = []
            batch_success = 0
            batch_fail = 0
            batch_skipped = 0
            with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
                future_to_bvid = {
                    executor.submit(get_video_info_sync, bvid, cookie, False, use_sessdata): bvid
                    for bvid in batch
                }
                for future in concurrent.futures.as_completed(future_to_bvid):
                    bvid = future_to_bvid[future]
                    try:
                        result = future.result()
                        # Skip videos already known to be invalid
                        if result and hasattr(result, 'is_known_invalid') and result.is_known_invalid:
                            batch_skipped += 1
                            skipped_invalid_count += 1
                            print(f"跳过已知失效视频 {bvid}")
                            continue
                        if result and result.status == "success":
                            results.append(result)
                            batch_success += 1
                            print(f"成功获取视频 {bvid} 的详情: {result.data.get('title', '')}")
                        else:
                            batch_fail += 1
                            error_msg = result.message if result and hasattr(result, 'message') else "未知错误"
                            error_type = result.error_type if result and hasattr(result, 'error_type') else "unknown"
                            print(f"获取视频 {bvid} 的详情失败: {error_msg}, 类型: {error_type}")
                            # Record for the response payload
                            error_videos.append({
                                "bvid": bvid,
                                "error_type": error_type,
                                "error_message": error_msg
                            })
                    except Exception as e:
                        batch_fail += 1
                        print(f"处理视频 {bvid} 时出错: {e}")
                        # Record for the response payload
                        error_videos.append({
                            "bvid": bvid,
                            "error_type": "exception",
                            "error_message": str(e)
                        })
            # Persist this batch's successes
            if results:
                batch_result = batch_save_video_details(results)
                total_success += batch_result["success"]
                total_fail += batch_result["fail"] + (batch_fail - batch_result.get("invalid", 0))
                # Merge per-batch error statistics
                if "error_stats" in batch_result:
                    for error_type, count in batch_result["error_stats"].items():
                        if error_type in error_stats:
                            error_stats[error_type] += count
                print(f"批次完成: 成功 {batch_result['success']},失败 {batch_result['fail']},跳过 {batch_skipped}")
            else:
                print(f"批次完成: 没有成功获取的视频,跳过 {batch_skipped}")
                total_fail += batch_fail
            # Progress reporting
            processed_videos = i + len(batch)
            progress_percentage = (processed_videos / len(videos_to_fetch)) * 100
            elapsed_time = time.time() - start_time
            print(f"进度: {processed_videos}/{len(videos_to_fetch)} ({progress_percentage:.2f}%),耗时: {elapsed_time:.2f}秒")
            # Longer, randomized pause between batches
            batch_delay = 3 + random.random() * 4  # 3-7s random delay
            print(f"批次间暂停 {batch_delay:.2f} 秒...")
            time.sleep(batch_delay)
            # Bail out early when failures dominate
            if total_fail > 5 * total_success and total_fail > 10:
                print(f"失败过多 (成功:{total_success},失败:{total_fail}),提前停止任务")
                break
        # Total elapsed time
        total_elapsed_time = time.time() - start_time
        # Final failure breakdown
        if total_fail > 0:
            print("\n=== 错误类型统计 ===")
            for error_type, count in error_stats.items():
                if count > 0:
                    percentage = (count/total_fail*100) if total_fail > 0 else 0
                    print(f"- {error_type}: {count}次 ({percentage:.1f}%)")
        # Videos left unprocessed (due to max_videos cap)
        remaining_videos = total_videos_to_fetch - len(videos_to_fetch)
        # An explicit list has no notion of "remaining"
        if specific_videos:
            remaining_videos = 0
        # Build the result payload
        return {
            "status": "success",
            "message": f"批量获取视频详情完成,成功: {total_success},失败: {total_fail},跳过: {skipped_invalid_count}",
            "data": {
                "total_videos": total_videos_to_fetch,
                "processed_videos": len(videos_to_fetch),
                "success_count": total_success,
                "fail_count": total_fail,
                "skipped_invalid_count": skipped_invalid_count,
                "remaining_videos": remaining_videos,
                "elapsed_time": total_elapsed_time,
                "error_stats": error_stats,
                "error_videos": error_videos[:20]  # cap to keep the response small
            }
        }
    except Exception as e:
        error_msg = f"批量获取视频详情时出错: {str(e)}"
        print(error_msg)
        import traceback
        print(traceback.format_exc())
        return {"status": "error", "message": error_msg}
async def get_invalid_videos_from_db(page=1, limit=50, error_type=None):
    """Return a paginated listing of the ``invalid_videos`` table.

    Args:
        page: 1-based page number.
        limit: page size.
        error_type: optional exact filter on the ``error_type`` column.

    Returns:
        dict with ``total``, ``page``, ``limit``, ``has_more`` and ``items``
        (each item is a plain dict of the selected columns).

    Raises:
        Re-raises any underlying exception after logging the traceback.
    """
    try:
        # Connect to the video-library database
        db_path = get_output_path("video_library.db")
        conn = sqlite3.connect(db_path)
        try:
            conn.row_factory = sqlite3.Row  # dict-style row access
            cursor = conn.cursor()
            # Build the optional filter; values are bound as parameters, so
            # the f-strings below only interpolate this fixed clause.
            query_params = []
            where_clause = ""
            if error_type:
                where_clause = "WHERE error_type = ?"
                query_params.append(error_type)
            # Total row count for the pagination metadata
            count_sql = f"""
                SELECT COUNT(*) as total FROM invalid_videos {where_clause}
            """
            cursor.execute(count_sql, query_params)
            total = cursor.fetchone()["total"]
            # Fetch the requested page, most recently checked first
            offset = (page - 1) * limit
            select_sql = f"""
                SELECT
                    id, bvid, error_type, error_code, error_message,
                    first_check_time, last_check_time, check_count
                FROM invalid_videos
                {where_clause}
                ORDER BY last_check_time DESC
                LIMIT ? OFFSET ?
            """
            cursor.execute(select_sql, query_params + [limit, offset])
            items = [dict(row) for row in cursor.fetchall()]
        finally:
            # Fix: the original leaked the connection whenever a query raised;
            # close it unconditionally.
            conn.close()
        # Paginated result payload
        return {
            "total": total,
            "page": page,
            "limit": limit,
            "has_more": total > page * limit,
            "items": items
        }
    except Exception as e:
        print(f"获取失效视频列表失败: {e}")
        import traceback
        print(traceback.format_exc())
        # Fix: bare `raise` preserves the original traceback ("raise e" rewrote it)
        raise
## 新增: 获取视频详情统计数据
async def get_video_details_stats() -> dict:
    """
    Compute statistics about video-detail coverage.

    Cross-references the yearly history tables in ``bilibili_history.db``
    against ``video_details`` and ``invalid_videos`` in ``video_library.db``
    and reports totals, completion percentage, a per-error-type breakdown of
    invalid videos, and the full list of bvids still to fetch.

    Returns:
        dict: ``{"status", "message", "data"}``; ``data`` is ``None`` on error.
    """
    try:
        print("\n=== 获取视频详情统计数据 ===")
        # History records live in one DB, fetched details in another
        history_db_path = get_output_path("bilibili_history.db")
        video_db_path = get_output_path("video_library.db")
        # Collect every distinct bvid from the history database
        conn_history = sqlite3.connect(history_db_path)
        cursor_history = conn_history.cursor()
        print("查询历史记录数据库中的视频ID...")
        # History is partitioned into per-year tables
        cursor_history.execute("""
            SELECT name FROM sqlite_master
            WHERE type='table' AND name LIKE 'bilibili_history_%'
        """)
        history_tables = cursor_history.fetchall()
        if not history_tables:
            return {"status": "error", "message": "未找到历史记录表", "data": None}
        print(f"找到以下历史记录表: {[table[0] for table in history_tables]}")
        # Union the distinct bvids across all year tables.
        # The table name interpolated below comes from sqlite_master itself
        # (filtered by the LIKE above), not from user input.
        all_bvids = []
        for table in history_tables:
            table_name = table[0]
            cursor_history.execute(f"""
                SELECT DISTINCT bvid FROM {table_name}
                WHERE bvid IS NOT NULL AND bvid != ''
            """)
            bvids = [row[0] for row in cursor_history.fetchall()]
            all_bvids = list(set(all_bvids + bvids))
            print(f"从表 {table_name} 中找到 {len(bvids)} 个视频ID")
        conn_history.close()
        total_history_videos = len(all_bvids)
        print(f"历史记录数据库中总共找到 {total_history_videos} 个不同的视频ID")
        # Now look at what the video library already knows
        conn_video = sqlite3.connect(video_db_path)
        cursor_video = conn_video.cursor()
        # bvids with stored details
        try:
            cursor_video.execute("SELECT bvid FROM video_details")
            existing_bvids = {row[0] for row in cursor_video.fetchall()}
            existing_videos_count = len(existing_bvids)
        except sqlite3.OperationalError:
            # Table does not exist yet
            existing_bvids = set()
            existing_videos_count = 0
        # bvids already marked invalid
        try:
            cursor_video.execute("SELECT bvid FROM invalid_videos")
            invalid_bvids = {row[0] for row in cursor_video.fetchall()}
            invalid_videos_count = len(invalid_bvids)
        except sqlite3.OperationalError:
            # Table does not exist yet
            invalid_bvids = set()
            invalid_videos_count = 0
        # Per-error-type breakdown of invalid videos
        error_type_stats = {}
        try:
            cursor_video.execute("""
                SELECT error_type, COUNT(*) as count
                FROM invalid_videos
                GROUP BY error_type
            """)
            for row in cursor_video.fetchall():
                error_type_stats[row[0]] = row[1]
        except sqlite3.OperationalError:
            # Table does not exist yet
            pass
        conn_video.close()
        # Pending = in history, neither fetched nor known-invalid
        videos_to_fetch = [bvid for bvid in all_bvids if bvid not in existing_bvids and bvid not in invalid_bvids]
        pending_videos_count = len(videos_to_fetch)
        print(f"\n=== 视频详情统计 ===")
        print(f"历史记录总视频数: {total_history_videos}")
        print(f"已获取详情视频数: {existing_videos_count}")
        print(f"已知失效视频数: {invalid_videos_count}")
        print(f"待获取视频数: {pending_videos_count}")
        completion_percentage = ((existing_videos_count + invalid_videos_count) / total_history_videos) * 100 if total_history_videos > 0 else 0
        # Statistics payload
        return {
            "status": "success",
            "message": "成功获取视频详情统计数据",
            "data": {
                "total_history_videos": total_history_videos,    # total distinct videos in history
                "existing_videos_count": existing_videos_count,  # videos with stored details
                "invalid_videos_count": invalid_videos_count,    # videos known to be invalid
                "pending_videos_count": pending_videos_count,    # videos still to fetch
                "completion_percentage": round(completion_percentage, 2),  # coverage percentage
                "error_type_stats": error_type_stats,            # invalid-video breakdown
                "pending_videos": videos_to_fetch                # bvids still to fetch
            }
        }
    except Exception as e:
        error_msg = f"获取视频详情统计数据时出错: {str(e)}"
        print(error_msg)
        import traceback
        print(traceback.format_exc())
        return {"status": "error", "message": error_msg, "data": None}
|
281677160/openwrt-package
| 12,027
|
luci-app-homeproxy/htdocs/luci-static/resources/homeproxy.js
|
/*
* SPDX-License-Identifier: GPL-2.0-only
*
* Copyright (C) 2022-2025 ImmortalWrt.org
*/
'use strict';
'require baseclass';
'require form';
'require fs';
'require rpc';
'require uci';
'require ui';
return baseclass.extend({
	/* sing-box DNS strategy values mapped to translated display labels */
	dns_strategy: {
		'': _('Default'),
		'prefer_ipv4': _('Prefer IPv4'),
		'prefer_ipv6': _('Prefer IPv6'),
		'ipv4_only': _('IPv4 only'),
		'ipv6_only': _('IPv6 only')
	},
	/* Required key length in bytes per Shadowsocks method; 0 = no fixed length */
	shadowsocks_encrypt_length: {
		/* AEAD */
		'aes-128-gcm': 0,
		'aes-192-gcm': 0,
		'aes-256-gcm': 0,
		'chacha20-ietf-poly1305': 0,
		'xchacha20-ietf-poly1305': 0,
		/* AEAD 2022 */
		'2022-blake3-aes-128-gcm': 16,
		'2022-blake3-aes-256-gcm': 32,
		'2022-blake3-chacha20-poly1305': 32
	},
	/* Supported Shadowsocks encryption methods, grouped by family */
	shadowsocks_encrypt_methods: [
		/* Stream */
		'none',
		/* AEAD */
		'aes-128-gcm',
		'aes-192-gcm',
		'aes-256-gcm',
		'chacha20-ietf-poly1305',
		'xchacha20-ietf-poly1305',
		/* AEAD 2022 */
		'2022-blake3-aes-128-gcm',
		'2022-blake3-aes-256-gcm',
		'2022-blake3-chacha20-poly1305'
	],
	/* TLS cipher suites offered in the UI */
	tls_cipher_suites: [
		'TLS_RSA_WITH_AES_128_CBC_SHA',
		'TLS_RSA_WITH_AES_256_CBC_SHA',
		'TLS_RSA_WITH_AES_128_GCM_SHA256',
		'TLS_RSA_WITH_AES_256_GCM_SHA384',
		'TLS_AES_128_GCM_SHA256',
		'TLS_AES_256_GCM_SHA384',
		'TLS_CHACHA20_POLY1305_SHA256',
		'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA',
		'TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA',
		'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA',
		'TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA',
		'TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256',
		'TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384',
		'TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256',
		'TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384',
		'TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256',
		'TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256'
	],
	/* Selectable TLS protocol versions */
	tls_versions: [
		'1.0',
		'1.1',
		'1.2',
		'1.3'
	],
	/* DynamicList variant with the free-form ("-") entry removed, turning
	   the widget into a fixed-choice list. */
	CBIStaticList: form.DynamicList.extend({
		__name__: 'CBI.StaticList',
		renderWidget: function(/* ... */) {
			let dl = form.DynamicList.prototype.renderWidget.apply(this, arguments);
			/* Drop the "custom value" placeholder item from the dropdown */
			dl.querySelector('.add-item ul > li[data-value="-"]')?.remove();
			return dl;
		}
	}),
	/* Compute the lowercase hex MD5 digest of string e. */
	calcStringMD5(e) {
		/* Thanks to https://stackoverflow.com/a/41602636 */
		/* h: 32-bit addition with explicit handling of the two top carry
		   bits; k/l/m/n: the four MD5 round functions (F, G, H, I) combined
		   with add-rotate-add; p: little-endian word -> hex string. */
		let h = (a, b) => {
			let c, d, e, f, g;
			c = a & 2147483648;
			d = b & 2147483648;
			e = a & 1073741824;
			f = b & 1073741824;
			g = (a & 1073741823) + (b & 1073741823);
			return e & f ? g ^ 2147483648 ^ c ^ d : e | f ? g & 1073741824 ? g ^ 3221225472 ^ c ^ d : g ^ 1073741824 ^ c ^ d : g ^ c ^ d;
		}, k = (a, b, c, d, e, f, g) => h((a = h(a, h(h(b & c | ~b & d, e), g))) << f | a >>> 32 - f, b),
		l = (a, b, c, d, e, f, g) => h((a = h(a, h(h(b & d | c & ~d, e), g))) << f | a >>> 32 - f, b),
		m = (a, b, c, d, e, f, g) => h((a = h(a, h(h(b ^ c ^ d, e), g))) << f | a >>> 32 - f, b),
		n = (a, b, c, d, e, f, g) => h((a = h(a, h(h(c ^ (b | ~d), e), g))) << f | a >>> 32 - f, b),
		p = a => { let b = '', d = ''; for (let c = 0; c <= 3; c++) d = a >>> 8 * c & 255, d = '0' + d.toString(16), b += d.substr(d.length - 2, 2); return b; };
		let f = [], q, r, s, t, a, b, c, d;
		/* Re-encode the input as UTF-8 (one char per byte), CRLF -> LF first */
		e = (() => {
			e = e.replace(/\r\n/g, '\n');
			let b = '';
			for (let d = 0; d < e.length; d++) {
				let c = e.charCodeAt(d);
				b += c < 128 ? String.fromCharCode(c) : c < 2048 ? String.fromCharCode(c >> 6 | 192) + String.fromCharCode(c & 63 | 128) :
					String.fromCharCode(c >> 12 | 224) + String.fromCharCode(c >> 6 & 63 | 128) + String.fromCharCode(c & 63 | 128);
			}
			return b;
		})();
		/* Pack bytes into little-endian 32-bit words with MD5 padding:
		   0x80 terminator, zero fill, 64-bit bit-length in the last two words */
		f = (() => {
			let c = e.length, a = c + 8, d = 16 * ((a - a % 64) / 64 + 1), b = Array(d - 1), f = 0, g = 0;
			for (; g < c;) a = (g - g % 4) / 4, f = g % 4 * 8, b[a] |= e.charCodeAt(g) << f, g++;
			a = (g - g % 4) / 4, b[a] |= 128 << g % 4 * 8, b[d - 2] = c << 3, b[d - 1] = c >>> 29;
			return b;
		})();
		/* MD5 initialization vector (A, B, C, D) */
		a = 1732584193, b = 4023233417, c = 2562383102, d = 271733878;
		/* Process the message in 16-word (512-bit) chunks */
		for (e = 0; e < f.length; e += 16) {
			q = a, r = b, s = c, t = d;
			/* Round 1 (F) */
			a = k(a, b, c, d, f[e + 0], 7, 3614090360), d = k(d, a, b, c, f[e + 1], 12, 3905402710),
			c = k(c, d, a, b, f[e + 2], 17, 606105819), b = k(b, c, d, a, f[e + 3], 22, 3250441966),
			a = k(a, b, c, d, f[e + 4], 7, 4118548399), d = k(d, a, b, c, f[e + 5], 12, 1200080426),
			c = k(c, d, a, b, f[e + 6], 17, 2821735955), b = k(b, c, d, a, f[e + 7], 22, 4249261313),
			a = k(a, b, c, d, f[e + 8], 7, 1770035416), d = k(d, a, b, c, f[e + 9], 12, 2336552879),
			c = k(c, d, a, b, f[e + 10], 17, 4294925233), b = k(b, c, d, a, f[e + 11], 22, 2304563134),
			a = k(a, b, c, d, f[e + 12], 7, 1804603682), d = k(d, a, b, c, f[e + 13], 12, 4254626195),
			c = k(c, d, a, b, f[e + 14], 17, 2792965006), b = k(b, c, d, a, f[e + 15], 22, 1236535329),
			/* Round 2 (G) */
			a = l(a, b, c, d, f[e + 1], 5, 4129170786), d = l(d, a, b, c, f[e + 6], 9, 3225465664),
			c = l(c, d, a, b, f[e + 11], 14, 643717713), b = l(b, c, d, a, f[e + 0], 20, 3921069994),
			a = l(a, b, c, d, f[e + 5], 5, 3593408605), d = l(d, a, b, c, f[e + 10], 9, 38016083),
			c = l(c, d, a, b, f[e + 15], 14, 3634488961), b = l(b, c, d, a, f[e + 4], 20, 3889429448),
			a = l(a, b, c, d, f[e + 9], 5, 568446438), d = l(d, a, b, c, f[e + 14], 9, 3275163606),
			c = l(c, d, a, b, f[e + 3], 14, 4107603335), b = l(b, c, d, a, f[e + 8], 20, 1163531501),
			a = l(a, b, c, d, f[e + 13], 5, 2850285829), d = l(d, a, b, c, f[e + 2], 9, 4243563512),
			c = l(c, d, a, b, f[e + 7], 14, 1735328473), b = l(b, c, d, a, f[e + 12], 20, 2368359562),
			/* Round 3 (H) */
			a = m(a, b, c, d, f[e + 5], 4, 4294588738), d = m(d, a, b, c, f[e + 8], 11, 2272392833),
			c = m(c, d, a, b, f[e + 11], 16, 1839030562), b = m(b, c, d, a, f[e + 14], 23, 4259657740),
			a = m(a, b, c, d, f[e + 1], 4, 2763975236), d = m(d, a, b, c, f[e + 4], 11, 1272893353),
			c = m(c, d, a, b, f[e + 7], 16, 4139469664), b = m(b, c, d, a, f[e + 10], 23, 3200236656),
			a = m(a, b, c, d, f[e + 13], 4, 681279174), d = m(d, a, b, c, f[e + 0], 11, 3936430074),
			c = m(c, d, a, b, f[e + 3], 16, 3572445317), b = m(b, c, d, a, f[e + 6], 23, 76029189),
			a = m(a, b, c, d, f[e + 9], 4, 3654602809), d = m(d, a, b, c, f[e + 12], 11, 3873151461),
			c = m(c, d, a, b, f[e + 15], 16, 530742520), b = m(b, c, d, a, f[e + 2], 23, 3299628645),
			/* Round 4 (I) */
			a = n(a, b, c, d, f[e + 0], 6, 4096336452), d = n(d, a, b, c, f[e + 7], 10, 1126891415),
			c = n(c, d, a, b, f[e + 14], 15, 2878612391), b = n(b, c, d, a, f[e + 5], 21, 4237533241),
			a = n(a, b, c, d, f[e + 12], 6, 1700485571), d = n(d, a, b, c, f[e + 3], 10, 2399980690),
			c = n(c, d, a, b, f[e + 10], 15, 4293915773), b = n(b, c, d, a, f[e + 1], 21, 2240044497),
			a = n(a, b, c, d, f[e + 8], 6, 1873313359), d = n(d, a, b, c, f[e + 15], 10, 4264355552),
			c = n(c, d, a, b, f[e + 6], 15, 2734768916), b = n(b, c, d, a, f[e + 13], 21, 1309151649),
			a = n(a, b, c, d, f[e + 4], 6, 4149444226), d = n(d, a, b, c, f[e + 11], 10, 3174756917),
			c = n(c, d, a, b, f[e + 2], 15, 718787259), b = n(b, c, d, a, f[e + 9], 21, 3951481745),
			/* Add this chunk's result back into the running state */
			a = h(a, q), b = h(b, r), c = h(c, s), d = h(d, t);
		}
		return (p(a) + p(b) + p(c) + p(d)).toLowerCase();
	},
decodeBase64Str(str) {
if (!str)
return null;
/* Thanks to luci-app-ssr-plus */
str = str.replace(/-/g, '+').replace(/_/g, '/');
let padding = (4 - str.length % 4) % 4;
if (padding)
str = str + Array(padding + 1).join('=');
return decodeURIComponent(Array.prototype.map.call(atob(str), (c) =>
'%' + ('00' + c.charCodeAt(0).toString(16)).slice(-2)
).join(''));
},
getBuiltinFeatures() {
const callGetSingBoxFeatures = rpc.declare({
object: 'luci.homeproxy',
method: 'singbox_get_features',
expect: { '': {} }
});
return L.resolveDefault(callGetSingBoxFeatures(), {});
},
generateRand(type, length) {
let byteArr;
if (['base64', 'hex'].includes(type))
byteArr = crypto.getRandomValues(new Uint8Array(length));
switch (type) {
case 'base64':
/* Thanks to https://stackoverflow.com/questions/9267899 */
return btoa(String.fromCharCode.apply(null, byteArr));
case 'hex':
return Array.from(byteArr, (byte) =>
(byte & 255).toString(16).padStart(2, '0')
).join('');
case 'uuid':
/* Thanks to https://stackoverflow.com/a/2117523 */
return ([1e7]+-1e3+-4e3+-8e3+-1e11).replace(/[018]/g, (c) =>
(c ^ crypto.getRandomValues(new Uint8Array(1))[0] & 15 >> c / 4).toString(16)
);
default:
return null;
};
},
loadDefaultLabel(uciconfig, ucisection) {
let label = uci.get(uciconfig, ucisection, 'label');
if (label) {
return label;
} else {
uci.set(uciconfig, ucisection, 'label', ucisection);
return ucisection;
}
},
loadModalTitle(title, addtitle, uciconfig, ucisection) {
let label = uci.get(uciconfig, ucisection, 'label');
return label ? title + ' » ' + label : addtitle;
},
renderSectionAdd(section, extra_class) {
	/* Render the GridSection "add" row, then attach a live validator to
	 * the name field: the Add button is disabled while the name is empty
	 * or collides with an existing UCI section id. */
	let el = form.GridSection.prototype.renderSectionAdd.apply(section, [ extra_class ]),
	    nameEl = el.querySelector('.cbi-section-create-name');
	ui.addValidator(nameEl, 'uciname', true, (v) => {
		let button = el.querySelector('.cbi-section-create > .cbi-button-add');
		let uciconfig = section.uciconfig || section.map.config;
		if (!v) {
			/* Empty input: block the button but report no error. */
			button.disabled = true;
			return true;
		} else if (uci.get(uciconfig, v)) {
			/* Name already taken by another section. */
			button.disabled = true;
			return _('Expecting: %s').format(_('unique UCI identifier'));
		} else {
			button.disabled = null;
			return true;
		}
	}, 'blur', 'keyup');
	return el;
},
uploadCertificate(_option, type, filename, ev) {
	/* Upload a user-selected certificate/key file and hand it to the
	 * luci.homeproxy backend for installation.
	 * _option: unused (button-callback signature).
	 * type: human-readable kind, only used in notification text.
	 * filename: target name passed to the backend writer.
	 * ev: click event whose target is the file input element. */
	const callWriteCertificate = rpc.declare({
		object: 'luci.homeproxy',
		method: 'certificate_write',
		params: ['filename'],
		expect: { '': {} }
	});
	/* The file is staged at a fixed tmp path; the backend moves it into place. */
	return ui.uploadFile('/tmp/homeproxy_certificate.tmp', ev.target)
		.then(L.bind((_btn, res) => {
			return L.resolveDefault(callWriteCertificate(filename), {}).then((ret) => {
				if (ret.result === true)
					ui.addNotification(null, E('p', _('Your %s was successfully uploaded. Size: %sB.').format(type, res.size)));
				else
					ui.addNotification(null, E('p', _('Failed to upload %s, error: %s.').format(type, ret.error)));
			});
		}, this, ev.target))
		.catch((e) => { ui.addNotification(null, E('p', e.message)) });
},
validateBase64Key(length, section_id, value) {
/* Thanks to luci-proto-wireguard */
if (section_id && value)
if (value.length !== length || !value.match(/^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$/) || value[length-1] !== '=')
return _('Expecting: %s').format(_('valid base64 key with %d characters').format(length));
return true;
},
validateCertificatePath(section_id, value) {
if (section_id && value)
if (!value.match(/^(\/etc\/homeproxy\/certs\/|\/etc\/acme\/|\/etc\/ssl\/).+$/))
return _('Expecting: %s').format(_('/etc/homeproxy/certs/..., /etc/acme/..., /etc/ssl/...'));
return true;
},
validatePortRange(section_id, value) {
if (section_id && value) {
value = value.match(/^(\d+)?\:(\d+)?$/);
if (value && (value[1] || value[2])) {
if (!value[1])
value[1] = 0;
else if (!value[2])
value[2] = 65535;
if (value[1] < value[2] && value[2] <= 65535)
return true;
}
return _('Expecting: %s').format( _('valid port range (port1:port2)'));
}
return true;
},
validateUniqueValue(uciconfig, ucisection, ucioption, section_id, value) {
if (section_id) {
if (!value)
return _('Expecting: %s').format(_('non-empty value'));
if (ucioption === 'node' && value === 'urltest')
return true;
let duplicate = false;
uci.sections(uciconfig, ucisection, (res) => {
if (res['.name'] !== section_id)
if (res[ucioption] === value)
duplicate = true
});
if (duplicate)
return _('Expecting: %s').format(_('unique value'));
}
return true;
},
validateUUID(section_id, value) {
if (section_id) {
if (!value)
return _('Expecting: %s').format(_('non-empty value'));
else if (value.match('^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$') === null)
return _('Expecting: %s').format(_('valid uuid'));
}
return true;
}
});
|
28harishkumar/blog
| 6,606
|
resources/assets/less/bootstrap/mixins/vendor-prefixes.less
|
// Vendor Prefixes
//
// All vendor mixins are deprecated as of v3.2.0 due to the introduction of
// Autoprefixer in our Gruntfile. They will be removed in v4.
// - Animations
// - Backface visibility
// - Box shadow
// - Box sizing
// - Content columns
// - Hyphens
// - Placeholder text
// - Transformations
// - Transitions
// - User Select
// Animations
// Deprecated vendor-prefix mixins (see file header): only -webkit- needs
// prefixing for the animation longhands; the shorthand also emits -o- for
// old Presto-based Opera.
.animation(@animation) {
  -webkit-animation: @animation;
       -o-animation: @animation;
          animation: @animation;
}
.animation-name(@name) {
  -webkit-animation-name: @name;
          animation-name: @name;
}
.animation-duration(@duration) {
  -webkit-animation-duration: @duration;
          animation-duration: @duration;
}
.animation-timing-function(@timing-function) {
  -webkit-animation-timing-function: @timing-function;
          animation-timing-function: @timing-function;
}
.animation-delay(@delay) {
  -webkit-animation-delay: @delay;
          animation-delay: @delay;
}
.animation-iteration-count(@iteration-count) {
  -webkit-animation-iteration-count: @iteration-count;
          animation-iteration-count: @iteration-count;
}
.animation-direction(@direction) {
  -webkit-animation-direction: @direction;
          animation-direction: @direction;
}
.animation-fill-mode(@fill-mode) {
  -webkit-animation-fill-mode: @fill-mode;
          animation-fill-mode: @fill-mode;
}
// Backface visibility
// Prevent browsers from flickering when using CSS 3D transforms.
// Default value is `visible`, but can be changed to `hidden`
.backface-visibility(@visibility){
  -webkit-backface-visibility: @visibility;
     -moz-backface-visibility: @visibility;
          backface-visibility: @visibility;
}

// Drop shadows
//
// Note: Deprecated `.box-shadow()` as of v3.1.0 since all of Bootstrap's
// supported browsers that have box shadow capabilities now support it.
.box-shadow(@shadow) {
  -webkit-box-shadow: @shadow; // iOS <4.3 & Android <4.1
          box-shadow: @shadow;
}

// Box sizing
.box-sizing(@boxmodel) {
  -webkit-box-sizing: @boxmodel;
     -moz-box-sizing: @boxmodel;
          box-sizing: @boxmodel;
}

// CSS3 Content Columns
// Gap defaults to the grid gutter so columns align with the grid.
.content-columns(@column-count; @column-gap: @grid-gutter-width) {
  -webkit-column-count: @column-count;
     -moz-column-count: @column-count;
          column-count: @column-count;
  -webkit-column-gap: @column-gap;
     -moz-column-gap: @column-gap;
          column-gap: @column-gap;
}

// Optional hyphenation
// word-wrap is included as a fallback for browsers without hyphenation.
.hyphens(@mode: auto) {
  word-wrap: break-word;
  -webkit-hyphens: @mode;
     -moz-hyphens: @mode;
      -ms-hyphens: @mode; // IE10+
       -o-hyphens: @mode;
          hyphens: @mode;
}

// Placeholder text
// Each selector must stay in its own rule: one unknown pseudo-element
// invalidates an entire comma-separated selector list.
.placeholder(@color: @input-color-placeholder) {
  // Firefox
  &::-moz-placeholder {
    color: @color;
    opacity: 1; // See https://github.com/twbs/bootstrap/pull/11526
  }
  &:-ms-input-placeholder { color: @color; } // Internet Explorer 10+
  &::-webkit-input-placeholder { color: @color; } // Safari and Chrome
}
// Transformations
// Transformations
// The two .scale() definitions are LESS arity overloads: callers may pass
// one uniform ratio or separate X/Y ratios.
.scale(@ratio) {
  -webkit-transform: scale(@ratio);
      -ms-transform: scale(@ratio); // IE9 only
       -o-transform: scale(@ratio);
          transform: scale(@ratio);
}
.scale(@ratioX; @ratioY) {
  -webkit-transform: scale(@ratioX, @ratioY);
      -ms-transform: scale(@ratioX, @ratioY); // IE9 only
       -o-transform: scale(@ratioX, @ratioY);
          transform: scale(@ratioX, @ratioY);
}
.scaleX(@ratio) {
  -webkit-transform: scaleX(@ratio);
      -ms-transform: scaleX(@ratio); // IE9 only
       -o-transform: scaleX(@ratio);
          transform: scaleX(@ratio);
}
.scaleY(@ratio) {
  -webkit-transform: scaleY(@ratio);
      -ms-transform: scaleY(@ratio); // IE9 only
       -o-transform: scaleY(@ratio);
          transform: scaleY(@ratio);
}
.skew(@x; @y) {
  -webkit-transform: skewX(@x) skewY(@y);
      -ms-transform: skewX(@x) skewY(@y); // See https://github.com/twbs/bootstrap/issues/4885; IE9+
       -o-transform: skewX(@x) skewY(@y);
          transform: skewX(@x) skewY(@y);
}
.translate(@x; @y) {
  -webkit-transform: translate(@x, @y);
      -ms-transform: translate(@x, @y); // IE9 only
       -o-transform: translate(@x, @y);
          transform: translate(@x, @y);
}
// translate3d has no -ms-/-o- forms: IE9 and old Opera lack 3D transforms.
.translate3d(@x; @y; @z) {
  -webkit-transform: translate3d(@x, @y, @z);
          transform: translate3d(@x, @y, @z);
}
.rotate(@degrees) {
  -webkit-transform: rotate(@degrees);
      -ms-transform: rotate(@degrees); // IE9 only
       -o-transform: rotate(@degrees);
          transform: rotate(@degrees);
}
.rotateX(@degrees) {
  -webkit-transform: rotateX(@degrees);
      -ms-transform: rotateX(@degrees); // IE9 only
       -o-transform: rotateX(@degrees);
          transform: rotateX(@degrees);
}
.rotateY(@degrees) {
  -webkit-transform: rotateY(@degrees);
      -ms-transform: rotateY(@degrees); // IE9 only
       -o-transform: rotateY(@degrees);
          transform: rotateY(@degrees);
}
.perspective(@perspective) {
  -webkit-perspective: @perspective;
     -moz-perspective: @perspective;
          perspective: @perspective;
}
.perspective-origin(@perspective) {
  -webkit-perspective-origin: @perspective;
     -moz-perspective-origin: @perspective;
          perspective-origin: @perspective;
}
.transform-origin(@origin) {
  -webkit-transform-origin: @origin;
     -moz-transform-origin: @origin;
      -ms-transform-origin: @origin; // IE9 only
          transform-origin: @origin;
}
// Transitions
// Transitions
.transition(@transition) {
  -webkit-transition: @transition;
       -o-transition: @transition;
          transition: @transition;
}
.transition-property(@transition-property) {
  -webkit-transition-property: @transition-property;
          transition-property: @transition-property;
}
.transition-delay(@transition-delay) {
  -webkit-transition-delay: @transition-delay;
          transition-delay: @transition-delay;
}
.transition-duration(@transition-duration) {
  -webkit-transition-duration: @transition-duration;
          transition-duration: @transition-duration;
}
.transition-timing-function(@timing-function) {
  -webkit-transition-timing-function: @timing-function;
          transition-timing-function: @timing-function;
}
// Transition on `transform`: each prefixed transition must reference the
// matching prefixed transform property.
.transition-transform(@transition) {
  -webkit-transition: -webkit-transform @transition;
     -moz-transition: -moz-transform @transition;
       -o-transition: -o-transform @transition;
          transition: transform @transition;
}

// User select
// For selecting text on the page
.user-select(@select) {
  -webkit-user-select: @select;
     -moz-user-select: @select;
      -ms-user-select: @select; // IE10+
          user-select: @select;
}
|
28harishkumar/blog
| 2,641
|
resources/assets/less/bootstrap/mixins/forms.less
|
// Form validation states
//
// Used in forms.less to generate the form validation CSS for warnings, errors,
// and successes.
// Generate label/border/shadow/addon/feedback styling for one validation
// state (success / warning / error) from three base colors.
.form-control-validation(@text-color: #555; @border-color: #ccc; @background-color: #f5f5f5) {
  // Color the label and help text
  .help-block,
  .control-label,
  .radio,
  .checkbox,
  .radio-inline,
  .checkbox-inline,
  &.radio label,
  &.checkbox label,
  &.radio-inline label,
  &.checkbox-inline label {
    color: @text-color;
  }
  // Set the border and box shadow on specific inputs to match
  .form-control {
    border-color: @border-color;
    .box-shadow(inset 0 1px 1px rgba(0,0,0,.075)); // Redeclare so transitions work
    &:focus {
      border-color: darken(@border-color, 10%);
      // Named @shadow variable: the comma in the shadow list would otherwise
      // be parsed as a mixin-argument separator.
      @shadow: inset 0 1px 1px rgba(0,0,0,.075), 0 0 6px lighten(@border-color, 20%);
      .box-shadow(@shadow);
    }
  }
  // Set validation states also for addons
  .input-group-addon {
    color: @text-color;
    border-color: @border-color;
    background-color: @background-color;
  }
  // Optional feedback icon
  .form-control-feedback {
    color: @text-color;
  }
}
// Form control focus state
//
// Generate a customized focus state and for any input with the specified color,
// which defaults to the `@input-border-focus` variable.
//
// We highly encourage you to not customize the default value, but instead use
// this to tweak colors on an as-needed basis. This aesthetic change is based on
// WebKit's default styles, but applicable to a wider range of browsers. Its
// usability and accessibility should be taken into account with any change.
//
// Example usage: change the default blue border and shadow to white for better
// contrast against a dark gray background.
// Customized focus ring: border in @color plus a translucent glow derived
// from the same color (see the long comment above for usage guidance).
.form-control-focus(@color: @input-border-focus) {
  @color-rgba: rgba(red(@color), green(@color), blue(@color), .6);
  &:focus {
    border-color: @color;
    outline: 0;
    // ~"" escaping keeps LESS from parsing the comma-separated shadow list
    // as multiple mixin arguments; @{color-rgba} is interpolated into it.
    .box-shadow(~"inset 0 1px 1px rgba(0,0,0,.075), 0 0 8px @{color-rgba}");
  }
}

// Form control sizing
//
// Relative text size, padding, and border-radii changes for form controls. For
// horizontal sizing, wrap controls in the predefined grid classes. `<select>`
// element gets special love because it's special, and that's a fact!
.input-size(@input-height; @padding-vertical; @padding-horizontal; @font-size; @line-height; @border-radius) {
  height: @input-height;
  padding: @padding-vertical @padding-horizontal;
  font-size: @font-size;
  line-height: @line-height;
  border-radius: @border-radius;

  // `select&` reparents the selector: applies when the element IS a select.
  select& {
    height: @input-height;
    line-height: @input-height;
  }

  textarea&,
  select[multiple]& {
    height: auto;
  }
}
|
28harishkumar/blog
| 3,094
|
resources/assets/less/bootstrap/mixins/grid.less
|
// Grid system
//
// Generate semantic grid columns with these mixins.
// Centered container element
// Centered container element
.container-fixed(@gutter: @grid-gutter-width) {
  margin-right: auto;
  margin-left: auto;
  padding-left: (@gutter / 2);
  padding-right: (@gutter / 2);
  &:extend(.clearfix all);
}

// Creates a wrapper for a series of columns
// Negative margins cancel the outermost column padding so content aligns
// with the container edge.
.make-row(@gutter: @grid-gutter-width) {
  margin-left: (@gutter / -2);
  margin-right: (@gutter / -2);
  &:extend(.clearfix all);
}

// Generate the extra small columns
// xs columns float at every viewport width, hence no media query.
.make-xs-column(@columns; @gutter: @grid-gutter-width) {
  position: relative;
  float: left;
  width: percentage((@columns / @grid-columns));
  min-height: 1px; // keep empty columns from collapsing
  padding-left: (@gutter / 2);
  padding-right: (@gutter / 2);
}
.make-xs-column-offset(@columns) {
  margin-left: percentage((@columns / @grid-columns));
}
.make-xs-column-push(@columns) {
  left: percentage((@columns / @grid-columns));
}
.make-xs-column-pull(@columns) {
  right: percentage((@columns / @grid-columns));
}

// Generate the small columns
// sm/md/lg columns only float from their breakpoint upward; below it they
// stack full-width.
.make-sm-column(@columns; @gutter: @grid-gutter-width) {
  position: relative;
  min-height: 1px;
  padding-left: (@gutter / 2);
  padding-right: (@gutter / 2);

  @media (min-width: @screen-sm-min) {
    float: left;
    width: percentage((@columns / @grid-columns));
  }
}
.make-sm-column-offset(@columns) {
  @media (min-width: @screen-sm-min) {
    margin-left: percentage((@columns / @grid-columns));
  }
}
.make-sm-column-push(@columns) {
  @media (min-width: @screen-sm-min) {
    left: percentage((@columns / @grid-columns));
  }
}
.make-sm-column-pull(@columns) {
  @media (min-width: @screen-sm-min) {
    right: percentage((@columns / @grid-columns));
  }
}

// Generate the medium columns
.make-md-column(@columns; @gutter: @grid-gutter-width) {
  position: relative;
  min-height: 1px;
  padding-left: (@gutter / 2);
  padding-right: (@gutter / 2);

  @media (min-width: @screen-md-min) {
    float: left;
    width: percentage((@columns / @grid-columns));
  }
}
.make-md-column-offset(@columns) {
  @media (min-width: @screen-md-min) {
    margin-left: percentage((@columns / @grid-columns));
  }
}
.make-md-column-push(@columns) {
  @media (min-width: @screen-md-min) {
    left: percentage((@columns / @grid-columns));
  }
}
.make-md-column-pull(@columns) {
  @media (min-width: @screen-md-min) {
    right: percentage((@columns / @grid-columns));
  }
}

// Generate the large columns
.make-lg-column(@columns; @gutter: @grid-gutter-width) {
  position: relative;
  min-height: 1px;
  padding-left: (@gutter / 2);
  padding-right: (@gutter / 2);

  @media (min-width: @screen-lg-min) {
    float: left;
    width: percentage((@columns / @grid-columns));
  }
}
.make-lg-column-offset(@columns) {
  @media (min-width: @screen-lg-min) {
    margin-left: percentage((@columns / @grid-columns));
  }
}
.make-lg-column-push(@columns) {
  @media (min-width: @screen-lg-min) {
    left: percentage((@columns / @grid-columns));
  }
}
.make-lg-column-pull(@columns) {
  @media (min-width: @screen-lg-min) {
    right: percentage((@columns / @grid-columns));
  }
}
|
28harishkumar/blog
| 1,062
|
resources/assets/less/bootstrap/mixins/image.less
|
// Image Mixins
// - Responsive image
// - Retina image
// Responsive image
//
// Keep images from scaling beyond the width of their parents.
.img-responsive(@display: block) {
  display: @display;
  max-width: 100%; // Part 1: Set a maximum relative to the parent
  height: auto; // Part 2: Scale the height according to the width, otherwise you get stretching
}

// Retina image
//
// Short retina mixin for setting background-image and -size. Note that the
// spelling of `min--moz-device-pixel-ratio` is intentional.
.img-retina(@file-1x; @file-2x; @width-1x; @height-1x) {
  background-image: url("@{file-1x}");

  // Swap in the 2x asset (displayed at 1x dimensions) on high-DPI screens;
  // the redundant queries cover older vendor-prefixed resolution syntaxes.
  @media
  only screen and (-webkit-min-device-pixel-ratio: 2),
  only screen and (   min--moz-device-pixel-ratio: 2),
  only screen and (     -o-min-device-pixel-ratio: 2/1),
  only screen and (        min-device-pixel-ratio: 2),
  only screen and (                min-resolution: 192dpi),
  only screen and (                min-resolution: 2dppx) {
    background-image: url("@{file-2x}");
    background-size: @width-1x @height-1x;
  }
}
|
28harishkumar/blog
| 1,080
|
resources/assets/less/bootstrap/mixins/buttons.less
|
// Button variants
//
// Easily pump out default styles, as well as :hover, :focus, :active,
// and disabled options for all buttons
// Generate one button color variant: base colors plus derived hover/focus/
// active (darkened), disabled (reset to base), and inverted badge colors.
.button-variant(@color; @background; @border) {
  color: @color;
  background-color: @background;
  border-color: @border;

  &:hover,
  &:focus,
  &.focus,
  &:active,
  &.active,
  .open > .dropdown-toggle& {
    color: @color;
    background-color: darken(@background, 10%);
        border-color: darken(@border, 12%);
  }
  &:active,
  &.active,
  .open > .dropdown-toggle& {
    background-image: none; // cancel any gradient while pressed
  }
  &.disabled,
  &[disabled],
  fieldset[disabled] & {
    &,
    &:hover,
    &:focus,
    &.focus,
    &:active,
    &.active {
      background-color: @background;
          border-color: @border;
    }
  }

  .badge {
    color: @background;
    background-color: @color;
  }
}

// Button sizes
.button-size(@padding-vertical; @padding-horizontal; @font-size; @line-height; @border-radius) {
  padding: @padding-vertical @padding-horizontal;
  font-size: @font-size;
  line-height: @line-height;
  border-radius: @border-radius;
}
|
2929004360/ruoyi-sign
| 5,953
|
ruoyi-common/src/main/java/com/ruoyi/common/core/redis/RedisCache.java
|
package com.ruoyi.common.core.redis;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.BoundSetOperations;
import org.springframework.data.redis.core.HashOperations;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.core.ValueOperations;
import org.springframework.stereotype.Component;
/**
* spring redis 工具类
*
* @author ruoyi
**/
@SuppressWarnings(value = { "unchecked", "rawtypes" })
@Component
public class RedisCache
{
    @Autowired
    public RedisTemplate redisTemplate;

    /**
     * Cache a plain object (Integer, String, entity classes, ...).
     *
     * @param key cache key
     * @param value value to cache
     */
    public <T> void setCacheObject(final String key, final T value)
    {
        redisTemplate.opsForValue().set(key, value);
    }

    /**
     * Cache a plain object with an expiry.
     *
     * @param key cache key
     * @param value value to cache
     * @param timeout expiry amount
     * @param timeUnit unit of the expiry amount
     */
    public <T> void setCacheObject(final String key, final T value, final Integer timeout, final TimeUnit timeUnit)
    {
        redisTemplate.opsForValue().set(key, value, timeout, timeUnit);
    }

    /**
     * Set an expiry on a key, in seconds.
     *
     * @param key Redis key
     * @param timeout expiry in seconds
     * @return true on success, false otherwise
     */
    public boolean expire(final String key, final long timeout)
    {
        return expire(key, timeout, TimeUnit.SECONDS);
    }

    /**
     * Set an expiry on a key.
     *
     * @param key Redis key
     * @param timeout expiry amount
     * @param unit unit of the expiry amount
     * @return true on success, false otherwise
     */
    public boolean expire(final String key, final long timeout, final TimeUnit unit)
    {
        // RedisTemplate.expire may return null (e.g. when executed inside a
        // transaction/pipeline); guard against NPE from auto-unboxing.
        return Boolean.TRUE.equals(redisTemplate.expire(key, timeout, unit));
    }

    /**
     * Get the remaining time-to-live of a key.
     *
     * @param key Redis key
     * @return remaining TTL in seconds; 0 when unavailable
     */
    public long getExpire(final String key)
    {
        Long expire = redisTemplate.getExpire(key);
        return expire == null ? 0 : expire; // null-safe unboxing
    }

    /**
     * Check whether a key exists.
     *
     * @param key key to probe
     * @return true if present, false otherwise
     */
    public Boolean hasKey(String key)
    {
        return redisTemplate.hasKey(key);
    }

    /**
     * Fetch a cached plain object.
     *
     * @param key cache key
     * @return the cached value, or null when absent
     */
    public <T> T getCacheObject(final String key)
    {
        ValueOperations<String, T> operation = redisTemplate.opsForValue();
        return operation.get(key);
    }

    /**
     * Delete a single key.
     *
     * @param key key to delete
     * @return true if the key was removed
     */
    public boolean deleteObject(final String key)
    {
        // delete() may return null inside a transaction/pipeline.
        return Boolean.TRUE.equals(redisTemplate.delete(key));
    }

    /**
     * Delete a collection of keys.
     *
     * @param collection keys to delete
     * @return true if at least one key was removed
     */
    public boolean deleteObject(final Collection collection)
    {
        Long count = redisTemplate.delete(collection);
        return count != null && count > 0; // null-safe unboxing
    }

    /**
     * Cache a List by right-pushing all elements.
     *
     * @param key cache key
     * @param dataList list to cache
     * @return number of elements pushed
     */
    public <T> long setCacheList(final String key, final List<T> dataList)
    {
        Long count = redisTemplate.opsForList().rightPushAll(key, dataList);
        return count == null ? 0 : count;
    }

    /**
     * Fetch a cached List.
     *
     * @param key cache key
     * @return the full list stored under the key
     */
    public <T> List<T> getCacheList(final String key)
    {
        return redisTemplate.opsForList().range(key, 0, -1);
    }

    /**
     * Cache a Set.
     *
     * @param key cache key
     * @param dataSet set to cache
     * @return bound set operations for further use
     */
    public <T> BoundSetOperations<String, T> setCacheSet(final String key, final Set<T> dataSet)
    {
        BoundSetOperations<String, T> setOperation = redisTemplate.boundSetOps(key);
        // Enhanced for-loop instead of an explicit Iterator.
        for (T item : dataSet)
        {
            setOperation.add(item);
        }
        return setOperation;
    }

    /**
     * Fetch a cached Set.
     *
     * @param key cache key
     * @return all members of the set
     */
    public <T> Set<T> getCacheSet(final String key)
    {
        return redisTemplate.opsForSet().members(key);
    }

    /**
     * Cache a Map as a Redis hash.
     *
     * @param key cache key
     * @param dataMap map to cache; ignored when null
     */
    public <T> void setCacheMap(final String key, final Map<String, T> dataMap)
    {
        if (dataMap != null) {
            redisTemplate.opsForHash().putAll(key, dataMap);
        }
    }

    /**
     * Fetch a cached Map.
     *
     * @param key cache key
     * @return all hash entries stored under the key
     */
    public <T> Map<String, T> getCacheMap(final String key)
    {
        return redisTemplate.opsForHash().entries(key);
    }

    /**
     * Put one entry into a Redis hash.
     *
     * @param key Redis key
     * @param hKey hash field
     * @param value value to store
     */
    public <T> void setCacheMapValue(final String key, final String hKey, final T value)
    {
        redisTemplate.opsForHash().put(key, hKey, value);
    }

    /**
     * Get one entry from a Redis hash.
     *
     * @param key Redis key
     * @param hKey hash field
     * @return the stored value, or null when absent
     */
    public <T> T getCacheMapValue(final String key, final String hKey)
    {
        HashOperations<String, String, T> opsForHash = redisTemplate.opsForHash();
        return opsForHash.get(key, hKey);
    }

    /**
     * Get multiple entries from a Redis hash.
     *
     * @param key Redis key
     * @param hKeys hash fields to fetch
     * @return values in the same order as the requested fields
     */
    public <T> List<T> getMultiCacheMapValue(final String key, final Collection<Object> hKeys)
    {
        return redisTemplate.opsForHash().multiGet(key, hKeys);
    }

    /**
     * Delete one entry from a Redis hash.
     *
     * @param key Redis key
     * @param hKey hash field
     * @return true if the field was removed
     */
    public boolean deleteCacheMapValue(final String key, final String hKey)
    {
        Long deleted = redisTemplate.opsForHash().delete(key, hKey);
        return deleted != null && deleted > 0; // null-safe unboxing
    }

    /**
     * List keys matching a pattern.
     *
     * @param pattern key pattern (e.g. a prefix followed by '*')
     * @return matching keys
     */
    public Collection<String> keys(final String pattern)
    {
        return redisTemplate.keys(pattern);
    }
}
|
2977094657/BilibiliHistoryFetcher
| 39,756
|
scripts/popular_videos.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
bilibili_popular_videos.py - 获取B站热门视频列表
该脚本用于获取哔哩哔哩当前热门视频列表,使用了WBI签名验证
可以作为API接口导入到其他模块中使用
"""
import json
import os
import time
import random
import sqlite3
from datetime import datetime
from typing import Dict, Any, List, Optional, Tuple
import requests
from scripts.wbi_sign import get_wbi_sign
from scripts.utils import get_output_path, get_database_path
# API 地址
POPULAR_API = "https://api.bilibili.com/x/web-interface/popular"
def get_db_connection(year=None):
    """Open the per-year popular-videos SQLite database.

    Args:
        year: Target year; defaults to the current year.

    Returns:
        sqlite3.Connection with the full schema already created.
    """
    if year is None:
        year = datetime.now().year

    # One database file per year keeps any single file from growing unbounded.
    db_path = get_database_path(f"bilibili_popular_{year}.db")
    conn = sqlite3.connect(db_path)

    # Ensure the schema exists before handing the connection out.
    create_tables(conn)

    print(f"已连接到{year}年的数据库: {db_path}")
    return conn
# 获取当前年份的数据库连接
def get_current_db_connection():
    """Return a connection to this year's popular-videos database."""
    return get_db_connection(datetime.now().year)
# 获取所有年份的数据库列表
def get_all_year_dbs():
    """Return the sorted list of years that have a popular-videos database."""
    db_dir = os.path.dirname(get_database_path(""))
    prefix, suffix = "bilibili_popular_", ".db"
    years = []
    for name in os.listdir(db_dir):
        if not (name.startswith(prefix) and name.endswith(suffix)):
            continue
        stem = name.replace(prefix, "").replace(suffix, "")
        try:
            years.append(int(stem))
        except ValueError:
            # Filename matched the pattern but the middle is not a year.
            continue
    return sorted(years)
# 获取多年数据库连接
def get_multi_year_connections(start_year=None, end_year=None):
    """Open connections for a year range (or every year found on disk).

    Returns:
        Dict mapping year -> sqlite3.Connection; years whose database
        cannot be opened are skipped with a message.
    """
    if start_year is None or end_year is None:
        years = get_all_year_dbs()
        if not years:
            # No databases exist yet: fall back to just the current year.
            return {datetime.now().year: get_current_db_connection()}
    else:
        years = range(start_year, end_year + 1)

    connections = {}
    for year in years:
        try:
            connections[year] = get_db_connection(year)
        except Exception as e:
            print(f"连接{year}年数据库出错: {e}")
    return connections
def create_tables(conn):
    """Create the popular-videos schema if it does not exist yet (idempotent).

    Args:
        conn: Open sqlite3 connection; the DDL is committed on it.
    """
    cursor = conn.cursor()

    # Main snapshot table: one flattened row per (video, fetch_time); the
    # nested API objects (owner/stat/dimension/rights/rcmd_reason) are
    # spread into individual columns.
    cursor.execute('''
    CREATE TABLE IF NOT EXISTS popular_videos (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        aid TEXT,
        bvid TEXT,
        title TEXT,
        pubdate INTEGER,
        ctime INTEGER,
        desc TEXT,
        videos INTEGER,
        tid INTEGER,
        tname TEXT,
        copyright INTEGER,
        pic TEXT,
        duration INTEGER,
        owner_mid INTEGER,
        owner_name TEXT,
        owner_face TEXT,
        view_count INTEGER,
        danmaku_count INTEGER,
        reply_count INTEGER,
        favorite_count INTEGER,
        coin_count INTEGER,
        share_count INTEGER,
        like_count INTEGER,
        dynamic TEXT,
        cid TEXT,
        /* 展开dimension字段 */
        dimension_width INTEGER,
        dimension_height INTEGER,
        dimension_rotate INTEGER,
        short_link TEXT,
        first_frame TEXT,
        pub_location TEXT,
        cover43 TEXT,
        tidv2 INTEGER,
        tnamev2 TEXT,
        pid_v2 INTEGER,
        pid_name_v2 TEXT,
        season_type INTEGER,
        is_ogv INTEGER,
        /* 展开rights字段 */
        rights_bp INTEGER,
        rights_elec INTEGER,
        rights_download INTEGER,
        rights_movie INTEGER,
        rights_pay INTEGER,
        rights_hd5 INTEGER,
        rights_no_reprint INTEGER,
        rights_autoplay INTEGER,
        rights_ugc_pay INTEGER,
        rights_is_cooperation INTEGER,
        rights_ugc_pay_preview INTEGER,
        rights_no_background INTEGER,
        rights_arc_pay INTEGER,
        rights_pay_free_watch INTEGER,
        /* 展开stat字段 */
        stat_view INTEGER,
        stat_danmaku INTEGER,
        stat_reply INTEGER,
        stat_favorite INTEGER,
        stat_coin INTEGER,
        stat_share INTEGER,
        stat_now_rank INTEGER,
        stat_his_rank INTEGER,
        stat_like INTEGER,
        stat_dislike INTEGER,
        stat_vt INTEGER,
        stat_vv INTEGER,
        stat_fav_g INTEGER,
        stat_like_g INTEGER,
        /* 展开rcmd_reason字段 */
        rcmd_reason_content TEXT,
        rcmd_reason_corner_mark INTEGER,
        ogv_info TEXT,
        enable_vt INTEGER,
        ai_rcmd TEXT,
        fetch_time INTEGER,
        UNIQUE(aid, bvid, fetch_time)
    )
    ''')

    # One bookkeeping row per fetch run (counts, success flag, skips).
    cursor.execute('''
    CREATE TABLE IF NOT EXISTS fetch_records (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        fetch_time INTEGER,
        total_fetched INTEGER,
        pages_fetched INTEGER,
        success INTEGER,
        failed_to_save INTEGER DEFAULT 0,
        duplicates_skipped INTEGER DEFAULT 0
    )
    ''')

    # Lifetime tracking: how long each video stayed on the popular list and
    # its best/worst rank; one row per (aid, bvid).
    cursor.execute('''
    CREATE TABLE IF NOT EXISTS popular_video_tracking (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        aid TEXT,
        bvid TEXT,
        title TEXT,
        first_seen INTEGER,  -- 首次出现时间
        last_seen INTEGER,   -- 最后一次出现时间
        is_active INTEGER,   -- 是否仍在热门列表
        total_duration INTEGER, -- 累计在热门列表的时间(秒)
        highest_rank INTEGER,   -- 历史最高排名
        lowest_rank INTEGER,    -- 历史最低排名
        appearances INTEGER DEFAULT 1, -- 出现次数
        UNIQUE(aid, bvid)
    )
    ''')

    conn.commit()
def insert_video_to_db(conn, video: Dict[str, Any], fetch_time: int, rank: int = 0, auto_commit: bool = False):
    """Insert one video snapshot into popular_videos and refresh its tracking row.

    Args:
        conn: Database connection.
        video: Raw video dict as returned by the popular API.
        fetch_time: Unix timestamp of the fetch this row belongs to.
        rank: Position of the video in the popular list (0 = unknown).
        auto_commit: Commit/rollback here; leave False when the caller
            batches many inserts inside a single transaction.

    Raises:
        sqlite3.Error: Re-raised after logging (and rollback when auto_commit).
    """
    cursor = conn.cursor()

    # Pull out the nested sub-objects; .get(..., {}) keeps missing ones safe.
    owner = video.get('owner', {})
    stat = video.get('stat', {})
    dimension = video.get('dimension', {})
    rcmd_reason = video.get('rcmd_reason', {})
    rights = video.get('rights', {})

    try:
        # Flattened value tuple: order MUST match the 71-column list of the
        # INSERT statement below.
        values = (
            video.get('aid'),
            video.get('bvid'),
            video.get('title'),
            video.get('pubdate'),
            video.get('ctime'),
            video.get('desc'),
            video.get('videos'),
            video.get('tid'),
            video.get('tname'),
            video.get('copyright'),
            video.get('pic'),
            video.get('duration'),
            owner.get('mid'),
            owner.get('name'),
            owner.get('face'),
            stat.get('view'),
            stat.get('danmaku'),
            stat.get('reply'),
            stat.get('favorite'),
            stat.get('coin'),
            stat.get('share'),
            stat.get('like'),
            video.get('dynamic'),
            video.get('cid'),
            # flattened dimension fields
            dimension.get('width'),
            dimension.get('height'),
            dimension.get('rotate'),
            video.get('short_link_v2'),  # short_link_v2 feeds the short_link column
            video.get('first_frame'),
            video.get('pub_location'),
            video.get('cover43'),
            video.get('tidv2'),
            video.get('tnamev2'),
            video.get('pid_v2'),
            video.get('pid_name_v2'),
            video.get('season_type'),
            1 if video.get('is_ogv') else 0,  # normalize truthiness to 0/1
            # flattened rights fields
            rights.get('bp'),
            rights.get('elec'),
            rights.get('download'),
            rights.get('movie'),
            rights.get('pay'),
            rights.get('hd5'),
            rights.get('no_reprint'),
            rights.get('autoplay'),
            rights.get('ugc_pay'),
            rights.get('is_cooperation'),
            rights.get('ugc_pay_preview'),
            rights.get('no_background'),
            rights.get('arc_pay'),
            rights.get('pay_free_watch'),
            # flattened stat fields (view/danmaku/... repeated intentionally:
            # they also populate the legacy *_count columns above)
            stat.get('view'),
            stat.get('danmaku'),
            stat.get('reply'),
            stat.get('favorite'),
            stat.get('coin'),
            stat.get('share'),
            stat.get('now_rank'),
            stat.get('his_rank'),
            stat.get('like'),
            stat.get('dislike'),
            stat.get('vt'),
            stat.get('vv'),
            stat.get('fav_g'),
            stat.get('like_g'),
            # flattened rcmd_reason fields
            rcmd_reason.get('content'),
            rcmd_reason.get('corner_mark'),
            # remaining fields; dict payloads are stored as JSON text
            json.dumps(video.get('ogv_info', {}), ensure_ascii=False) if video.get('ogv_info') else None,
            video.get('enable_vt'),
            json.dumps(video.get('ai_rcmd', {}), ensure_ascii=False) if video.get('ai_rcmd') else None,
            fetch_time
        )

        # OR REPLACE: re-fetching the same (aid, bvid, fetch_time) overwrites.
        cursor.execute('''
        INSERT OR REPLACE INTO popular_videos (
            aid, bvid, title, pubdate, ctime, desc, videos, tid, tname, copyright,
            pic, duration, owner_mid, owner_name, owner_face, view_count, danmaku_count,
            reply_count, favorite_count, coin_count, share_count, like_count, dynamic,
            cid,
            dimension_width, dimension_height, dimension_rotate,
            short_link, first_frame, pub_location, cover43, tidv2,
            tnamev2, pid_v2, pid_name_v2, season_type, is_ogv,
            rights_bp, rights_elec, rights_download, rights_movie, rights_pay,
            rights_hd5, rights_no_reprint, rights_autoplay, rights_ugc_pay,
            rights_is_cooperation, rights_ugc_pay_preview, rights_no_background,
            rights_arc_pay, rights_pay_free_watch,
            stat_view, stat_danmaku, stat_reply, stat_favorite, stat_coin,
            stat_share, stat_now_rank, stat_his_rank, stat_like, stat_dislike,
            stat_vt, stat_vv, stat_fav_g, stat_like_g,
            rcmd_reason_content, rcmd_reason_corner_mark,
            ogv_info, enable_vt, ai_rcmd, fetch_time
        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        ''', values)

        # Keep the lifetime tracking table in sync with this sighting.
        update_tracking_info(conn, video, fetch_time, rank)

        # Commit only when this call owns the transaction.
        if auto_commit:
            conn.commit()
    except sqlite3.Error as e:
        print(f"插入数据库时出错: {e}")
        # Roll back only when this call owns the transaction.
        if auto_commit:
            conn.rollback()
        raise
def update_tracking_info(conn, video: Dict[str, Any], fetch_time: int, rank: int = 0, auto_commit: bool = False):
    """Create or refresh the popular-list tracking row for one video.

    Args:
        conn: Database connection.
        video: Video dict (only aid/bvid/title are read here).
        fetch_time: Unix timestamp of the current fetch.
        rank: Current position in the popular list (0 = unknown; rank
            extremes are only updated for rank > 0).
        auto_commit: Commit/rollback here; leave False when the caller
            controls the transaction.

    Raises:
        sqlite3.Error: Re-raised after logging (and rollback when auto_commit).
    """
    cursor = conn.cursor()

    aid = video.get('aid')
    bvid = video.get('bvid')
    title = video.get('title')

    try:
        # Is this video already being tracked?
        cursor.execute(
            "SELECT first_seen, last_seen, is_active, highest_rank, lowest_rank, appearances FROM popular_video_tracking WHERE aid = ? AND bvid = ?",
            (aid, bvid)
        )
        result = cursor.fetchone()

        if result:
            # Known video: refresh sighting info and rank extremes.
            first_seen, last_seen, is_active, highest_rank, lowest_rank, appearances = result

            # Only a strictly newer fetch bumps last_seen / appearances;
            # re-processing the same fetch_time is a no-op here.
            if last_seen < fetch_time:
                cursor.execute(
                    "UPDATE popular_video_tracking SET last_seen = ?, is_active = 1, appearances = appearances + 1 WHERE aid = ? AND bvid = ?",
                    (fetch_time, aid, bvid)
                )

            # Rank bookkeeping: a SMALLER number is a BETTER position, so
            # highest_rank stores the minimum and lowest_rank the maximum.
            if rank > 0:
                if highest_rank is None or rank < highest_rank:
                    cursor.execute(
                        "UPDATE popular_video_tracking SET highest_rank = ? WHERE aid = ? AND bvid = ?",
                        (rank, aid, bvid)
                    )
                if lowest_rank is None or rank > lowest_rank:
                    cursor.execute(
                        "UPDATE popular_video_tracking SET lowest_rank = ? WHERE aid = ? AND bvid = ?",
                        (rank, aid, bvid)
                    )
        else:
            # First sighting: create the tracking row.
            cursor.execute('''
            INSERT INTO popular_video_tracking (
                aid, bvid, title, first_seen, last_seen, is_active,
                total_duration, highest_rank, lowest_rank, appearances
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            ''', (
                aid, bvid, title, fetch_time, fetch_time, 1,
                0, rank if rank > 0 else None, rank if rank > 0 else None, 1
            ))

        # Commit only when this call owns the transaction.
        if auto_commit:
            conn.commit()
    except sqlite3.Error as e:
        print(f"更新跟踪信息时出错: {e}")
        # Roll back only when this call owns the transaction.
        if auto_commit:
            conn.rollback()
        raise
def update_inactive_videos(conn, fetch_time: int):
    """Mark videos missing from the latest crawl as no longer popular.

    Any row still flagged active whose last_seen predates *fetch_time* is
    switched to inactive, and the elapsed gap is folded into its
    accumulated total_duration.

    Args:
        conn: Database connection.
        fetch_time: Unix timestamp of the crawl that just finished.

    Returns:
        int: Number of rows transitioned to inactive (0 on SQL errors).
    """
    cur = conn.cursor()
    try:
        cur.execute('''
            SELECT aid, bvid, last_seen FROM popular_video_tracking
            WHERE is_active = 1 AND last_seen < ?
        ''', (fetch_time,))
        stale_rows = cur.fetchall()
        # One batched statement instead of per-row execute calls.
        cur.executemany('''
            UPDATE popular_video_tracking
            SET is_active = 0, total_duration = total_duration + ?
            WHERE aid = ? AND bvid = ?
        ''', [(fetch_time - last_seen, aid, bvid) for aid, bvid, last_seen in stale_rows])
        return len(stale_rows)
    except sqlite3.Error as e:
        print(f"更新非活跃视频时出错: {e}")
        return 0
def save_fetch_record(conn, fetch_time: int, total_fetched: int, pages_fetched: int, success: bool, failed: int = 0, duplicates: int = 0):
    """Append one crawl-summary row to fetch_records and commit.

    Args:
        conn: Database connection.
        fetch_time: Unix timestamp of the crawl.
        total_fetched: Total videos fetched.
        pages_fetched: Pages fetched.
        success: Whether the crawl completed successfully.
        failed: Count of videos that failed to save.
        duplicates: Count of duplicate videos skipped.
    """
    record = (fetch_time, total_fetched, pages_fetched, 1 if success else 0, failed, duplicates)
    try:
        conn.cursor().execute('''
            INSERT INTO fetch_records (fetch_time, total_fetched, pages_fetched, success, failed_to_save, duplicates_skipped)
            VALUES (?, ?, ?, ?, ?, ?)
        ''', record)
        conn.commit()
    except sqlite3.Error as e:
        print(f"保存抓取记录时出错: {e}")
        conn.rollback()
def get_popular_videos(page_num: int = 1, page_size: int = 20) -> Dict[str, Any]:
    """Fetch one page of Bilibili's current popular-video list.

    Args:
        page_num: 1-based page index.
        page_size: Number of videos per page.

    Returns:
        Dict[str, Any]: Decoded API payload on success; on any failure a
        synthetic payload with ``code == -1`` and an explanatory message,
        mirroring the API envelope so callers handle both uniformly.
    """
    def _failure(message: str) -> Dict[str, Any]:
        # Error envelope in the same shape as a real API response.
        return {"code": -1, "message": message, "ttl": 0, "data": None}

    # WBI-sign the request parameters before sending.
    signed_params = get_wbi_sign({
        "ps": page_size,
        "pn": page_num,
        "web_location": "333.934"  # 网页位置参数
    })
    request_headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
        "Referer": "https://www.bilibili.com/",
        "Accept": "application/json, text/plain, */*",
        "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
        "Origin": "https://www.bilibili.com"
    }
    try:
        response = requests.get(
            POPULAR_API,
            params=signed_params,
            headers=request_headers,
            timeout=30
        )
        response.raise_for_status()
        return response.json()
    except requests.RequestException as e:
        print(f"网络请求失败: {e}")
        return _failure(f"网络请求失败: {e}")
    except json.JSONDecodeError as e:
        print(f"解析JSON数据失败: {e}")
        return _failure(f"解析JSON数据失败: {e}")
    except Exception as e:
        print(f"获取热门视频列表失败: {e}")
        return _failure(f"获取热门视频列表失败: {e}")
def get_all_popular_videos(
    page_size: int = 20,
    max_pages: int = 100,
    save_to_db: bool = True,
    progress_callback = None
) -> Tuple[List[Dict[str, Any]], bool, Dict[str, Any]]:
    """
    Fetch every page of Bilibili's popular-video list until exhausted.

    Args:
        page_size: Videos per page (default 20).
        max_pages: Hard cap on pages fetched, guards against endless loops.
        save_to_db: Whether to persist results to the database.
        progress_callback: Optional callable(progress_percent, message,
            current_page, total_pages, success=...) invoked as fetching advances.

    Returns:
        Tuple[List[Dict[str, Any]], bool, Dict[str, Any]]:
            - all fetched videos
            - whether the full fetch completed successfully
            - fetch statistics
    """
    all_videos = []
    total_items = 0
    page_num = 1
    has_more = True
    conn = None
    fetch_time = int(time.time())
    failed_count = 0
    duplicate_count = 0
    inactive_count = 0
    # Videos are collected here and written in one transaction at the end.
    videos_to_save = []
    try:
        # Open the DB connection up-front only if persistence was requested.
        if save_to_db:
            conn = get_current_db_connection()
        while has_more and page_num <= max_pages:
            print(f"正在获取第 {page_num} 页数据...")
            # Progress is estimated against max_pages, capped at 95% until done.
            estimated_progress = min(95, int((page_num / max_pages) * 100))
            if progress_callback:
                progress_callback(
                    estimated_progress,
                    f"正在获取第 {page_num} 页数据...",
                    page_num,
                    max_pages
                )
            # Random delay to mimic human browsing and avoid rate limiting.
            delay = random.uniform(3.0, 7.0)
            print(f"等待 {delay:.2f} 秒...")
            time.sleep(delay)
            # Fetch the current page.
            data = get_popular_videos(page_num=page_num, page_size=page_size)
            # Abort the whole run on the first failed page.
            if data["code"] != 0 or not data.get("data"):
                print(f"获取第 {page_num} 页数据失败: {data.get('message', '未知错误')}")
                if progress_callback:
                    progress_callback(
                        estimated_progress,
                        f"获取第 {page_num} 页数据失败: {data.get('message', '未知错误')}",
                        page_num,
                        max_pages,
                        success=False
                    )
                # Record the (failed) crawl before returning.
                if save_to_db and conn:
                    save_fetch_record(conn, fetch_time, len(all_videos), page_num - 1, False, failed_count, duplicate_count)
                fetch_stats = {
                    "status": "error",
                    "message": data.get('message', '未知错误'),
                    "pages_fetched": page_num - 1,
                    "total_items": total_items,
                    "saved_successfully": len(all_videos),
                    "failed_to_save": failed_count,
                    "duplicates_skipped": duplicate_count,
                    "fetch_time": datetime.fromtimestamp(fetch_time).strftime("%Y-%m-%d %H:%M:%S")
                }
                return all_videos, False, fetch_stats
            # Extract this page's video list.
            video_list = data["data"].get("list", [])
            if video_list:
                total_items += len(video_list)
                # Queue videos for the batched DB write instead of inserting now.
                if save_to_db:
                    for index, video in enumerate(video_list):
                        # Rank is the absolute position across all pages.
                        rank = (page_num - 1) * page_size + index + 1
                        videos_to_save.append((video, rank))
                # Flatten and accumulate the simplified records.
                videos = extract_video_info(data)
                all_videos.extend(videos)
                print(f"已获取 {len(all_videos)} 个视频")
                if progress_callback:
                    progress_callback(
                        estimated_progress,
                        f"已获取 {len(all_videos)} 个视频",
                        page_num,
                        max_pages
                    )
            # "no_more" missing is treated as "no more data" (defaults True).
            has_more = not data.get("data", {}).get("no_more", True)
            if has_more:
                page_num += 1
            else:
                print("已获取全部热门视频数据")
                if progress_callback:
                    progress_callback(
                        95,
                        "已获取全部热门视频数据,正在处理...",
                        page_num,
                        page_num
                    )
        # Persist everything in a single transaction.
        if save_to_db and conn and videos_to_save:
            print(f"开始一次性保存 {len(videos_to_save)} 个视频到数据库...")
            if progress_callback:
                progress_callback(
                    96,
                    f"正在保存 {len(videos_to_save)} 个视频到数据库...",
                    page_num,
                    page_num
                )
            conn.execute("BEGIN TRANSACTION")
            try:
                cursor = conn.cursor()
                for video, rank in videos_to_save:
                    try:
                        # Skip rows already stored for this exact crawl.
                        cursor.execute(
                            "SELECT 1 FROM popular_videos WHERE aid = ? AND bvid = ? AND fetch_time = ?",
                            (video.get('aid'), video.get('bvid'), fetch_time)
                        )
                        exists = cursor.fetchone() is not None
                        if exists:
                            duplicate_count += 1
                            print(f"跳过重复视频: {video.get('bvid')} - {video.get('title')}")
                        else:
                            # Insert without committing; the outer transaction commits once.
                            insert_video_to_db(conn, video, fetch_time, rank, auto_commit=False)
                    except Exception as e:
                        failed_count += 1
                        print(f"保存视频 {video.get('bvid')} 时出错: {e}")
                conn.execute("COMMIT")
                print(f"成功保存 {len(videos_to_save) - failed_count - duplicate_count} 个视频到数据库")
            except Exception as e:
                conn.execute("ROLLBACK")
                print(f"批量保存视频时出错,已回滚: {e}")
                # NOTE(review): this overwrites failed_count with the full batch size,
                # so "saved_successfully" below can go negative when duplicates were
                # counted before the rollback — confirm intended semantics.
                failed_count = len(videos_to_save)
            if progress_callback:
                progress_callback(
                    97,
                    f"已保存 {len(videos_to_save) - failed_count - duplicate_count} 个视频到数据库",
                    page_num,
                    page_num
                )
        # Flip videos that vanished from the list to inactive.
        if save_to_db and conn:
            inactive_count = update_inactive_videos(conn, fetch_time)
            print(f"已更新 {inactive_count} 个不再活跃的视频")
            if progress_callback:
                progress_callback(
                    98,
                    f"已更新 {inactive_count} 个不再活跃的视频",
                    max_pages,
                    max_pages
                )
        # Record the successful crawl.
        if save_to_db and conn:
            save_fetch_record(conn, fetch_time, len(all_videos), page_num - 1, True, failed_count, duplicate_count)
        fetch_stats = {
            "status": "success",
            "total_videos": len(all_videos),
            "fetch_stats": {
                "pages_fetched": page_num - 1,
                "total_items": total_items,
                "saved_successfully": len(all_videos) - failed_count - duplicate_count,
                "failed_to_save": failed_count,
                "duplicates_skipped": duplicate_count,
                "inactive_updated": inactive_count,
                "fetch_time": datetime.fromtimestamp(fetch_time).strftime("%Y-%m-%d %H:%M:%S")
            }
        }
        if progress_callback:
            progress_callback(
                100,
                "热门视频获取完成",
                max_pages,
                max_pages,
                success=True
            )
        return all_videos, True, fetch_stats
    except Exception as e:
        print(f"获取所有热门视频时出错: {e}")
        if progress_callback:
            progress_callback(
                min(95, int((page_num / max_pages) * 100)),
                f"获取热门视频时出错: {str(e)}",
                page_num,
                max_pages,
                success=False
            )
        # Best-effort: record the failed crawl before returning.
        if save_to_db and conn:
            save_fetch_record(conn, fetch_time, len(all_videos), page_num - 1, False, failed_count, duplicate_count)
        fetch_stats = {
            "status": "error",
            "message": str(e),
            "pages_fetched": page_num - 1,
            "total_items": total_items,
            "saved_successfully": len(all_videos) - failed_count - duplicate_count,
            "failed_to_save": failed_count,
            "duplicates_skipped": duplicate_count,
            "fetch_time": datetime.fromtimestamp(fetch_time).strftime("%Y-%m-%d %H:%M:%S")
        }
        return all_videos, False, fetch_stats
    finally:
        # NOTE(review): the connection from get_current_db_connection() is always
        # closed here — confirm it is not a shared/pooled connection.
        if conn:
            conn.close()
def extract_video_info(data: Dict[str, Any]) -> List[Dict[str, Any]]:
    """Flatten a raw popular-list API payload into simple per-video dicts.

    Args:
        data: Decoded API response.

    Returns:
        List[Dict[str, Any]]: One flat dict per video; empty list when the
        payload signals an error or carries no video list.
    """
    if data["code"] != 0 or not data.get("data") or not data["data"].get("list"):
        return []

    def _flatten(v: Dict[str, Any]) -> Dict[str, Any]:
        # Nested owner/stat sub-dicts are lifted to top-level keys.
        owner = v.get("owner", {})
        stat = v.get("stat", {})
        return {
            "aid": v.get("aid"),
            "bvid": v.get("bvid"),
            "title": v.get("title"),
            "author": owner.get("name"),
            "mid": owner.get("mid"),
            "play": stat.get("view"),
            "favorite": stat.get("favorite"),
            "coin": stat.get("coin"),
            "share": stat.get("share"),
            "like": stat.get("like"),
            "duration": v.get("duration"),
            "pubdate": v.get("pubdate"),
            "description": v.get("desc"),
            "tname": v.get("tname"),
            "short_link": v.get("short_link_v2")
        }

    return [_flatten(v) for v in data["data"]["list"]]
def print_popular_videos(video_list: List[Dict[str, Any]], max_display: int = None) -> None:
    """Pretty-print fetched popular videos to stdout.

    Args:
        video_list: Flattened video dicts (see extract_video_info).
        max_display: Show at most this many entries; None shows all.
    """
    if not video_list:
        print("未获取到视频信息")
        return
    # Decide how many entries to show.
    limited = max_display is not None and max_display > 0
    display_list = video_list[:max_display] if limited else video_list
    if limited:
        print(f"\n共获取到 {len(video_list)} 个热门视频,显示前 {len(display_list)} 个:")
    else:
        print(f"\n共获取到 {len(video_list)} 个热门视频:")
    print("-" * 80)
    for i, video in enumerate(display_list, 1):
        try:
            pubdate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(video.get("pubdate", 0)))
            play_count = video.get("play", 0)
            # Counts above 10k are shown in units of 万 (ten-thousands).
            if play_count and play_count > 10000:
                play_count = f"{play_count / 10000:.1f}万"
            print(f"{i}. {video.get('title')}")
            print(f" UP主: {video.get('author')} (UID: {video.get('mid')})")
            print(f" 播放: {play_count} | 点赞: {video.get('like')} | 投币: {video.get('coin')} | 收藏: {video.get('favorite')}")
            print(f" BV号: {video.get('bvid')} | 分区: {video.get('tname')} | 发布时间: {pubdate}")
            print(f" 链接: {video.get('short_link')}")
            print("-" * 80)
        except Exception as e:
            # A malformed entry must not abort the whole listing.
            print(f"打印视频信息时出错: {e}")
            continue
def query_recent_videos(limit: int = 20) -> List[Dict[str, Any]]:
    """Return videos from the most recent successful crawl, by view count.

    Args:
        limit: Maximum number of rows to return.

    Returns:
        List[Dict[str, Any]]: Video dicts; empty on error or when no
        successful crawl exists.
    """
    conn = None
    try:
        conn = get_current_db_connection()
        cursor = conn.cursor()
        # Everything keys off the newest successful fetch timestamp.
        cursor.execute('''
            SELECT MAX(fetch_time) FROM fetch_records WHERE success = 1
        ''')
        latest = cursor.fetchone()
        if not latest or not latest[0]:
            return []
        cursor.execute('''
            SELECT
                aid, bvid, title, pubdate, owner_mid, owner_name,
                view_count, favorite_count, coin_count, share_count, like_count,
                duration, tname, short_link
            FROM popular_videos
            WHERE fetch_time = ?
            ORDER BY view_count DESC
            LIMIT ?
        ''', (latest[0], limit))
        # Column order above maps positionally onto these output keys.
        keys = (
            "aid", "bvid", "title", "pubdate", "mid", "author",
            "play", "favorite", "coin", "share", "like",
            "duration", "tname", "short_link",
        )
        return [dict(zip(keys, row)) for row in cursor.fetchall()]
    except sqlite3.Error as e:
        print(f"查询数据库时出错: {e}")
        return []
    finally:
        if conn:
            conn.close()
def get_fetch_history(limit: int = 10) -> List[Dict[str, Any]]:
    """Collect recent crawl records across all per-year databases.

    Args:
        limit: Maximum number of records to return after merging years.

    Returns:
        List[Dict[str, Any]]: Fetch records, newest first; empty on error.
    """
    connections = {}
    history = []
    try:
        connections = get_multi_year_connections()
        # Pull up to `limit` records from each year, then merge.
        for year, conn in connections.items():
            cursor = conn.cursor()
            cursor.execute('''
                SELECT fetch_time, total_fetched, pages_fetched, success
                FROM fetch_records
                ORDER BY fetch_time DESC
                LIMIT ?
            ''', (limit,))
            for fetch_time, total, pages, ok in cursor.fetchall():
                history.append({
                    "fetch_time": fetch_time,
                    "fetch_time_str": datetime.fromtimestamp(fetch_time).strftime("%Y-%m-%d %H:%M:%S"),
                    "total_fetched": total,
                    "pages_fetched": pages,
                    "success": bool(ok),
                    "year": year
                })
        # Newest first across all years, then apply the global cap.
        history.sort(key=lambda rec: rec["fetch_time"], reverse=True)
        return history[:limit]
    except sqlite3.Error as e:
        print(f"查询抓取历史时出错: {e}")
        return []
    finally:
        for conn in connections.values():
            if conn:
                conn.close()
def get_video_tracking_stats(limit: int = 20) -> List[Dict[str, Any]]:
    """
    Aggregate per-video popularity-tracking stats across all year databases,
    deduplicated by bvid.

    Args:
        limit: Maximum number of videos to return.

    Returns:
        List[Dict[str, Any]]: Per-video tracking stats (deduplicated),
        sorted by accumulated time on the popular list, descending.
    """
    connections = {}
    stats_dict = {}  # keyed by bvid so the same video from several year DBs appears once
    try:
        connections = get_multi_year_connections()
        for year, conn in connections.items():
            cursor = conn.cursor()
            # ROW_NUMBER over bvid keeps only the newest tracking row per video;
            # the ORDER BY ranks by total time on the list (active videos get
            # the still-running interval added on top of total_duration).
            cursor.execute('''
                WITH RankedVideos AS (
                    SELECT
                        t.aid, t.bvid, t.title, t.first_seen, t.last_seen,
                        t.is_active, t.total_duration, t.highest_rank,
                        t.lowest_rank, t.appearances,
                        p.owner_name,
                        ROW_NUMBER() OVER (PARTITION BY t.bvid ORDER BY t.last_seen DESC) as rn
                    FROM popular_video_tracking t
                    LEFT JOIN popular_videos p ON t.bvid = p.bvid
                    ORDER BY
                        CASE WHEN t.is_active = 1 THEN (? - t.first_seen) + t.total_duration
                        ELSE t.total_duration END DESC
                )
                SELECT * FROM RankedVideos WHERE rn = 1
                LIMIT ?
            ''', (int(time.time()), limit))
            rows = cursor.fetchall()
            for row in rows:
                bvid = row[1]  # video's bvid
                # Skip videos already collected from another year's database.
                if bvid in stats_dict:
                    continue
                first_seen_date = datetime.fromtimestamp(row[3]).strftime("%Y-%m-%d %H:%M:%S")
                last_seen_date = datetime.fromtimestamp(row[4]).strftime("%Y-%m-%d %H:%M:%S")
                # Human-readable duration; active videos include the ongoing interval.
                total_seconds = row[6]
                if row[5] == 1:  # still active: add time elapsed since last_seen
                    total_seconds += (int(time.time()) - row[4])
                days = total_seconds // (24 * 3600)
                hours = (total_seconds % (24 * 3600)) // 3600
                minutes = (total_seconds % 3600) // 60
                duration_str = ""
                if days > 0:
                    duration_str += f"{days}天"
                if hours > 0 or days > 0:
                    duration_str += f"{hours}小时"
                duration_str += f"{minutes}分钟"
                stats_dict[bvid] = {
                    "aid": row[0],
                    "bvid": bvid,
                    "title": row[2],
                    "first_seen": row[3],
                    "first_seen_str": first_seen_date,
                    "last_seen": row[4],
                    "last_seen_str": last_seen_date,
                    "is_active": bool(row[5]),
                    "total_duration": row[6],
                    "duration_str": duration_str,
                    "highest_rank": row[7],
                    "lowest_rank": row[8],
                    "appearances": row[9],
                    "author": row[10]
                }
        # Flatten to a list ordered by raw total_duration, descending.
        # NOTE(review): the sort uses stored total_duration only, not the
        # active-video adjusted value used in the SQL ORDER BY — confirm intended.
        stats = list(stats_dict.values())
        stats.sort(key=lambda x: x["total_duration"], reverse=True)
        # Apply the result cap.
        return stats[:limit]
    except sqlite3.Error as e:
        print(f"查询视频跟踪统计时出错: {e}")
        return []
    finally:
        for conn in connections.values():
            if conn:
                conn.close()
def cleanup_inactive_video_records():
    """
    Prune history for videos no longer on the popular list, keeping only
    each video's first and last snapshot.

    Steps:
    1. Find all videos flagged inactive (is_active = 0).
    2. For each, keep its earliest and latest popular_videos rows.
    3. Delete every row in between, then VACUUM each database.

    Returns:
        dict: Cleanup statistics (totals plus per-year breakdown).
    """
    connections = {}
    stats = {
        "processed_videos": 0,
        "deleted_records": 0,
        "error_count": 0,
        "year_stats": {}
    }
    try:
        # One connection per year database; each year is cleaned in its own transaction.
        connections = get_multi_year_connections()
        for year, conn in connections.items():
            year_stats = {
                "processed_videos": 0,
                "deleted_records": 0
            }
            cursor = conn.cursor()
            cursor.execute("BEGIN TRANSACTION")
            try:
                # 1. Videos that have dropped off the popular list.
                cursor.execute("""
                    SELECT bvid
                    FROM popular_video_tracking
                    WHERE is_active = 0
                """)
                inactive_videos = [row[0] for row in cursor.fetchall()]
                print(f"{year}年数据库中找到 {len(inactive_videos)} 个不活跃视频")
                for bvid in inactive_videos:
                    # 2. All snapshot timestamps for this video, oldest first.
                    cursor.execute("""
                        SELECT fetch_time
                        FROM popular_videos
                        WHERE bvid = ?
                        ORDER BY fetch_time
                    """, (bvid,))
                    fetch_times = [row[0] for row in cursor.fetchall()]
                    if len(fetch_times) <= 2:
                        # Two or fewer snapshots: nothing to prune.
                        continue
                    # 3. Keep the first and last snapshot; everything else goes.
                    first_time = fetch_times[0]
                    last_time = fetch_times[-1]
                    # Timestamps slated for deletion (all but first and last).
                    times_to_delete = fetch_times[1:-1]
                    # Placeholder list for the IN (...) clause.
                    placeholders = ','.join(['?'] * len(times_to_delete))
                    # 4. Delete the middle snapshots.
                    cursor.execute(f"""
                        DELETE FROM popular_videos
                        WHERE bvid = ? AND fetch_time IN ({placeholders})
                    """, [bvid] + times_to_delete)
                    deleted_count = cursor.rowcount
                    # 6. Tally per-year statistics.
                    year_stats["processed_videos"] += 1
                    year_stats["deleted_records"] += deleted_count
                    # 7. Log what was pruned.
                    if deleted_count > 0:
                        print(f"清理视频 {bvid}: 删除了 {deleted_count} 条记录,保留首条({first_time})和末条({last_time})记录")
                # Commit this year's cleanup.
                cursor.execute("COMMIT")
                # Fold the year's numbers into the grand totals.
                stats["processed_videos"] += year_stats["processed_videos"]
                stats["deleted_records"] += year_stats["deleted_records"]
                stats["year_stats"][year] = year_stats
            except Exception as e:
                # Roll back only this year's transaction; other years are unaffected.
                cursor.execute("ROLLBACK")
                print(f"{year}年数据清理时出错: {e}")
                stats["error_count"] += 1
                stats["year_stats"][year] = {"error": str(e)}
        print(f"数据清理完成: 处理了 {stats['processed_videos']} 个视频,删除了 {stats['deleted_records']} 条记录")
        # Reclaim disk space freed by the deletions.
        for year, conn in connections.items():
            try:
                print(f"正在对{year}年数据库执行VACUUM操作...")
                conn.execute("VACUUM")
                print(f"{year}年数据库VACUUM操作完成")
            except Exception as e:
                print(f"{year}年数据库VACUUM操作失败: {e}")
        return stats
    except Exception as e:
        print(f"执行数据清理时出错: {e}")
        stats["error"] = str(e)
        return stats
    finally:
        # Always release every year connection.
        for conn in connections.values():
            if conn:
                conn.close()
def schedule_daily_cleanup():
    """Start a daemon thread that runs the daily data cleanup at 03:00.

    Intended to be called once at application startup.

    Returns:
        threading.Thread: The already-started daemon scheduler thread.
    """
    import threading
    import schedule
    import time

    def _cleanup_job():
        print(f"===== 开始执行每日数据清理,时间: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')} =====")
        stats = cleanup_inactive_video_records()
        print(f"===== 每日数据清理完成,时间: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')} =====")
        print(f"清理统计: {stats}")

    def _scheduler_loop():
        # Fire the cleanup every day at 03:00, polling once a minute.
        schedule.every().day.at("03:00").do(_cleanup_job)
        while True:
            schedule.run_pending()
            time.sleep(60)

    worker = threading.Thread(target=_scheduler_loop)
    worker.daemon = True  # dies with the main process
    worker.start()
    print("已设置每日数据清理定时任务,将在每天凌晨3点执行")
    # Returned so callers can inspect / join the thread if they wish.
    return worker
def main() -> None:
    """CLI entry point: fetch all popular videos and print a summary."""
    try:
        print("正在获取B站热门视频列表...")
        # Fetch the complete popular list (persisted to DB by default).
        videos, success, fetch_stats = get_all_popular_videos(page_size=20)
        if success and videos:
            # Show only the first 20 up front.
            print_popular_videos(videos, max_display=20)
            # Offer to dump the full list when there is more to see.
            if len(videos) > 20:
                choice = input("\n是否显示所有获取到的视频?(y/n): ")
                if choice.lower() == 'y':
                    print_popular_videos(videos)
        elif not videos:
            print("未获取到任何视频数据")
    except KeyboardInterrupt:
        print("\n程序已被用户中断")
    except Exception as e:
        print(f"程序执行出错: {e}")
    finally:
        print("\n程序执行完毕")
if __name__ == "__main__":
    main()
|
2977094657/BilibiliHistoryFetcher
| 7,262
|
scripts/task_manager.py
|
import os
import sqlite3
import sys
import argparse
from datetime import datetime
def get_db_connection():
    """Open the scheduler SQLite database with named-column row access.

    The path is resolved relative to the project root, working whether the
    script runs from the repo root or from scripts/. Exits the process
    with status 1 when the database file does not exist.
    """
    script_dir = os.path.dirname(os.path.abspath(__file__))
    inside_scripts = os.path.basename(script_dir) == 'scripts'
    project_root = os.path.dirname(script_dir) if inside_scripts else script_dir
    db_path = os.path.join(project_root, 'output', 'database', 'scheduler.db')
    if not os.path.exists(db_path):
        print(f"错误: 数据库文件不存在: {db_path}")
        sys.exit(1)
    connection = sqlite3.connect(db_path)
    connection.row_factory = sqlite3.Row  # rows addressable by column name
    return connection
def list_tasks():
    """Print every scheduled task, grouped as main tasks with their sub-tasks."""
    conn = get_db_connection()
    cursor = conn.cursor()
    cursor.execute("""
        SELECT t.task_id, t.name, t.task_type, t.enabled, t.schedule_type, t.schedule_time,
               t.last_run_time, t.next_run_time, t.last_status,
               (SELECT GROUP_CONCAT(depends_on, ', ') FROM task_dependencies WHERE task_id = t.task_id) as dependencies
        FROM tasks t
        ORDER BY
            CASE t.task_type WHEN 'main' THEN 0 ELSE 1 END,
            t.parent_id,
            t.sequence_number
    """)
    tasks = cursor.fetchall()
    print("\n=== 任务列表 ===")
    print(f"总计 {len(tasks)} 个任务\n")
    # Walk the main tasks; their sub-tasks are matched by dependency strings.
    for main_task in [t for t in tasks if t['task_type'] == 'main']:
        enabled_label = '启用' if main_task['enabled'] else '禁用'
        print(f"【{main_task['task_id']}】({main_task['name']}) - {enabled_label}")
        print(f" 类型: 主任务, 调度: {main_task['schedule_type']}")
        if main_task['schedule_time']:
            print(f" 执行时间: {main_task['schedule_time']}")
        print(f" 上次执行: {main_task['last_run_time'] or '未执行'}")
        print(f" 下次执行: {main_task['next_run_time'] or '未计划'}")
        print(f" 上次状态: {main_task['last_status'] or '未知'}")
        # A sub-task belongs here when its dependency list mentions this main task.
        sub_tasks = [t for t in tasks
                     if t['task_type'] == 'sub' and t['dependencies']
                     and main_task['task_id'] in t['dependencies']]
        if sub_tasks:
            print("\n 子任务:")
            for idx, sub_task in enumerate(sub_tasks, 1):
                sub_label = '启用' if sub_task['enabled'] else '禁用'
                print(f" {idx}. {sub_task['task_id']} ({sub_task['name']}) - {sub_label}")
        # Blank line between main-task groups.
        print()
    conn.close()
def enable_task(task_id, enable=True):
    """Enable or disable a task by id.

    Args:
        task_id: Task identifier.
        enable: True to enable, False to disable.

    Returns:
        bool: False when the task does not exist, True otherwise
        (including when the state was already as requested).
    """
    conn = get_db_connection()
    cursor = conn.cursor()
    cursor.execute("SELECT task_id, name, task_type, enabled FROM tasks WHERE task_id = ?", (task_id,))
    task = cursor.fetchone()
    if task is None:
        print(f"错误: 任务 '{task_id}' 不存在")
        conn.close()
        return False
    desired_label = '启用' if enable else '禁用'
    # Already in the requested state: nothing to write.
    if bool(task['enabled']) == enable:
        print(f"任务 '{task_id}' 已经{desired_label},无需更改")
        conn.close()
        return True
    cursor.execute("""
        UPDATE tasks
        SET enabled = ?, last_modified = ?
        WHERE task_id = ?
    """, (1 if enable else 0, datetime.now().isoformat(), task_id))
    conn.commit()
    conn.close()
    print(f"任务 '{task_id}' ({task['name']}) 已{desired_label}")
    return True
def get_task_details(task_id):
    """Print the full detail report for one task.

    Covers basic fields, dependency edges in both directions, sub-tasks
    (for main tasks), and the five most recent execution records.

    Args:
        task_id: Task identifier.

    Returns:
        bool: False when the task does not exist, True otherwise.
    """
    conn = get_db_connection()
    cursor = conn.cursor()
    # Base task record.
    cursor.execute("""
        SELECT * FROM tasks WHERE task_id = ?
    """, (task_id,))
    task = cursor.fetchone()
    if not task:
        print(f"错误: 任务 '{task_id}' 不存在")
        conn.close()
        return False
    print(f"\n=== 任务详情: {task_id} ===")
    print(f"名称: {task['name']}")
    print(f"类型: {'主任务' if task['task_type'] == 'main' else '子任务'}")
    print(f"状态: {'启用' if task['enabled'] else '禁用'}")
    print(f"接口: {task['method']} {task['endpoint']}")
    print(f"调度类型: {task['schedule_type']}")
    if task['schedule_time']:
        print(f"调度时间: {task['schedule_time']}")
    if task['schedule_delay']:
        print(f"延迟时间: {task['schedule_delay']}秒")
    print(f"上次执行: {task['last_run_time'] or '未执行'}")
    print(f"下次执行: {task['next_run_time'] or '未计划'}")
    print(f"执行状态: {task['last_status'] or '未知'}")
    print(f"总执行次数: {task['total_runs']}")
    print(f"成功次数: {task['success_runs']}")
    print(f"失败次数: {task['fail_runs']}")
    if task['last_error']:
        print(f"最近错误: {task['last_error']}")
    # Tasks this one depends on.
    cursor.execute("""
        SELECT depends_on FROM task_dependencies WHERE task_id = ?
    """, (task_id,))
    dependencies = [row['depends_on'] for row in cursor.fetchall()]
    if dependencies:
        print(f"\n依赖任务: {', '.join(dependencies)}")
    # Tasks that depend on this one.
    cursor.execute("""
        SELECT task_id FROM task_dependencies WHERE depends_on = ?
    """, (task_id,))
    dependent_tasks = [row['task_id'] for row in cursor.fetchall()]
    if dependent_tasks:
        print(f"被依赖任务: {', '.join(dependent_tasks)}")
    # Main tasks also list their ordered sub-tasks.
    if task['task_type'] == 'main':
        cursor.execute("""
            SELECT task_id, name, enabled, sequence_number
            FROM tasks
            WHERE parent_id = ?
            ORDER BY sequence_number
        """, (task_id,))
        subtasks = cursor.fetchall()
        if subtasks:
            print("\n子任务:")
            for idx, subtask in enumerate(subtasks, 1):
                print(f" {idx}. {subtask['task_id']} ({subtask['name']}) - " +
                      f"{'启用' if subtask['enabled'] else '禁用'}")
    # Five most recent execution records.
    cursor.execute("""
        SELECT * FROM task_executions
        WHERE task_id = ?
        ORDER BY start_time DESC
        LIMIT 5
    """, (task_id,))
    executions = cursor.fetchall()
    if executions:
        print("\n最近执行记录:")
        for idx, exec_record in enumerate(executions, 1):
            print(f" {idx}. 时间: {exec_record['start_time']}")
            print(f" 状态: {exec_record['status']}")
            print(f" 耗时: {exec_record['duration']}秒")
            if exec_record['error_message']:
                print(f" 错误: {exec_record['error_message']}")
    conn.close()
    return True
def main():
    """CLI dispatcher for the task-management tool."""
    parser = argparse.ArgumentParser(description='任务管理工具')
    subparsers = parser.add_subparsers(dest='command', help='命令')
    # list takes no argument; the other three all take a task id.
    subparsers.add_parser('list', help='列出所有任务')
    for cmd, help_text in (('enable', '启用任务'),
                           ('disable', '禁用任务'),
                           ('details', '获取任务详情')):
        sub = subparsers.add_parser(cmd, help=help_text)
        sub.add_argument('task_id', help='任务ID')
    args = parser.parse_args()
    if args.command == 'list':
        list_tasks()
    elif args.command in ('enable', 'disable'):
        # Shared handler: the flag encodes which of the two was requested.
        enable_task(args.task_id, args.command == 'enable')
    elif args.command == 'details':
        get_task_details(args.task_id)
    else:
        parser.print_help()
if __name__ == "__main__":
    main()
|
2977094657/BilibiliHistoryFetcher
| 7,139
|
scripts/utils.py
|
import os
import sqlite3
import sys
from datetime import datetime
from typing import Dict, Any
import yaml
from loguru import logger
# 全局变量,用于标记日志系统是否已初始化
_logger_initialized = False
def setup_logger(log_level: str = "INFO") -> Dict:
    """
    Initialize the application-wide loguru logging setup (idempotent).

    Installs three sinks: a filtered console sink, a daily-rotated main
    log file, and a daily-rotated error-only log file. Subsequent calls
    return the current paths without reconfiguring.

    Args:
        log_level: Minimum level for the main file sink (default INFO).

    Returns:
        Dict with ``log_dir``, ``main_log_file`` and ``error_log_file``.
    """
    global _logger_initialized
    # Today's log directory: output/logs/YYYY/MM/DD.
    current_date = datetime.now().strftime("%Y/%m/%d")
    year_month = current_date.rsplit("/", 1)[0]  # "YYYY/MM" part
    day_only = current_date.split('/')[-1]  # day-of-month only
    log_dir = f'output/logs/{year_month}/{day_only}'
    # Make sure the initial log directory exists.
    os.makedirs(log_dir, exist_ok=True)
    # Already configured: just report the current paths.
    if _logger_initialized:
        main_log_file = f'{log_dir}/{day_only}.log'
        error_log_file = f'{log_dir}/error_{day_only}.log'
        return {
            "log_dir": log_dir,
            "main_log_file": main_log_file,
            "error_log_file": error_log_file
        }
    # Drop loguru's default handler before installing our own.
    logger.remove()
    # Global context fields available to every sink's format string.
    logger.configure(extra={"app_name": "BilibiliHistoryFetcher", "version": "1.0.0"})
    # Console sink: INFO+, message text only, no timestamps.
    logger.add(
        sys.stdout,
        level="INFO",
        format="<green>{message}</green>",
        filter=lambda record: (
            # Only messages starting with these prefixes reach the console.
            isinstance(record["message"], str) and
            record["message"].startswith(("===", "正在", "已", "成功", "错误:", "警告:"))
        ),
        enqueue=True,  # process-safe console output
        diagnose=False  # no diagnostics, avoids log feedback loops
    )
    # Dynamic path template so loguru splits logs into per-date directories:
    # {time:YYYY} year dir, {time:MM} month dir, {time:DD} day dir + file name.
    dynamic_log_path = "output/logs/{time:YYYY}/{time:MM}/{time:DD}/{time:DD}.log"
    # Main file sink with the full record format.
    logger.add(
        dynamic_log_path,  # dynamic per-date path
        level=log_level,
        format="[{time:YYYY-MM-DD HH:mm:ss}] [{level}] [{extra[app_name]}] [v{extra[version]}] [进程:{process}] [线程:{thread}] [{name}] [{file.name}:{line}] [{function}] {message}\n{exception}",
        encoding="utf-8",
        enqueue=True,  # process-safe queue
        diagnose=False,  # no diagnostic frames (deadlock-prone with queues)
        backtrace=False,  # no extended tracebacks
        rotation="00:00",  # rotate at midnight
        retention="30 days",  # keep 30 days
        compression="zip"  # zip rotated files
    )
    # Error log uses the same dynamic per-date scheme.
    dynamic_error_log_path = "output/logs/{time:YYYY}/{time:MM}/{time:DD}/error_{time:DD}.log"
    # ERROR-and-above sink.
    logger.add(
        dynamic_error_log_path,  # dynamic per-date path
        level="ERROR",  # ERROR and above only
        format="[{time:YYYY-MM-DD HH:mm:ss}] [{level}] [{extra[app_name]}] [{name}] [{file.name}:{line}] [{function}] {message}\n{exception}",
        encoding="utf-8",
        enqueue=True,
        diagnose=False,  # no diagnostic frames
        backtrace=False,  # no extended tracebacks
        rotation="00:00",  # rotate at midnight
        retention="30 days",
        compression="zip"
    )
    # Mark setup complete so repeat calls short-circuit above.
    _logger_initialized = True
    # Emit one record to prove the pipeline works.
    logger.info("=== 日志系统初始化完成 ===")
    main_log_file = f'{log_dir}/{day_only}.log'
    error_log_file = f'{log_dir}/error_{day_only}.log'
    return {
        "log_dir": log_dir,
        "main_log_file": main_log_file,
        "error_log_file": error_log_file
    }
# Initialize the logging system at import time (idempotent via _logger_initialized).
setup_logger()
def get_base_path() -> str:
    """Return the application's root directory.

    Frozen (PyInstaller) builds use the executable's directory; source
    checkouts use the project root, i.e. the parent of this scripts/ dir.
    """
    if getattr(sys, 'frozen', False):
        # Packaged executable: everything lives next to the binary.
        return os.path.dirname(sys.executable)
    # Plain script: two levels up from this file.
    return os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
def get_config_path(config_file: str) -> str:
    """Resolve the absolute path of a configuration file.

    Args:
        config_file: File name inside the config directory.

    Returns:
        Full path: frozen builds look under ``_internal/config`` next to
        the executable; source checkouts under ``<project root>/config``.
    """
    if getattr(sys, 'frozen', False):
        # Packaged executable: PyInstaller bundles config under _internal.
        exe_dir = os.path.dirname(sys.executable)
        return os.path.join(exe_dir, '_internal', 'config', config_file)
    # Plain script: config/ sits in the project root.
    project_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    return os.path.join(project_root, 'config', config_file)
def load_config() -> Dict[str, Any]:
    """Load config/config.yaml and validate its email section.

    Returns:
        Dict[str, Any]: Parsed configuration.

    Raises:
        FileNotFoundError: When the config file is missing.
        ValueError: When required email fields are missing.
    """
    try:
        config_path = get_config_path('config.yaml')
        if not os.path.exists(config_path):
            # Dump debugging context first so packaging issues are diagnosable.
            base_path = get_base_path()
            logger.debug(f"\n=== 配置文件信息 ===")
            logger.debug(f"当前基础路径: {base_path}")
            logger.debug(f"尝试加载配置文件: {config_path}")
            logger.debug(f"当前目录内容: {os.listdir(base_path)}")
            if os.path.exists(os.path.dirname(config_path)):
                logger.debug(f"配置目录内容: {os.listdir(os.path.dirname(config_path))}")
            logger.debug("=====================\n")
            raise FileNotFoundError(f"配置文件不存在: {config_path}")
        with open(config_path, 'r', encoding='utf-8') as f:
            config = yaml.safe_load(f)
        # The email section must be fully populated before anything runs.
        email_config = config.get('email', {})
        required_fields = ['smtp_server', 'smtp_port', 'sender', 'password', 'receiver']
        missing_fields = [name for name in required_fields if not email_config.get(name)]
        if missing_fields:
            raise ValueError(f"邮件配置缺少必要字段: {', '.join(missing_fields)}")
        return config
    except Exception as e:
        logger.error(f"加载配置文件失败: {str(e)}")
        raise
def get_output_path(*paths: str) -> str:
    """
    Build (and ensure) a path under the project's output/ directory.

    Args:
        *paths: Path segments below output/.

    Returns:
        The full path; the output directory and the path's parent
        directory are created if missing.
    """
    # Always anchor on the exe directory / project root.
    base_path = get_base_path()
    output_dir = os.path.join(base_path, 'output')
    # exist_ok avoids the check-then-create race of the old
    # `if not exists: makedirs` pattern (crashed if another process
    # created the directory between the two calls).
    os.makedirs(output_dir, exist_ok=True)
    full_path = os.path.join(output_dir, *paths)
    # Ensure the parent directory of the target exists.
    os.makedirs(os.path.dirname(full_path), exist_ok=True)
    return full_path
def get_database_path(*paths: str) -> str:
    """
    Build (and ensure) a path under output/database/.

    Args:
        *paths: Path segments below output/database/.

    Returns:
        The full database path; all needed directories are created.
    """
    # Always anchor on the exe directory / project root.
    base_path = get_base_path()
    database_dir = os.path.join(base_path, 'output', 'database')
    # exist_ok replaces the racy `if not exists: makedirs` pattern and also
    # makes the old "parent_dir != database_dir" special case unnecessary:
    # creating an already-existing directory is simply a no-op.
    os.makedirs(database_dir, exist_ok=True)
    full_path = os.path.join(database_dir, *paths)
    os.makedirs(os.path.dirname(full_path), exist_ok=True)
    return full_path
def get_logs_path() -> str:
    """
    Return today's log file path, creating its directory if needed.

    Mirrors the dynamic sink template used by setup_logger
    (output/logs/YYYY/MM/DD/DD.log).

    Returns:
        str: Relative path to today's log file.
    """
    now = datetime.now()
    # Zero-padded month/day match loguru's {time:MM}/{time:DD} tokens.
    log_path = f"output/logs/{now.year}/{now.month:02d}/{now.day:02d}/{now.day:02d}.log"
    os.makedirs(os.path.dirname(log_path), exist_ok=True)
    return log_path
def get_db():
    """Open a connection to the main Bilibili history database."""
    return sqlite3.connect(get_database_path('bilibili_history.db'))
|
2977094657/BilibiliHistoryFetcher
| 1,732
|
scripts/yutto_runner.py
|
import asyncio
import io
import sys
from typing import AsyncGenerator
from yutto.__main__ import main as _YUTTO_MAIN
class _AsyncWriter(io.StringIO):
    """StringIO stand-in whose write() forwards each chunk to an asyncio.Queue.

    Writes may originate from a worker thread, so chunks are handed to the
    event loop via call_soon_threadsafe instead of touching the queue
    directly.
    """

    def __init__(self, queue: asyncio.Queue[str | None], loop: asyncio.AbstractEventLoop):
        super().__init__()
        self._queue = queue
        self._loop = loop

    def write(self, s: str) -> int:
        """Forward non-empty chunks to the queue; return the chunk length."""
        if not s:
            return 0  # len("") == 0, so this matches the contract
        self._loop.call_soon_threadsafe(self._queue.put_nowait, s)
        return len(s)
async def run_yutto(argv: list[str]) -> AsyncGenerator[str, None]:
    """Run the yutto CLI in-process and stream its output as SSE events.

    Args:
        argv: yutto CLI arguments (program name excluded; --no-color is
            appended automatically).

    Yields:
        str: SSE-formatted lines ("data: ...\\n\\n") for each chunk yutto
        prints to stdout/stderr.
    """
    loop = asyncio.get_running_loop()
    queue: asyncio.Queue[str | None] = asyncio.Queue()
    # Hijack stdout/stderr so yutto's prints flow into the queue.
    # NOTE(review): this and the sys.argv swap below mutate process-global
    # state, so concurrent run_yutto calls would interfere — confirm callers
    # serialize invocations.
    stdout_backup, stderr_backup = sys.stdout, sys.stderr
    sys.stdout = _AsyncWriter(queue, loop)
    sys.stderr = _AsyncWriter(queue, loop)

    def _worker():
        # Runs in a thread-pool thread: fake sys.argv and call yutto's main.
        argv_backup = sys.argv
        sys.argv = ["yutto", *argv, '--no-color']
        try:
            _YUTTO_MAIN()  # enter yutto's main function
        except SystemExit:  # yutto may call sys.exit() internally
            pass
        finally:
            sys.argv = argv_backup
            # Sentinel: tells the consumer loop below that the run finished.
            loop.call_soon_threadsafe(queue.put_nowait, None)

    try:
        # Run the synchronous CLI in the default executor so the event loop
        # stays responsive.
        loop.run_in_executor(None, _worker)
        # Drain the queue and wrap each chunk as an SSE data event.
        while True:
            line = await queue.get()
            if line is None:  # end-of-run sentinel
                break
            yield f"data: {line.rstrip()}\n\n"
    finally:
        # Fix: restore the streams even if the consumer abandons/cancels the
        # generator early — the original only restored on normal completion,
        # permanently leaking the hijacked stdout/stderr in that case.
        sys.stdout, sys.stderr = stdout_backup, stderr_backup
|
28harishkumar/blog
| 2,224
|
resources/views/admin/profile.blade.php
|
@extends('app')
{{-- Admin profile page: per-user post/comment statistics plus the latest posts and comments. --}}
@section('title')
{{ $user->name }}
@endsection
@section('content')
{{-- Summary card: join date and post counts (links shown only when the viewer is the author and counts are non-zero). --}}
<div>
<ul class="list-group">
<li class="list-group-item">
Joined on {{$user->created_at->format('M d,Y \a\t h:i a') }}
</li>
<li class="list-group-item panel-body">
<table class="table-padding">
<style>
.table-padding td{
padding: 3px 8px;
}
</style>
<tr>
<td>Total Posts</td>
<td> {{$posts_count}}</td>
@if($author && $posts_count)
<td><a href="{{ url('/my-all-posts')}}">Show All</a></td>
@endif
</tr>
<tr>
<td>Published Posts</td>
<td>{{$posts_active_count}}</td>
@if($posts_active_count)
<td><a href="{{ url('/user/'.$user->id.'/posts')}}">Show All</a></td>
@endif
</tr>
<tr>
<td>Posts in Draft </td>
<td>{{$posts_draft_count}}</td>
@if($author && $posts_draft_count)
<td><a href="{{ url('my-drafts')}}">Show All</a></td>
@endif
</tr>
</table>
</li>
<li class="list-group-item">
Total Comments {{$comments_count}}
</li>
</ul>
</div>
{{-- Latest posts panel; $latest_posts is assumed non-empty when index 0 exists. --}}
<div class="panel panel-default">
<div class="panel-heading"><h3>Latest Posts</h3></div>
<div class="panel-body">
@if(!empty($latest_posts[0]))
@foreach($latest_posts as $latest_post)
<p>
<strong><a href="{{ url('/'.$latest_post->slug) }}">{{ $latest_post->title }}</a></strong>
<span class="well-sm">On {{ $latest_post->created_at->format('M d,Y \a\t h:i a') }}</span>
</p>
@endforeach
@else
<p>You have not written any post till now.</p>
@endif
</div>
</div>
{{-- Latest comments panel, each linking back to its parent post. --}}
<div class="panel panel-default">
<div class="panel-heading"><h3>Latest Comments</h3></div>
<div class="list-group">
@if(!empty($latest_comments[0]))
@foreach($latest_comments as $latest_comment)
<div class="list-group-item">
<p>{{ $latest_comment->body }}</p>
<p>On {{ $latest_comment->created_at->format('M d,Y \a\t h:i a') }}</p>
<p>On post <a href="{{ url('/'.$latest_comment->post->slug) }}">{{ $latest_comment->post->title }}</a></p>
</div>
@endforeach
@else
<div class="list-group-item">
<p>You have not commented till now. Your latest 5 comments will be displayed here</p>
</div>
@endif
</div>
</div>
@endsection
|
28harishkumar/blog
| 1,366
|
resources/views/auth/password.blade.php
|
@extends('app')

@section('content')
{{-- Password reset request form: emails a reset link to the given address --}}
<div class="container-fluid">
    <div class="row">
        <div class="col-md-8 col-md-offset-2">
            <div class="panel panel-default">
                <div class="panel-heading">Reset Password</div>
                <div class="panel-body">
                    {{-- Success flash message shown after the link has been sent --}}
                    @if (session('status'))
                        <div class="alert alert-success">
                            {{ session('status') }}
                        </div>
                    @endif
                    {{-- Validation errors from the previous submission --}}
                    @if (count($errors) > 0)
                        <div class="alert alert-danger">
                            <strong>Whoops!</strong> There were some problems with your input.<br><br>
                            <ul>
                                @foreach ($errors->all() as $error)
                                    <li>{{ $error }}</li>
                                @endforeach
                            </ul>
                        </div>
                    @endif
                    <form class="form-horizontal" role="form" method="POST" action="{{ url('/password/email') }}">
                        <input type="hidden" name="_token" value="{{ csrf_token() }}">
                        <div class="form-group">
                            <label class="col-md-4 control-label">E-Mail Address</label>
                            <div class="col-md-6">
                                <input type="email" class="form-control" name="email" value="{{ old('email') }}">
                            </div>
                        </div>
                        <div class="form-group">
                            <div class="col-md-6 col-md-offset-4">
                                <button type="submit" class="btn btn-primary">
                                    Send Password Reset Link
                                </button>
                            </div>
                        </div>
                    </form>
                </div>
            </div>
        </div>
    </div>
</div>
@endsection
|
2977094657/BilibiliHistoryFetcher
| 14,781
|
scripts/import_database.py
|
import pymysql
import json
import os
import time
import threading
from datetime import datetime
import sys
from config.sql_statements_mysql import *
from scripts.utils import load_config, get_base_path, get_output_path
# Global configuration and project base path, resolved once at module import.
config = load_config()
base_path = get_base_path()
# 加载分类映射
def load_categories():
    """Load category mappings from categories.json.

    Searches several candidate locations so both frozen (PyInstaller exe) and
    plain-script execution work.

    Returns:
        tuple: (duplicated_tags, unique_tag_to_main) from the config file.

    Raises:
        FileNotFoundError: when no candidate location contains the file.
    """
    try:
        # Resolve the base directory depending on how we are being run.
        if getattr(sys, 'frozen', False):
            # Packaged executable: config lives under the _internal directory.
            root = os.path.dirname(sys.executable)
            categories_path = os.path.join(root, '_internal', 'config', 'categories.json')
        else:
            # Plain python script: config sits at the project root.
            root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
            categories_path = os.path.join(root, 'config', 'categories.json')
        print(f"\n=== 分类配置信息 ===")
        print(f"基础路径: {root}")
        print(f"配置文件路径: {categories_path}")
        print(f"配置文件存在: {os.path.exists(categories_path)}")
        # Primary location missing: probe the other plausible locations.
        if not os.path.exists(categories_path):
            candidates = [
                os.path.join(root, 'config', 'categories.json'),
                os.path.join(os.getcwd(), 'config', 'categories.json'),
                os.path.join(root, '_internal', 'config', 'categories.json'),
            ]
            print("\n尝试其他可能的配置文件位置:")
            for candidate in candidates:
                print(f"检查: {candidate} - {'存在' if os.path.exists(candidate) else '不存在'}")
                if os.path.exists(candidate):
                    categories_path = candidate
                    break
        if not os.path.exists(categories_path):
            raise FileNotFoundError(f"找不到分类配置文件: {categories_path}")
        with open(categories_path, 'r', encoding='utf-8') as f:
            categories = json.load(f)
        print(f"成功加载分类配置")
        return categories['duplicated_tags'], categories['unique_tag_to_main']
    except Exception as e:
        print(f"加载分类配置时发生错误: {e}")
        raise
# Module-level tag mappings, loaded once at import time (reads categories.json).
duplicated_tags, unique_tag_to_main = load_categories()
# 雪花算法生成器类
class SnowflakeIDGenerator:
    """Thread-safe Snowflake-style unique ID generator.

    64-bit layout (high to low): 41-bit millisecond timestamp since a custom
    epoch, 5-bit datacenter id, 5-bit machine id, 12-bit per-millisecond
    sequence.

    BUG FIX: the original packed only the datacenter id and the sequence into
    the ID — machine_id was masked and stored but never used — so generators
    configured with different machine ids could emit colliding IDs. The
    machine id is now part of the standard snowflake layout.
    """

    def __init__(self, machine_id=1, datacenter_id=1):
        self.lock = threading.Lock()
        self.machine_id = machine_id & 0x1F        # 5 bits
        self.datacenter_id = datacenter_id & 0x1F  # 5 bits
        self.sequence = 0
        self.last_timestamp = -1
        self.epoch = 1609459200000  # 2021-01-01 00:00:00 UTC, in milliseconds

    def _current_millis(self):
        """Current wall-clock time in milliseconds."""
        return int(time.time() * 1000)

    def get_id(self):
        """Return a new unique, monotonically increasing 64-bit ID.

        Raises:
            Exception: if the system clock moved backwards.
        """
        with self.lock:
            timestamp = self._current_millis()
            if timestamp < self.last_timestamp:
                raise Exception("时钟向后移动。拒绝生成 id。")
            if timestamp == self.last_timestamp:
                # Same millisecond: bump the 12-bit sequence counter.
                self.sequence = (self.sequence + 1) & 0xFFF
                if self.sequence == 0:
                    # Sequence exhausted: spin until the next millisecond.
                    while timestamp <= self.last_timestamp:
                        timestamp = self._current_millis()
            else:
                self.sequence = 0
            self.last_timestamp = timestamp
            # Compose the 64-bit ID: timestamp | datacenter | machine | sequence.
            return (((timestamp - self.epoch) << 22)
                    | (self.datacenter_id << 17)
                    | (self.machine_id << 12)
                    | self.sequence)
# Shared snowflake ID generator used for every inserted history row.
id_generator = SnowflakeIDGenerator(machine_id=1, datacenter_id=1)
# 获取当前年份和上一年份
def get_years():
    """Return (current_year, previous_year) based on the local clock."""
    this_year = datetime.now().year
    return this_year, this_year - 1
# 连接到 MySQL 数据库,并在必要时创建数据库
def connect_to_db():
    """Connect to MySQL, creating the target database first if missing.

    Connection parameters come from the DB_HOST / DB_PORT / DB_USER /
    DB_PASSWORD / DB_NAME environment variables, with local-dev defaults.

    Returns:
        pymysql.Connection: a connection bound to the target database
        (DictCursor rows, utf8mb4 charset).

    Raises:
        Exception: re-raised after printing any connection/creation failure.
    """
    try:
        # Phase 1: connect without selecting a database so we can create it.
        connection = pymysql.connect(
            host=os.getenv('DB_HOST', 'localhost'),
            port=int(os.getenv('DB_PORT', 3306)),
            user=os.getenv('DB_USER', 'root'),
            password=os.getenv('DB_PASSWORD', '123456789'),
            charset='utf8mb4',
            cursorclass=pymysql.cursors.DictCursor,
            autocommit=True
        )
        with connection.cursor() as cursor:
            # List all databases (diagnostic; result is fetched but unused).
            cursor.execute("SHOW DATABASES;")
            databases = cursor.fetchall()
            # Check whether the target database exists.
            db_name = os.getenv('DB_NAME', 'bilibilihistory')
            print(f"使用的数据库名称是: {db_name}")
            cursor.execute(SHOW_DATABASES, (db_name,))
            result = cursor.fetchone()
            if not result:
                # Create the database.
                cursor.execute(CREATE_DATABASE.format(db_name=db_name))
                print(f"数据库 '{db_name}' 已创建。")
            else:
                print(f"数据库 '{db_name}' 已存在。")
        connection.close()
        # Phase 2: reconnect bound to the (possibly just created) database.
        connection = pymysql.connect(
            host=os.getenv('DB_HOST', 'localhost'),
            port=int(os.getenv('DB_PORT', 3306)),
            user=os.getenv('DB_USER', 'root'),
            password=os.getenv('DB_PASSWORD', '123456789'),
            db=db_name,
            charset='utf8mb4',
            cursorclass=pymysql.cursors.DictCursor
        )
        # Report which database the new connection actually selected.
        with connection.cursor() as cursor:
            cursor.execute(SELECT_DATABASE)
            current_db = cursor.fetchone()['current_db']
            print(f"当前连接的数据库是: {current_db}")
        return connection
    except Exception as e:
        print(f"连接到数据库时发生错误: {e}")
        raise
# 创建新年份的表,如果不存在
def create_new_year_table(connection, new_table, reference_table):
    """Create `new_table` if missing, cloning `reference_table`'s structure.

    Falls back to the default CREATE TABLE statement when the reference table
    does not exist.

    BUG FIX: the original decided what to report by checking whether the SQL
    text contained "CREATE TABLE" — a test that is satisfied by both
    statements — so the log line effectively always claimed the default
    structure was used. We now remember which branch was actually taken.

    Args:
        connection: open pymysql connection (DictCursor rows).
        new_table: name of the table to create.
        reference_table: table whose structure to copy when present.

    Raises:
        Exception: re-raised after rollback on any failure.
    """
    try:
        with connection.cursor() as cursor:
            # Does the target table already exist?
            cursor.execute(SHOW_TABLES, (connection.db, new_table))
            if cursor.fetchone()['COUNT(*)'] == 0:
                # Does the reference table exist?
                cursor.execute(SHOW_TABLES, (connection.db, reference_table))
                used_default = cursor.fetchone()['COUNT(*)'] == 0
                if used_default:
                    # No reference table: fall back to the default definition.
                    create_table_sql = CREATE_TABLE_DEFAULT.format(table=new_table)
                else:
                    # Clone the reference table's structure.
                    create_table_sql = CREATE_TABLE_LIKE.format(new_table=new_table, reference_table=reference_table)
                cursor.execute(create_table_sql)
                connection.commit()
                base_structure = "默认结构" if used_default else reference_table
                print(f"已创建新表: {new_table},基于表: {base_structure}")
            else:
                print(f"表 {new_table} 已存在,无需创建。")
    except Exception as e:
        connection.rollback()
        print(f"创建新表时发生错误: {e}")
        raise
# 批量插入数据到 MySQL,支持事务回滚
def batch_insert_data(connection, insert_sql, data_chunk):
    """Insert one chunk of rows inside a transaction.

    Args:
        connection: DB connection exposing cursor()/commit()/rollback().
        insert_sql: parameterized INSERT statement.
        data_chunk: sequence of parameter tuples/dicts for executemany.

    Returns:
        int: number of submitted rows on success; 0 after a rollback.
    """
    try:
        with connection.cursor() as cursor:
            cursor.executemany(insert_sql, data_chunk)
            connection.commit()
        return len(data_chunk)
    except Exception as e:
        connection.rollback()
        print(f" 插入数据时发生错误: {e}")
        return 0
# 从 JSON 文件导入数据
def import_data_from_json(connection, insert_sql, file_path, batch_size=1000):
    """Load one day's history JSON file and batch-insert its records.

    Each record gets a snowflake `id` and, for 'archive' entries, a
    `main_category` derived from the tag mappings loaded at module import.

    Args:
        connection: open MySQL connection.
        insert_sql: parameterized INSERT statement for the target table.
        file_path: path to the day's JSON file (a list of history items).
        batch_size: rows per executemany batch.

    Returns:
        int: number of rows successfully inserted (0 on read/parse errors).
    """
    with open(file_path, 'r', encoding='utf-8') as f:
        try:
            data = json.load(f)
        except json.JSONDecodeError as e:
            print(f"JSON 解码错误在文件 {file_path}: {e}")
            return 0
        except Exception as e:
            print(f"读取文件 {file_path} 时发生错误: {e}")
            return 0
    total_inserted = 0
    try:
        # Build the list of rows to insert, generating a unique id per row.
        new_data = []
        for index, item in enumerate(data, start=1):
            main_category = None
            history = item.get('history', {})
            business = history.get('business', '')
            # Always read tag_name, even when business is not 'archive'.
            tag_name = item.get('tag_name', '').strip()
            if business == 'archive':
                if tag_name in unique_tag_to_main:
                    main_category = unique_tag_to_main[tag_name]
                elif tag_name in duplicated_tags:
                    main_category = '待定'
                else:
                    main_category = '待定'
            # When business is not 'archive', main_category stays None.
            record = {
                "id": id_generator.get_id(),  # unique snowflake id
                "title": item.get('title', ''),
                "long_title": item.get('long_title', ''),
                "cover": item.get('cover', ''),
                "covers": json.dumps(item.get('covers', [])),
                "uri": item.get('uri', ''),
                "oid": history.get('oid', 0),
                "epid": history.get('epid', 0),
                "bvid": history.get('bvid', ''),
                "page": history.get('page', 1),
                "cid": history.get('cid', 0),
                "part": history.get('part', ''),
                "business": business,
                "dt": history.get('dt', 0),
                "videos": item.get('videos', 1),
                "author_name": item.get('author_name', ''),
                "author_face": item.get('author_face', ''),
                "author_mid": item.get('author_mid', 0),
                "view_at": item.get('view_at', 0),
                "progress": item.get('progress', 0),
                "badge": item.get('badge', ''),
                "show_title": item.get('show_title', ''),
                "duration": item.get('duration', 0),
                "current": item.get('current', ''),
                "total": item.get('total', 0),
                "new_desc": item.get('new_desc', ''),
                "is_finish": item.get('is_finish', 0),
                "is_fav": item.get('is_fav', 0),
                "kid": item.get('kid', 0),
                "tag_name": tag_name,  # make sure tag_name is always populated
                "live_status": item.get('live_status', 0),
                "main_category": main_category  # main partition (None unless archive)
            }
            new_data.append(record)
        # Insert in batches of `batch_size` rows.
        for i in range(0, len(new_data), batch_size):
            batch_chunk = new_data[i:i + batch_size]
            inserted_count = batch_insert_data(connection, insert_sql, batch_chunk)
            total_inserted += inserted_count
        return total_inserted
    except Exception as e:
        print(f"处理数据时发生错误: {e}")
        return 0
# 读取标记文件,返回上次导入的日期和文件名
def get_last_imported_file():
    """Read the import marker file.

    Returns:
        tuple: (last_imported_date, last_imported_file), or (None, None)
        when the marker file is missing or unparseable.
    """
    marker_path = get_output_path(config['log_file'])
    if not os.path.exists(marker_path):
        return None, None
    with open(marker_path, 'r', encoding='utf-8') as f:
        try:
            payload = json.load(f)
        except json.JSONDecodeError:
            print("标记文件格式错误,无法解析。")
            return None, None
    return payload.get('last_imported_date'), payload.get('last_imported_file')
# 更新标记文件,记录本次导入的日期和文件名
def update_last_imported_file(last_imported_date, last_imported_file):
    """Persist the date and filename of the most recently imported file."""
    marker_path = get_output_path(config['log_file'])
    with open(marker_path, 'w', encoding='utf-8') as f:
        json.dump(
            {
                'last_imported_date': last_imported_date,
                'last_imported_file': last_imported_file,
            },
            f,
            ensure_ascii=False,
            indent=4,
        )
# 遍历所有按日期分割的文件并导入数据
def import_all_history_files():
    """Walk output/history_by_date/<year>/<month>/<day>.json and import new files.

    Skips files at or before the (date, filename) resume marker, imports the
    rest into the current year's MySQL table, and advances the marker after
    each file.

    Returns:
        dict: {"status": "error"|"success", "message": ...}. Note that
        mid-run import failures are printed but still yield "success".
    """
    base_path = get_base_path()
    full_data_folder = os.path.join(base_path, 'output/history_by_date')  # date-split data root
    # NOTE(review): log_file is computed but appears unused in this function.
    log_file = os.path.join(base_path, config['log_file'])
    total_inserted = 0
    file_insert_counts = {}
    print(f"开始遍历并导入文件夹 '{full_data_folder}' 中的数据...")
    if not os.path.exists(full_data_folder):
        print(f"本地文件夹 '{full_data_folder}' 不存在,无法加载数据。")
        return {"status": "error", "message": f"本地文件夹 '{full_data_folder}' 不存在,无法加载数据。"}
    # The current year's table is the import target; last year's is the template.
    current_year, previous_year = get_years()
    new_table = f"bilibili_history_{current_year}"
    reference_table = f"bilibili_history_{previous_year}"
    connection = connect_to_db()
    try:
        # Create this year's table if it does not exist yet.
        create_new_year_table(connection, new_table, reference_table)
        # Build the INSERT statement for the target table.
        insert_sql = INSERT_DATA.format(table=new_table)
        # Resume point: date and filename of the last imported file.
        last_imported_date, last_imported_file = get_last_imported_file()
        print(f"上次导入的日期: {last_imported_date}, 文件: {last_imported_file}")
        # Walk year/month directories and day files in sorted order.
        for year in sorted(os.listdir(full_data_folder)):
            year_path = os.path.join(full_data_folder, year)
            if os.path.isdir(year_path) and year.isdigit():
                for month in sorted(os.listdir(year_path)):
                    month_path = os.path.join(year_path, month)
                    if os.path.isdir(month_path) and month.isdigit():
                        for day_file in sorted(os.listdir(month_path)):
                            if day_file.endswith('.json'):
                                day_path = os.path.join(month_path, day_file)
                                # Derive the day from the first two digits of the filename.
                                day = ''.join(filter(str.isdigit, day_file))[:2]
                                if len(day) != 2:
                                    print(f"无法解析文件名中的日期: {day_file},跳过文件。")
                                    continue
                                file_date = f"{year}-{month.zfill(2)}-{day.zfill(2)}"
                                # Skip anything at or before the resume marker.
                                if last_imported_date:
                                    if file_date < last_imported_date:
                                        print(f"跳过文件 {day_path},日期 {file_date} 在上次导入日期之前。")
                                        continue
                                    elif file_date == last_imported_date and day_file <= last_imported_file:
                                        print(f"跳过文件 {day_path},文件名 {day_file} 在上次导入文件之前或相同。")
                                        continue
                                # Import this file, then advance the resume marker.
                                inserted_count = import_data_from_json(connection, insert_sql, day_path)
                                total_inserted += inserted_count
                                file_insert_counts[day_path] = inserted_count
                                update_last_imported_file(file_date, day_file)
        # Per-file insert report.
        print("\n每个文件的插入记录:")
        for file, count in file_insert_counts.items():
            print(f"{file}: 插入或更新了 {count} 条数据")
        # Grand total.
        print(f"\n所有文件均已导入数据库,总共插入或更新了 {total_inserted} 条数据。")
    except Exception as e:
        print(f"导入过程中发生错误: {e}")
    finally:
        connection.close()
    return {"status": "success", "message": f"所有文件均已导入数据库,总共插入或更新了 {total_inserted} 条数据。"}
# 供外部调用的接口
def import_history():
    """Public entry point: run the full history import and return its status dict."""
    return import_all_history_files()
# 如果该脚本直接运行,则调用 import_all_history_files()
# Script entry point: run the import and print its outcome.
if __name__ == '__main__':
    result = import_all_history_files()
    if result["status"] == "success":
        print(result["message"])
    else:
        print(f"错误: {result['message']}")
|
2977094657/BilibiliHistoryFetcher
| 10,682
|
scripts/comment_fetcher.py
|
import json
import os
import sqlite3
import time
from datetime import datetime
from typing import Dict, List
import requests
def create_comments_table(connection):
    """Create the comments table, its indexes, and the comment_users table if absent.

    Raises:
        Exception: re-raised after printing any DDL failure.
    """
    statements = [
        """
        CREATE TABLE IF NOT EXISTS comments (
            rpid TEXT PRIMARY KEY,
            uid TEXT NOT NULL,
            message TEXT NOT NULL,
            time INTEGER NOT NULL,
            rank INTEGER NOT NULL,
            rootid TEXT,
            parentid TEXT,
            oid TEXT NOT NULL,
            type INTEGER NOT NULL,
            fetch_time INTEGER NOT NULL
        );
        """,
        "CREATE INDEX IF NOT EXISTS idx_comments_uid ON comments (uid);",
        "CREATE INDEX IF NOT EXISTS idx_comments_time ON comments (time);",
        "CREATE INDEX IF NOT EXISTS idx_comments_fetch_time ON comments (fetch_time);",
        """
        CREATE TABLE IF NOT EXISTS comment_users (
            uid TEXT PRIMARY KEY,
            first_fetch_time INTEGER NOT NULL,
            last_fetch_time INTEGER NOT NULL
        );
        """,
    ]
    try:
        cursor = connection.cursor()
        for statement in statements:
            cursor.execute(statement)
        connection.commit()
        print("评论表和用户表创建成功")
    except Exception as e:
        print(f"创建表时发生错误: {e}")
        raise
def check_user_exists(connection, uid: str) -> bool:
    """Return True when *uid* already has a row in comment_users."""
    cur = connection.cursor()
    cur.execute("SELECT 1 FROM comment_users WHERE uid = ?", (uid,))
    return cur.fetchone() is not None
def update_user_record(connection, uid: str):
    """Insert *uid* into comment_users, or refresh its last_fetch_time if present."""
    now = int(time.time())
    cursor = connection.cursor()
    if check_user_exists(connection, uid):
        # Existing user: only bump the last fetch timestamp.
        cursor.execute(
            """
            UPDATE comment_users
            SET last_fetch_time = ?
            WHERE uid = ?
            """,
            (now, uid),
        )
    else:
        # New user: record both first and last fetch time.
        cursor.execute(
            """
            INSERT INTO comment_users (uid, first_fetch_time, last_fetch_time)
            VALUES (?, ?, ?)
            """,
            (uid, now, now),
        )
    connection.commit()
    print(f"用户记录 {uid} 已更新")
def fetch_comments(uid: str, mode: str = "0", keyword: str = "") -> List[Dict]:
    """Page through the aicu.cc reply-search API and collect all replies for *uid*.

    Stops on API error, an empty page, or the cursor's is_end flag; sleeps
    one second between pages to avoid hammering the API.
    """
    api_url = "https://api.aicu.cc/api/v3/search/getreply"
    collected: List[Dict] = []
    page_no = 1
    per_page = 500  # maximum page size the API accepts
    while True:
        query = {
            "uid": uid,
            "pn": str(page_no),
            "ps": str(per_page),
            "mode": mode,
            "keyword": keyword
        }
        try:
            response = requests.get(api_url, params=query)
            response.raise_for_status()
            payload = response.json()
            if payload["code"] != 0:
                print(f"API返回错误: {payload['message']}")
                break
            page_replies = payload["data"]["replies"]
            if not page_replies:
                break
            collected.extend(page_replies)
            if payload["data"]["cursor"]["is_end"]:
                break
            page_no += 1
            time.sleep(1)  # throttle between pages
        except Exception as e:
            print(f"获取评论数据时发生错误: {e}")
            break
    return collected
def save_comments_to_file(uid: str, comments: List[Dict]):
    """Dump *comments* to output/comment/<uid>/<timestamp>.json and return the path."""
    stamp = int(time.time())
    target_dir = os.path.join("output", "comment", uid)
    os.makedirs(target_dir, exist_ok=True)
    target_path = os.path.join(target_dir, f"{stamp}.json")
    with open(target_path, "w", encoding="utf-8") as fh:
        json.dump(comments, fh, ensure_ascii=False, indent=2)
    print(f"评论数据已保存到: {target_path}")
    return target_path
def create_connection():
    """Open the SQLite comments database, creating its directory as needed.

    Raises:
        sqlite3.Error: re-raised after printing the failure.
    """
    try:
        db_dir = os.path.join("output", "database")
        os.makedirs(db_dir, exist_ok=True)
        db_file = os.path.join(db_dir, "bilibili_comments.db")
        conn = sqlite3.connect(db_file)
        print(f"成功连接到评论数据库: {db_file}")
        return conn
    except sqlite3.Error as e:
        print(f"连接数据库时发生错误: {e}")
        raise
def insert_comments_to_db(connection, comments: List[Dict], uid: str):
    """Upsert the given comment records for *uid* into the comments table."""
    upsert_sql = """
    INSERT OR REPLACE INTO comments (
        rpid, uid, message, time, rank, rootid, parentid, oid, type, fetch_time
    ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    """
    fetched_at = int(time.time())
    rows = []
    for comment in comments:
        parent = comment.get("parent", {})
        dyn = comment.get("dyn", {})
        rows.append((
            comment["rpid"],
            uid,
            comment["message"],
            comment["time"],
            comment["rank"],
            parent.get("rootid"),
            parent.get("parentid"),
            str(dyn.get("oid", 0)),  # stored as text
            dyn.get("type", 0),
            fetched_at,
        ))
    if not rows:
        return
    cursor = connection.cursor()
    try:
        cursor.executemany(upsert_sql, rows)
        connection.commit()
        print(f"成功插入 {len(rows)} 条评论数据")
    except sqlite3.Error as e:
        connection.rollback()
        print(f"插入数据时发生错误: {e}")
def fetch_and_save_comments(uid: str, mode: str = "0", keyword: str = ""):
    """Fetch a user's comments from the remote API, persist them, and summarize.

    Saves the raw JSON to disk and upserts rows into SQLite; the user record
    is updated even when nothing is fetched.

    Returns:
        dict: message, total_count, and latest_comment_time (None when no
        comments were fetched).
    """
    # Fetch comment data from the remote API.
    comments = fetch_comments(uid, mode, keyword)
    # Open the database connection.
    connection = create_connection()
    try:
        create_comments_table(connection)
        # Record the user even when no comments were fetched.
        update_user_record(connection, uid)
        if not comments:
            print("未获取到评论数据")
            return {
                "message": "未获取到评论数据",
                "total_count": 0,
                "latest_comment_time": None
            }
        # Persist the raw payload to a JSON file.
        save_comments_to_file(uid, comments)
        # Persist the rows to the database.
        insert_comments_to_db(connection, comments, uid)
    finally:
        connection.close()
    # Latest comment time = first record's timestamp (presumably the API
    # returns newest-first — NOTE(review): confirm ordering with the API).
    latest_time = comments[0]["time"]
    # Rendered in the server's local timezone.
    latest_time_str = datetime.fromtimestamp(latest_time).strftime("%Y-%m-%d %H:%M:%S")
    return {
        "message": "评论数据获取成功",
        "total_count": len(comments),
        "latest_comment_time": latest_time_str
    }
def query_comments(connection, uid: str, page: int = 1, page_size: int = 20,
                   comment_type: str = "all", keyword: str = "", comment_type_filter: int = None) -> Dict:
    """Query a user's stored comments with filtering and pagination.

    Args:
        connection: SQLite connection.
        uid: user id.
        page: 1-based page number.
        page_size: rows per page.
        comment_type: "all", "root" (top-level), or "reply" (nested).
        keyword: LIKE-matched against the comment body.
        comment_type_filter: raw `type` column filter, e.g. 1 (video), 17 (dynamic).

    Returns:
        dict: total, page, page_size, total_pages, and the comment rows
        (timestamps also rendered as human-readable strings).
    """
    # Assemble the WHERE clause from the active filters.
    where_parts = ["uid = ?"]
    args = [uid]
    if comment_type == "root":
        where_parts.append("parentid IS NULL")
    elif comment_type == "reply":
        where_parts.append("parentid IS NOT NULL")
    if comment_type_filter is not None:
        where_parts.append("type = ?")
        args.append(comment_type_filter)
    if keyword:
        where_parts.append("message LIKE ?")
        args.append(f"%{keyword}%")
    where_clause = " AND ".join(where_parts)
    cursor = connection.cursor()
    # Total match count for pagination.
    cursor.execute(f"SELECT COUNT(*) FROM comments WHERE {where_clause}", args)
    total_count = cursor.fetchone()[0]
    offset = (page - 1) * page_size
    # Fetch the requested page, newest first.
    cursor.execute(
        f"""
        SELECT rpid, uid, message, time, rank, rootid, parentid, oid, type, fetch_time
        FROM comments
        WHERE {where_clause}
        ORDER BY time DESC
        LIMIT ? OFFSET ?
        """,
        args + [page_size, offset],
    )
    columns = ["rpid", "uid", "message", "time", "rank", "rootid",
               "parentid", "oid", "type", "fetch_time"]
    results = []
    for row in cursor.fetchall():
        entry = dict(zip(columns, row))
        # Render the unix timestamps as human-readable local times.
        entry["time_str"] = datetime.fromtimestamp(entry["time"]).strftime("%Y-%m-%d %H:%M:%S")
        entry["fetch_time_str"] = datetime.fromtimestamp(entry["fetch_time"]).strftime("%Y-%m-%d %H:%M:%S")
        results.append(entry)
    return {
        "total": total_count,
        "page": page,
        "page_size": page_size,
        "total_pages": (total_count + page_size - 1) // page_size,
        "comments": results
    }
def get_user_comments(uid: str, page: int = 1, page_size: int = 20,
                      comment_type: str = "all", keyword: str = "", comment_type_filter: int = None) -> Dict:
    """Query a user's comments, fetching them from the API first if unknown.

    Args:
        uid: user id.
        page: 1-based page number.
        page_size: rows per page.
        comment_type: "all", "root" (top-level), or "reply" (nested).
        keyword: LIKE-matched against the comment body.
        comment_type_filter: raw `type` column filter, e.g. 1 (video), 17 (dynamic).

    Returns:
        dict: comment rows plus pagination totals (see query_comments).
    """
    connection = create_connection()
    try:
        create_comments_table(connection)
        # Unknown user: fetch their comments from the API first.
        if not check_user_exists(connection, uid):
            print(f"用户 {uid} 不存在,先获取数据")
            connection.close()  # close before fetch_and_save_comments opens its own connection
            # Fetch and persist the user's comment data.
            fetch_and_save_comments(uid)
            # Reconnect for the query below.
            connection = create_connection()
        # Run the actual query.
        result = query_comments(
            connection=connection,
            uid=uid,
            page=page,
            page_size=page_size,
            comment_type=comment_type,
            keyword=keyword,
            comment_type_filter=comment_type_filter
        )
        return result
    finally:
        connection.close()
if __name__ == "__main__":
    # Example usage: fetch (if needed) and print the first page of comments.
    uid = "17497789"  # replace with a real user id
    result = get_user_comments(uid)
    print(f"获取结果: {result}")
|
281677160/openwrt-package
| 56,014
|
luci-app-homeproxy/htdocs/luci-static/resources/view/homeproxy/client.js
|
/*
* SPDX-License-Identifier: GPL-2.0-only
*
* Copyright (C) 2022-2025 ImmortalWrt.org
*/
'use strict';
'require form';
'require network';
'require poll';
'require rpc';
'require uci';
'require validation';
'require view';
'require homeproxy as hp';
'require tools.firewall as fwtool';
'require tools.widgets as widgets';
/* RPC: list procd services; used to detect whether the sing-box client instance is running. */
const callServiceList = rpc.declare({
	object: 'service',
	method: 'list',
	params: ['name'],
	expect: { '': {} }
});

/* RPC: read a homeproxy ACL domain-list file of the given type. */
const callReadDomainList = rpc.declare({
	object: 'luci.homeproxy',
	method: 'acllist_read',
	params: ['type'],
	expect: { '': {} }
});

/* RPC: overwrite a homeproxy ACL domain-list file of the given type with new content. */
const callWriteDomainList = rpc.declare({
	object: 'luci.homeproxy',
	method: 'acllist_write',
	params: ['type', 'content'],
	expect: { '': {} }
});
/* Resolve to the running flag of the 'sing-box-c' homeproxy instance,
 * or false when the service/instance cannot be looked up. */
function getServiceStatus() {
	return L.resolveDefault(callServiceList('homeproxy'), {}).then((res) => {
		try {
			return res['homeproxy']['instances']['sing-box-c']['running'];
		} catch (e) {
			return false;
		}
	});
}
/* Build the colored HTML status line ("RUNNING" / "NOT RUNNING") for the status bar. */
function renderStatus(isRunning, version) {
	const spanTemp = '<em><span style="color:%s"><strong>%s (sing-box v%s) %s</strong></span></em>';
	return isRunning
		? spanTemp.format('green', _('HomeProxy'), version, _('RUNNING'))
		: spanTemp.format('red', _('HomeProxy'), version, _('NOT RUNNING'));
}
/* Minimal adapter around LuCI's validation module: lets a single datatype
 * check (e.g. 'ip4addr', 'port') be applied to an arbitrary value without
 * instantiating a full form validator. `apply` mimics the validator calling
 * convention by stashing the value on `this` before delegating to the
 * registered type checker; `assert` coerces the result to a boolean. */
let stubValidator = {
	factory: validation,
	apply(type, value, args) {
		if (value != null)
			this.value = value;
		return validation.types[type].apply(this, args);
	},
	assert(condition) {
		return !!condition;
	}
};
return view.extend({
load() {
return Promise.all([
uci.load('homeproxy'),
hp.getBuiltinFeatures(),
network.getHostHints()
]);
},
render(data) {
let m, s, o, ss, so;
let features = data[1],
hosts = data[2]?.hosts;
/* Cache all configured proxy nodes, they will be called multiple times */
let proxy_nodes = {};
uci.sections(data[0], 'node', (res) => {
let nodeaddr = ((res.type === 'direct') ? res.override_address : res.address) || '',
nodeport = ((res.type === 'direct') ? res.override_port : res.port) || '';
proxy_nodes[res['.name']] =
String.format('[%s] %s', res.type, res.label || ((stubValidator.apply('ip6addr', nodeaddr) ?
String.format('[%s]', nodeaddr) : nodeaddr) + ':' + nodeport));
});
m = new form.Map('homeproxy', _('HomeProxy'),
_('The modern ImmortalWrt proxy platform for ARM64/AMD64.'));
s = m.section(form.TypedSection);
s.render = function () {
poll.add(function () {
return L.resolveDefault(getServiceStatus()).then((res) => {
let view = document.getElementById('service_status');
view.innerHTML = renderStatus(res, features.version);
});
});
return E('div', { class: 'cbi-section', id: 'status_bar' }, [
E('p', { id: 'service_status' }, _('Collecting data...'))
]);
}
s = m.section(form.NamedSection, 'config', 'homeproxy');
s.tab('routing', _('Routing Settings'));
o = s.taboption('routing', form.ListValue, 'main_node', _('Main node'));
o.value('nil', _('Disable'));
o.value('urltest', _('URLTest'));
for (let i in proxy_nodes)
o.value(i, proxy_nodes[i]);
o.default = 'nil';
o.depends({'routing_mode': 'custom', '!reverse': true});
o.rmempty = false;
o = s.taboption('routing', hp.CBIStaticList, 'main_urltest_nodes', _('URLTest nodes'),
_('List of nodes to test.'));
for (let i in proxy_nodes)
o.value(i, proxy_nodes[i]);
o.depends('main_node', 'urltest');
o.rmempty = false;
o = s.taboption('routing', form.Value, 'main_urltest_interval', _('Test interval'),
_('The test interval in seconds.'));
o.datatype = 'uinteger';
o.placeholder = '180';
o.depends('main_node', 'urltest');
o = s.taboption('routing', form.Value, 'main_urltest_tolerance', _('Test tolerance'),
_('The test tolerance in milliseconds.'));
o.datatype = 'uinteger';
o.placeholder = '50';
o.depends('main_node', 'urltest');
o = s.taboption('routing', form.ListValue, 'main_udp_node', _('Main UDP node'));
o.value('nil', _('Disable'));
o.value('same', _('Same as main node'));
o.value('urltest', _('URLTest'));
for (let i in proxy_nodes)
o.value(i, proxy_nodes[i]);
o.default = 'nil';
o.depends({'routing_mode': /^((?!custom).)+$/, 'proxy_mode': /^((?!redirect$).)+$/});
o.rmempty = false;
o = s.taboption('routing', hp.CBIStaticList, 'main_udp_urltest_nodes', _('URLTest nodes'),
_('List of nodes to test.'));
for (let i in proxy_nodes)
o.value(i, proxy_nodes[i]);
o.depends('main_udp_node', 'urltest');
o.rmempty = false;
o = s.taboption('routing', form.Value, 'main_udp_urltest_interval', _('Test interval'),
_('The test interval in seconds.'));
o.datatype = 'uinteger';
o.placeholder = '180';
o.depends('main_udp_node', 'urltest');
o = s.taboption('routing', form.Value, 'main_udp_urltest_tolerance', _('Test tolerance'),
_('The test tolerance in milliseconds.'));
o.datatype = 'uinteger';
o.placeholder = '50';
o.depends('main_udp_node', 'urltest');
o = s.taboption('routing', form.Value, 'dns_server', _('DNS server'),
_('Support UDP, TCP, DoH, DoQ, DoT. TCP protocol will be used if not specified.'));
o.value('wan', _('WAN DNS (read from interface)'));
o.value('1.1.1.1', _('CloudFlare Public DNS (1.1.1.1)'));
o.value('208.67.222.222', _('Cisco Public DNS (208.67.222.222)'));
o.value('8.8.8.8', _('Google Public DNS (8.8.8.8)'));
o.value('', '---');
o.value('223.5.5.5', _('Aliyun Public DNS (223.5.5.5)'));
o.value('119.29.29.29', _('Tencent Public DNS (119.29.29.29)'));
o.value('117.50.10.10', _('ThreatBook Public DNS (117.50.10.10)'));
o.default = '8.8.8.8';
o.rmempty = false;
o.depends({'routing_mode': 'custom', '!reverse': true});
o.validate = function(section_id, value) {
if (section_id && !['wan'].includes(value)) {
if (!value)
return _('Expecting: %s').format(_('non-empty value'));
let ipv6_support = this.section.formvalue(section_id, 'ipv6_support');
try {
let url = new URL(value.replace(/^.*:\/\//, 'http://'));
if (stubValidator.apply('hostname', url.hostname))
return true;
else if (stubValidator.apply('ip4addr', url.hostname))
return true;
else if ((ipv6_support === '1') && stubValidator.apply('ip6addr', url.hostname.match(/^\[(.+)\]$/)?.[1]))
return true;
else
return _('Expecting: %s').format(_('valid DNS server address'));
} catch(e) {}
if (!stubValidator.apply((ipv6_support === '1') ? 'ipaddr' : 'ip4addr', value))
return _('Expecting: %s').format(_('valid DNS server address'));
}
return true;
}
o = s.taboption('routing', form.Value, 'china_dns_server', _('China DNS server'),
_('The dns server for resolving China domains. Support UDP, TCP, DoH, DoQ, DoT.'));
o.value('wan', _('WAN DNS (read from interface)'));
o.value('223.5.5.5', _('Aliyun Public DNS (223.5.5.5)'));
o.value('210.2.4.8', _('CNNIC Public DNS (210.2.4.8)'));
o.value('119.29.29.29', _('Tencent Public DNS (119.29.29.29)'));
o.value('117.50.10.10', _('ThreatBook Public DNS (117.50.10.10)'));
o.depends('routing_mode', 'bypass_mainland_china');
o.default = '223.5.5.5';
o.rmempty = false;
o.validate = function(section_id, value) {
if (section_id && !['wan'].includes(value)) {
if (!value)
return _('Expecting: %s').format(_('non-empty value'));
try {
let url = new URL(value.replace(/^.*:\/\//, 'http://'));
if (stubValidator.apply('hostname', url.hostname))
return true;
else if (stubValidator.apply('ip4addr', url.hostname))
return true;
else if (stubValidator.apply('ip6addr', url.hostname.match(/^\[(.+)\]$/)?.[1]))
return true;
else
return _('Expecting: %s').format(_('valid DNS server address'));
} catch(e) {}
if (!stubValidator.apply('ipaddr', value))
return _('Expecting: %s').format(_('valid DNS server address'));
}
return true;
}
o = s.taboption('routing', form.ListValue, 'routing_mode', _('Routing mode'));
o.value('gfwlist', _('GFWList'));
o.value('bypass_mainland_china', _('Bypass mainland China'));
o.value('proxy_mainland_china', _('Only proxy mainland China'));
o.value('custom', _('Custom routing'));
o.value('global', _('Global'));
o.default = 'bypass_mainland_china';
o.rmempty = false;
o.onchange = function(ev, section_id, value) {
if (section_id && value === 'custom')
this.map.save(null, true);
}
o = s.taboption('routing', form.Value, 'routing_port', _('Routing ports'),
_('Specify target ports to be proxied. Multiple ports must be separated by commas.'));
o.value('', _('All ports'));
o.value('common', _('Common ports only (bypass P2P traffic)'));
o.validate = function(section_id, value) {
if (section_id && value && value !== 'common') {
let ports = [];
for (let i of value.split(',')) {
if (!stubValidator.apply('port', i) && !stubValidator.apply('portrange', i))
return _('Expecting: %s').format(_('valid port value'));
if (ports.includes(i))
return _('Port %s alrealy exists!').format(i);
ports = ports.concat(i);
}
}
return true;
}
o = s.taboption('routing', form.ListValue, 'proxy_mode', _('Proxy mode'));
o.value('redirect', _('Redirect TCP'));
if (features.hp_has_tproxy)
o.value('redirect_tproxy', _('Redirect TCP + TProxy UDP'));
if (features.hp_has_ip_full && features.hp_has_tun) {
o.value('redirect_tun', _('Redirect TCP + Tun UDP'));
o.value('tun', _('Tun TCP/UDP'));
} else {
o.description = _('To enable Tun support, you need to install <code>ip-full</code> and <code>kmod-tun</code>');
}
o.default = 'redirect_tproxy';
o.rmempty = false;
o = s.taboption('routing', form.Flag, 'ipv6_support', _('IPv6 support'));
o.default = o.enabled;
o.rmempty = false;
/* Custom routing settings start */
/* Routing settings start */
o = s.taboption('routing', form.SectionValue, '_routing', form.NamedSection, 'routing', 'homeproxy');
o.depends('routing_mode', 'custom');
ss = o.subsection;

// Tun TCP/IP stack implementation; gVisor choices are only offered when the
// binary was built with gVisor support.
so = ss.option(form.ListValue, 'tcpip_stack', _('TCP/IP stack'),
	_('TCP/IP stack.'));
if (features.with_gvisor) {
	so.value('mixed', _('Mixed'));
	so.value('gvisor', _('gVisor'));
}
so.value('system', _('System'));
so.default = 'system';
so.depends('homeproxy.config.proxy_mode', 'redirect_tun');
so.depends('homeproxy.config.proxy_mode', 'tun');
so.rmempty = false;
so.onchange = function(ev, section_id, value) {
	// Swap the option description in place to match the selected stack.
	let desc = ev.target.nextElementSibling;
	if (value === 'mixed')
		desc.innerHTML = _('Mixed <code>system</code> TCP stack and <code>gVisor</code> UDP stack.');
	else if (value === 'gvisor')
		desc.innerHTML = _('Based on google/gvisor.');
	else if (value === 'system')
		desc.innerHTML = _('Less compatibility and sometimes better performance.');
}
// Endpoint-independent NAT only applies to the gVisor-capable tun stacks.
so = ss.option(form.Flag, 'endpoint_independent_nat', _('Enable endpoint-independent NAT'),
_('Performance may degrade slightly, so it is not recommended to enable on when it is not needed.'));
so.default = so.enabled;
so.depends('tcpip_stack', 'mixed');
so.depends('tcpip_stack', 'gvisor');
so.rmempty = false;
// UDP NAT expiry; only meaningful when UDP traffic is being proxied.
so = ss.option(form.Value, 'udp_timeout', _('UDP NAT expiration time'),
_('In seconds.'));
so.datatype = 'uinteger';
so.placeholder = '300';
so.depends('homeproxy.config.proxy_mode', 'redirect_tproxy');
so.depends('homeproxy.config.proxy_mode', 'redirect_tun');
so.depends('homeproxy.config.proxy_mode', 'tun');
so = ss.option(form.Flag, 'bypass_cn_traffic', _('Bypass CN traffic'),
_('Bypass mainland China traffic via firewall rules by default.'));
so.rmempty = false;
// Global strategy for resolving sniffed/requested domains before routing.
so = ss.option(form.ListValue, 'domain_strategy', _('Domain strategy'),
_('If set, the requested domain name will be resolved to IP before routing.'));
for (let i in hp.dns_strategy)
so.value(i, hp.dns_strategy[i]);
so = ss.option(form.Flag, 'sniff_override', _('Override destination'),
_('Override the connection destination address with the sniffed domain.'));
so.default = so.enabled;
so.rmempty = false;
// Fallback outbound for traffic no routing rule matched.
so = ss.option(form.ListValue, 'default_outbound', _('Default outbound'),
_('Default outbound for connections not matched by any routing rules.'));
so.load = function(section_id) {
// Rebuild the choice list on each load so newly added nodes appear.
delete this.keylist;
delete this.vallist;
this.value('nil', _('Disable (the service)'));
this.value('direct-out', _('Direct'));
this.value('block-out', _('Block'));
uci.sections(data[0], 'routing_node', (res) => {
if (res.enabled === '1')
this.value(res['.name'], res.label);
});
return this.super('load', section_id);
}
so.default = 'nil';
so.rmempty = false;
// Fallback DNS server used to resolve server addresses of outbounds.
so = ss.option(form.ListValue, 'default_outbound_dns', _('Default outbound DNS'),
_('Default DNS server for resolving domain name in the server address.'));
so.load = function(section_id) {
delete this.keylist;
delete this.vallist;
this.value('default-dns', _('Default DNS (issued by WAN)'));
this.value('system-dns', _('System DNS'));
uci.sections(data[0], 'dns_server', (res) => {
if (res.enabled === '1')
this.value(res['.name'], res.label);
});
return this.super('load', section_id);
}
so.default = 'default-dns';
so.rmempty = false;
/* Routing settings end */
/* Routing nodes start */
// Grid of routing-node (outbound) sections; one row per configured node.
s.tab('routing_node', _('Routing Nodes'));
o = s.taboption('routing_node', form.SectionValue, '_routing_node', form.GridSection, 'routing_node');
o.depends('routing_mode', 'custom');
ss = o.subsection;
ss.addremove = true;
ss.rowcolors = true;
ss.sortable = true;
ss.nodescriptions = true;
ss.modaltitle = L.bind(hp.loadModalTitle, this, _('Routing node'), _('Add a routing node'), data[0]);
ss.sectiontitle = L.bind(hp.loadDefaultLabel, this, data[0]);
ss.renderSectionAdd = L.bind(hp.renderSectionAdd, this, ss);
// Human-readable label; must be unique among routing_node sections.
so = ss.option(form.Value, 'label', _('Label'));
so.load = L.bind(hp.loadDefaultLabel, this, data[0]);
so.validate = L.bind(hp.validateUniqueValue, this, data[0], 'routing_node', 'label');
so.modalonly = true;
so = ss.option(form.Flag, 'enabled', _('Enable'));
so.default = so.enabled;
so.rmempty = false;
so.editable = true;
// The underlying proxy node, or the special 'urltest' selector group.
so = ss.option(form.ListValue, 'node', _('Node'),
_('Outbound node'));
so.value('urltest', _('URLTest'));
for (let i in proxy_nodes)
so.value(i, proxy_nodes[i]);
so.validate = L.bind(hp.validateUniqueValue, this, data[0], 'routing_node', 'node');
so.editable = true;
// Per-node DNS server for resolving the node's own server address.
so = ss.option(form.ListValue, 'domain_resolver', _('Domain resolver'),
_('For resolving domain name in the server address.'));
so.load = function(section_id) {
delete this.keylist;
delete this.vallist;
this.value('', _('Default'));
this.value('default-dns', _('Default DNS (issued by WAN)'));
this.value('system-dns', _('System DNS'));
uci.sections(data[0], 'dns_server', (res) => {
if (res.enabled === '1')
this.value(res['.name'], res.label);
});
return this.super('load', section_id);
}
// Dial fields do not apply to the URLTest selector itself.
so.depends({'node': 'urltest', '!reverse': true});
so.modalonly = true;
so = ss.option(form.ListValue, 'domain_strategy', _('Domain strategy'),
_('The domain strategy for resolving the domain name in the address.'));
for (let i in hp.dns_strategy)
so.value(i, hp.dns_strategy[i]);
so.depends({'node': 'urltest', '!reverse': true});
so.modalonly = true;
so = ss.option(widgets.DeviceSelect, 'bind_interface', _('Bind interface'),
_('The network interface to bind to.'));
so.multiple = false;
so.noaliases = true;
// Only shown when no detour outbound is set and node is not a URLTest group.
so.depends({'outbound': '', 'node': /^((?!urltest$).)+$/});
so.modalonly = true;
// Upstream outbound (detour) for this node; when set, the node's own dial
// fields are ignored and traffic is chained through the selected node.
so = ss.option(form.ListValue, 'outbound', _('Outbound'),
	_('The tag of the upstream outbound.<br/>Other dial fields will be ignored when enabled.'));
so.load = function(section_id) {
	// Rebuild choices each load; a node may not detour through itself.
	delete this.keylist;
	delete this.vallist;
	this.value('', _('Direct'));
	uci.sections(data[0], 'routing_node', (res) => {
		if (res['.name'] !== section_id && res.enabled === '1')
			this.value(res['.name'], res.label);
	});
	return this.super('load', section_id);
}
so.validate = function(section_id, value) {
	if (section_id && value) {
		let node = this.section.formvalue(section_id, 'node');
		let conflict = false;
		// Use strict equality throughout, matching the rest of the file.
		uci.sections(data[0], 'routing_node', (res) => {
			if (res['.name'] !== section_id) {
				// Direct cycle: the chosen upstream already detours via us.
				if (res.outbound === section_id && res['.name'] === value)
					conflict = true;
				// Indirect cycle via a URLTest group that contains our node.
				else if (res.node === 'urltest' && res.urltest_nodes?.includes(node) && res['.name'] === value)
					conflict = true;
			}
		});
		if (conflict)
			return _('Recursive outbound detected!');
	}
	return true;
}
so.depends({'node': 'urltest', '!reverse': true});
so.editable = true;
// Candidate nodes for the URLTest selector; at least one is required.
so = ss.option(hp.CBIStaticList, 'urltest_nodes', _('URLTest nodes'),
_('List of nodes to test.'));
for (let i in proxy_nodes)
so.value(i, proxy_nodes[i]);
so.depends('node', 'urltest');
so.validate = function(section_id) {
let value = this.section.formvalue(section_id, 'urltest_nodes');
if (section_id && !value.length)
return _('Expecting: %s').format(_('non-empty value'));
return true;
}
so.modalonly = true;
// Probe URL; must parse as a URL with a hostname.
so = ss.option(form.Value, 'urltest_url', _('Test URL'),
_('The URL to test.'));
so.placeholder = 'https://www.gstatic.com/generate_204';
so.validate = function(section_id, value) {
if (section_id && value) {
try {
let url = new URL(value);
if (!url.hostname)
return _('Expecting: %s').format(_('valid URL'));
}
catch(e) {
return _('Expecting: %s').format(_('valid URL'));
}
}
return true;
}
so.depends('node', 'urltest');
so.modalonly = true;
// Probe interval; must not exceed the idle timeout, otherwise the group
// would be considered idle before the next probe fires.
so = ss.option(form.Value, 'urltest_interval', _('Test interval'),
	_('The test interval in seconds.'));
so.datatype = 'uinteger';
so.placeholder = '180';
so.validate = function(section_id, value) {
	if (section_id && value) {
		// Fix: the sibling option is named 'urltest_idle_timeout'; querying
		// 'idle_timeout' always returned null, so the check silently
		// compared against the default instead of the configured value.
		let idle_timeout = this.section.formvalue(section_id, 'urltest_idle_timeout') || '1800';
		if (parseInt(value) > parseInt(idle_timeout))
			return _('Test interval must be less or equal than idle timeout.');
	}
	return true;
}
so.depends('node', 'urltest');
so.modalonly = true;
// Latency delta (ms) tolerated before switching to a faster node.
so = ss.option(form.Value, 'urltest_tolerance', _('Test tolerance'),
_('The test tolerance in milliseconds.'));
so.datatype = 'uinteger';
so.placeholder = '50';
so.depends('node', 'urltest');
so.modalonly = true;
so = ss.option(form.Value, 'urltest_idle_timeout', _('Idle timeout'),
_('The idle timeout in seconds.'));
so.datatype = 'uinteger';
so.placeholder = '1800';
so.depends('node', 'urltest');
so.modalonly = true;
so = ss.option(form.Flag, 'urltest_interrupt_exist_connections', _('Interrupt existing connections'),
_('Interrupt existing connections when the selected outbound has changed.'));
so.depends('node', 'urltest');
so.modalonly = true;
/* Routing nodes end */
/* Routing rules start */
// Grid of routing_rule sections; modal dialog is split into four tabs.
s.tab('routing_rule', _('Routing Rules'));
o = s.taboption('routing_rule', form.SectionValue, '_routing_rule', form.GridSection, 'routing_rule');
o.depends('routing_mode', 'custom');
ss = o.subsection;
ss.addremove = true;
ss.rowcolors = true;
ss.sortable = true;
ss.nodescriptions = true;
ss.modaltitle = L.bind(hp.loadModalTitle, this, _('Routing rule'), _('Add a routing rule'), data[0]);
ss.sectiontitle = L.bind(hp.loadDefaultLabel, this, data[0]);
ss.renderSectionAdd = L.bind(hp.renderSectionAdd, this, ss);
ss.tab('field_other', _('Other fields'));
ss.tab('field_host', _('Host/IP fields'));
ss.tab('field_port', _('Port fields'));
ss.tab('fields_process', _('Process fields'));
so = ss.taboption('field_other', form.Value, 'label', _('Label'));
so.load = L.bind(hp.loadDefaultLabel, this, data[0]);
so.validate = L.bind(hp.validateUniqueValue, this, data[0], 'routing_rule', 'label');
so.modalonly = true;
so = ss.taboption('field_other', form.Flag, 'enabled', _('Enable'));
so.default = so.enabled;
so.rmempty = false;
so.editable = true;
// Only the 'default' match mode exists; the field is read-only and serves
// to document the matching logic.
so = ss.taboption('field_other', form.ListValue, 'mode', _('Mode'),
_('The default rule uses the following matching logic:<br/>' +
'<code>(domain || domain_suffix || domain_keyword || domain_regex || ip_cidr || ip_is_private)</code> &&<br/>' +
'<code>(port || port_range)</code> &&<br/>' +
'<code>(source_ip_cidr || source_ip_is_private)</code> &&<br/>' +
'<code>(source_port || source_port_range)</code> &&<br/>' +
'<code>other fields</code>.<br/>' +
'Additionally, included rule sets can be considered merged rather than as a single rule sub-item.'));
so.value('default', _('Default'));
so.default = 'default';
so.rmempty = false;
so.readonly = true;
so = ss.taboption('field_other', form.ListValue, 'ip_version', _('IP version'),
_('4 or 6. Not limited if empty.'));
so.value('4', _('IPv4'));
so.value('6', _('IPv6'));
so.value('', _('Both'));
so.modalonly = true;
so = ss.taboption('field_other', form.MultiValue, 'protocol', _('Protocol'),
_('Sniffed protocol, see <a target="_blank" href="https://sing-box.sagernet.org/configuration/route/sniff/">Sniff</a> for details.'));
so.value('bittorrent', _('BitTorrent'));
so.value('dns', _('DNS'));
so.value('dtls', _('DTLS'));
so.value('http', _('HTTP'));
so.value('quic', _('QUIC'));
so.value('rdp', _('RDP'));
so.value('ssh', _('SSH'));
so.value('stun', _('STUN'));
so.value('tls', _('TLS'));
so = ss.taboption('field_other', form.Value, 'client', _('Client'),
_('Sniffed client type (QUIC client type or SSH client name).'));
so.value('chromium', _('Chromium / Cronet'));
so.value('firefox', _('Firefox / uquic firefox'));
so.value('quic-go', _('quic-go / uquic chrome'));
so.value('safari', _('Safari / Apple Network API'));
so.depends('protocol', 'quic');
so.depends('protocol', 'ssh');
so.modalonly = true;
so = ss.taboption('field_other', form.ListValue, 'network', _('Network'));
so.value('tcp', _('TCP'));
so.value('udp', _('UDP'));
so.value('', _('Both'));
so = ss.taboption('field_other', form.DynamicList, 'user', _('User'),
_('Match user name.'));
so.modalonly = true;
so = ss.taboption('field_other', hp.CBIStaticList, 'rule_set', _('Rule set'),
_('Match rule set.'));
so.load = function(section_id) {
delete this.keylist;
delete this.vallist;
uci.sections(data[0], 'ruleset', (res) => {
if (res.enabled === '1')
this.value(res['.name'], res.label);
});
return this.super('load', section_id);
}
so.modalonly = true;
so = ss.taboption('field_other', form.Flag, 'rule_set_ip_cidr_match_source', _('Rule set IP CIDR as source IP'),
_('Make IP CIDR in rule set used to match the source IP.'));
so.modalonly = true;
so = ss.taboption('field_other', form.Flag, 'invert', _('Invert'),
_('Invert match result.'));
so.modalonly = true;
so = ss.taboption('field_other', form.ListValue, 'action', _('Action'));
so.value('route', _('Route'));
so.value('route-options', _('Route options'));
so.value('reject', _('Reject'));
so.value('resolve', _('Resolve'));
so.default = 'route';
so.rmempty = false;
so.editable = true;
// Target outbound for the 'route' action.
so = ss.taboption('field_other', form.ListValue, 'outbound', _('Outbound'),
_('Tag of the target outbound.'));
so.load = function(section_id) {
delete this.keylist;
delete this.vallist;
this.value('direct-out', _('Direct'));
uci.sections(data[0], 'routing_node', (res) => {
if (res.enabled === '1')
this.value(res['.name'], res.label);
});
return this.super('load', section_id);
}
so.rmempty = false;
so.depends('action', 'route');
so.editable = true;
// The following options apply to both 'route' and 'route-options' actions.
so = ss.taboption('field_other', form.Value, 'override_address', _('Override address'),
_('Override the connection destination address.'));
so.datatype = 'ipaddr';
so.depends('action', 'route');
so.depends('action', 'route-options');
so.modalonly = true;
so = ss.taboption('field_other', form.Value, 'override_port', _('Override port'),
_('Override the connection destination port.'));
so.datatype = 'port';
so.depends('action', 'route');
so.depends('action', 'route-options');
so.modalonly = true;
so = ss.taboption('field_other', form.Flag, 'udp_disable_domain_unmapping', _('Disable UDP domain unmapping'),
_('If enabled, for UDP proxy requests addressed to a domain, the original packet address will be sent in the response instead of the mapped domain.'));
so.depends('action', 'route');
so.depends('action', 'route-options');
so.modalonly = true;
so = ss.taboption('field_other', form.Flag, 'udp_connect', _('connect UDP connections'),
_('If enabled, attempts to connect UDP connection to the destination instead of listen.'));
so.depends('action', 'route');
so.depends('action', 'route-options');
so.modalonly = true;
so = ss.taboption('field_other', form.Value, 'udp_timeout', _('UDP timeout'),
_('Timeout for UDP connections.<br/>Setting a larger value than the UDP timeout in inbounds will have no effect.'));
so.datatype = 'uinteger';
so.depends('action', 'route');
so.depends('action', 'route-options');
so.modalonly = true;
// TLS record fragmentation is preferred over full handshake fragmentation.
so = ss.taboption('field_other', form.Flag, 'tls_record_fragment', _('TLS record fragment'),
_('Fragment TLS handshake into multiple TLS records.'));
so.depends('action', 'route');
so.depends('action', 'route-options');
so.modalonly = true;
so = ss.taboption('field_other', form.Flag, 'tls_fragment', _('TLS fragment'),
_('Fragment TLS handshakes. Due to poor performance, try <code>%s</code> first.').format(
_('TLS record fragment')));
so.depends('action', 'route');
so.depends('action', 'route-options');
so.modalonly = true;
so = ss.taboption('field_other', form.Value, 'tls_fragment_fallback_delay', _('Fragment fallback delay'),
_('The fallback value in milliseconds used when TLS segmentation cannot automatically determine the wait time.'));
so.datatype = 'uinteger';
so.placeholder = '500';
so.depends('tls_fragment', '1');
so.modalonly = true;
// Options for the 'resolve' action: which DNS server/strategy to use.
so = ss.taboption('field_other', form.ListValue, 'resolve_server', _('DNS server'),
_('Specifies DNS server tag to use instead of selecting through DNS routing.'));
so.load = function(section_id) {
delete this.keylist;
delete this.vallist;
this.value('', _('Default'));
this.value('default-dns', _('Default DNS (issued by WAN)'));
this.value('system-dns', _('System DNS'));
uci.sections(data[0], 'dns_server', (res) => {
if (res.enabled === '1')
this.value(res['.name'], res.label);
});
return this.super('load', section_id);
}
so.depends('action', 'resolve');
so.modalonly = true;
// Options for the 'reject' action.
so = ss.taboption('field_other', form.ListValue, 'reject_method', _('Method'));
so.value('default', _('Reply with TCP RST / ICMP port unreachable'));
so.value('drop', _('Drop packets'));
so.depends('action', 'reject');
so.modalonly = true;
so = ss.taboption('field_other', form.Flag, 'reject_no_drop', _('Don\'t drop packets'),
_('<code>%s</code> will be temporarily overwritten to <code>%s</code> after 50 triggers in 30s if not enabled.').format(
_('Method'), _('Drop packets')));
so.depends('reject_method', 'default');
so.modalonly = true;
so = ss.taboption('field_other', form.ListValue, 'resolve_strategy', _('Resolve strategy'),
_('Domain strategy for resolving the domain names.'));
for (let i in hp.dns_strategy)
so.value(i, hp.dns_strategy[i]);
so.depends('action', 'resolve');
so.modalonly = true;
so = ss.taboption('field_other', form.Flag, 'resolve_disable_cache', _('Disable DNS cache'),
_('Disable DNS cache in this query.'));
so.depends('action', 'resolve');
so.modalonly = true;
so = ss.taboption('field_other', form.Value, 'resolve_rewrite_ttl', _('Rewrite TTL'),
_('Rewrite TTL in DNS responses.'));
so.datatype = 'uinteger';
so.depends('action', 'resolve');
so.modalonly = true;
so = ss.taboption('field_other', form.Value, 'resolve_client_subnet', _('EDNS Client subnet'),
_('Append a <code>edns0-subnet</code> OPT extra record with the specified IP prefix to every query by default.<br/>' +
'If value is an IP address instead of prefix, <code>/32</code> or <code>/128</code> will be appended automatically.'));
so.datatype = 'or(cidr, ipaddr)';
so.depends('action', 'resolve');
so.modalonly = true;
// Host/IP match fields.
so = ss.taboption('field_host', form.DynamicList, 'domain', _('Domain name'),
_('Match full domain.'));
so.datatype = 'hostname';
so.modalonly = true;
so = ss.taboption('field_host', form.DynamicList, 'domain_suffix', _('Domain suffix'),
_('Match domain suffix.'));
so.modalonly = true;
so = ss.taboption('field_host', form.DynamicList, 'domain_keyword', _('Domain keyword'),
_('Match domain using keyword.'));
so.modalonly = true;
so = ss.taboption('field_host', form.DynamicList, 'domain_regex', _('Domain regex'),
_('Match domain using regular expression.'));
so.modalonly = true;
so = ss.taboption('field_host', form.DynamicList, 'source_ip_cidr', _('Source IP CIDR'),
_('Match source IP CIDR.'));
so.datatype = 'or(cidr, ipaddr)';
so.modalonly = true;
so = ss.taboption('field_host', form.Flag, 'source_ip_is_private', _('Match private source IP'));
so.modalonly = true;
so = ss.taboption('field_host', form.DynamicList, 'ip_cidr', _('IP CIDR'),
_('Match IP CIDR.'));
so.datatype = 'or(cidr, ipaddr)';
so.modalonly = true;
so = ss.taboption('field_host', form.Flag, 'ip_is_private', _('Match private IP'));
so.modalonly = true;
// Port match fields.
so = ss.taboption('field_port', form.DynamicList, 'source_port', _('Source port'),
_('Match source port.'));
so.datatype = 'port';
so.modalonly = true;
so = ss.taboption('field_port', form.DynamicList, 'source_port_range', _('Source port range'),
_('Match source port range. Format as START:/:END/START:END.'));
so.validate = hp.validatePortRange;
so.modalonly = true;
so = ss.taboption('field_port', form.DynamicList, 'port', _('Port'),
_('Match port.'));
so.datatype = 'port';
so.modalonly = true;
so = ss.taboption('field_port', form.DynamicList, 'port_range', _('Port range'),
_('Match port range. Format as START:/:END/START:END.'));
so.validate = hp.validatePortRange;
so.modalonly = true;
// Process match fields.
so = ss.taboption('fields_process', form.DynamicList, 'process_name', _('Process name'),
_('Match process name.'));
so.modalonly = true;
so = ss.taboption('fields_process', form.DynamicList, 'process_path', _('Process path'),
_('Match process path.'));
so.modalonly = true;
so = ss.taboption('fields_process', form.DynamicList, 'process_path_regex', _('Process path (regex)'),
_('Match process path using regular expression.'));
so.modalonly = true;
/* Routing rules end */
/* DNS settings start */
s.tab('dns', _('DNS Settings'));
o = s.taboption('dns', form.SectionValue, '_dns', form.NamedSection, 'dns', 'homeproxy');
o.depends('routing_mode', 'custom');
ss = o.subsection;
so = ss.option(form.ListValue, 'default_strategy', _('Default DNS strategy'),
_('The DNS strategy for resolving the domain name in the address.'));
for (let i in hp.dns_strategy)
so.value(i, hp.dns_strategy[i]);
so = ss.option(form.ListValue, 'default_server', _('Default DNS server'));
so.load = function(section_id) {
delete this.keylist;
delete this.vallist;
this.value('default-dns', _('Default DNS (issued by WAN)'));
this.value('system-dns', _('System DNS'));
uci.sections(data[0], 'dns_server', (res) => {
if (res.enabled === '1')
this.value(res['.name'], res.label);
});
return this.super('load', section_id);
}
so.default = 'default-dns';
so.rmempty = false;
so = ss.option(form.Flag, 'disable_cache', _('Disable DNS cache'));
so = ss.option(form.Flag, 'disable_cache_expire', _('Disable cache expire'));
so.depends('disable_cache', '0');
so = ss.option(form.Flag, 'independent_cache', _('Independent cache per server'),
_('Make each DNS server\'s cache independent for special purposes. If enabled, will slightly degrade performance.'));
so.depends('disable_cache', '0');
so = ss.option(form.Value, 'client_subnet', _('EDNS Client subnet'),
_('Append a <code>edns0-subnet</code> OPT extra record with the specified IP prefix to every query by default.<br/>' +
'If value is an IP address instead of prefix, <code>/32</code> or <code>/128</code> will be appended automatically.'));
so.datatype = 'or(cidr, ipaddr)';
so = ss.option(form.Flag, 'cache_file_store_rdrc', _('Store RDRC'),
_('Store rejected DNS response cache.<br/>' +
'The check results of <code>Address filter DNS rule items</code> will be cached until expiration.'));
so = ss.option(form.Value, 'cache_file_rdrc_timeout', _('RDRC timeout'),
_('Timeout of rejected DNS response cache in seconds. <code>604800 (7d)</code> is used by default.'));
so.datatype = 'uinteger';
so.depends('cache_file_store_rdrc', '1');
/* DNS settings end */
/* DNS servers start */
// Grid of user-defined DNS server sections.
s.tab('dns_server', _('DNS Servers'));
o = s.taboption('dns_server', form.SectionValue, '_dns_server', form.GridSection, 'dns_server');
o.depends('routing_mode', 'custom');
ss = o.subsection;
ss.addremove = true;
ss.rowcolors = true;
ss.sortable = true;
ss.nodescriptions = true;
ss.modaltitle = L.bind(hp.loadModalTitle, this, _('DNS server'), _('Add a DNS server'), data[0]);
ss.sectiontitle = L.bind(hp.loadDefaultLabel, this, data[0]);
ss.renderSectionAdd = L.bind(hp.renderSectionAdd, this, ss);
so = ss.option(form.Value, 'label', _('Label'));
so.load = L.bind(hp.loadDefaultLabel, this, data[0]);
so.validate = L.bind(hp.validateUniqueValue, this, data[0], 'dns_server', 'label');
so.modalonly = true;
so = ss.option(form.Flag, 'enabled', _('Enable'));
so.default = so.enabled;
so.rmempty = false;
so.editable = true;
// Transport protocol of the DNS server.
so = ss.option(form.ListValue, 'type', _('Type'));
so.value('udp', _('UDP'));
so.value('tcp', _('TCP'));
so.value('tls', _('TLS'));
so.value('https', _('HTTPS'));
so.value('http3', _('HTTP3'));
so.value('quic', _('QUIC'));
so.default = 'udp';
so.rmempty = false;
so = ss.option(form.Value, 'server', _('Address'),
_('The address of the dns server.'));
so.datatype = 'or(hostname, ipaddr)';
so.rmempty = false;
so = ss.option(form.Value, 'server_port', _('Port'),
_('The port of the DNS server.'));
so.datatype = 'port';
// DoH-specific options (HTTPS / HTTP3 only).
so = ss.option(form.Value, 'path', _('Path'),
_('The path of the DNS server.'));
so.placeholder = '/dns-query';
so.depends('type', 'https');
so.depends('type', 'http3');
so.modalonly = true;
so = ss.option(form.DynamicList, 'headers', _('Headers'),
_('Additional headers to be sent to the DNS server.'));
so.depends('type', 'https');
so.depends('type', 'http3');
so.modalonly = true;
// SNI for all TLS-based transports.
so = ss.option(form.Value, 'tls_sni', _('TLS SNI'),
_('Used to verify the hostname on the returned certificates.'));
so.depends('type', 'tls');
so.depends('type', 'https');
so.depends('type', 'http3');
so.depends('type', 'quic');
so.modalonly = true;
// DNS server used to resolve a domain in this server's own address field.
// (Fixed description typo: "a another server" -> "another server".)
so = ss.option(form.ListValue, 'address_resolver', _('Address resolver'),
	_('Tag of another server to resolve the domain name in the address. Required if address contains domain.'));
so.load = function(section_id) {
	delete this.keylist;
	delete this.vallist;
	this.value('', _('None'));
	this.value('default-dns', _('Default DNS (issued by WAN)'));
	this.value('system-dns', _('System DNS'));
	uci.sections(data[0], 'dns_server', (res) => {
		// A server must not resolve its own address.
		if (res['.name'] !== section_id && res.enabled === '1')
			this.value(res['.name'], res.label);
	});
	return this.super('load', section_id);
}
so.validate = function(section_id, value) {
	if (section_id && value) {
		let conflict = false;
		// Use strict equality, matching the rest of the file; reject a
		// two-server resolver loop (A resolves via B, B resolves via A).
		uci.sections(data[0], 'dns_server', (res) => {
			if (res['.name'] !== section_id)
				if (res.address_resolver === section_id && res['.name'] === value)
					conflict = true;
		});
		if (conflict)
			return _('Recursive resolver detected!');
	}
	return true;
}
so.modalonly = true;

// Domain strategy used together with the address resolver above.
so = ss.option(form.ListValue, 'address_strategy', _('Address strategy'),
	_('The domain strategy for resolving the domain name in the address.'));
for (let i in hp.dns_strategy)
	so.value(i, hp.dns_strategy[i]);
so.depends({'address_resolver': '', '!reverse': true});
so.modalonly = true;
// Outbound through which queries to this DNS server are dialed.
so = ss.option(form.ListValue, 'outbound', _('Outbound'),
_('Tag of an outbound for connecting to the dns server.'));
so.load = function(section_id) {
delete this.keylist;
delete this.vallist;
this.value('direct-out', _('Direct'));
uci.sections(data[0], 'routing_node', (res) => {
if (res.enabled === '1')
this.value(res['.name'], res.label);
});
return this.super('load', section_id);
}
so.default = 'direct-out';
so.rmempty = false;
so.editable = true;
/* DNS servers end */
/* DNS rules start */
// Grid of dns_rule sections; modal dialog is split into four tabs.
s.tab('dns_rule', _('DNS Rules'));
o = s.taboption('dns_rule', form.SectionValue, '_dns_rule', form.GridSection, 'dns_rule');
o.depends('routing_mode', 'custom');
ss = o.subsection;
ss.addremove = true;
ss.rowcolors = true;
ss.sortable = true;
ss.nodescriptions = true;
ss.modaltitle = L.bind(hp.loadModalTitle, this, _('DNS rule'), _('Add a DNS rule'), data[0]);
ss.sectiontitle = L.bind(hp.loadDefaultLabel, this, data[0]);
ss.renderSectionAdd = L.bind(hp.renderSectionAdd, this, ss);
ss.tab('field_other', _('Other fields'));
ss.tab('field_host', _('Host/IP fields'));
ss.tab('field_port', _('Port fields'));
ss.tab('fields_process', _('Process fields'));
so = ss.taboption('field_other', form.Value, 'label', _('Label'));
so.load = L.bind(hp.loadDefaultLabel, this, data[0]);
so.validate = L.bind(hp.validateUniqueValue, this, data[0], 'dns_rule', 'label');
so.modalonly = true;
so = ss.taboption('field_other', form.Flag, 'enabled', _('Enable'));
so.default = so.enabled;
so.rmempty = false;
so.editable = true;
// Only the 'default' match mode exists; read-only, documents the logic.
so = ss.taboption('field_other', form.ListValue, 'mode', _('Mode'),
_('The default rule uses the following matching logic:<br/>' +
'<code>(domain || domain_suffix || domain_keyword || domain_regex)</code> &&<br/>' +
'<code>(port || port_range)</code> &&<br/>' +
'<code>(source_ip_cidr || source_ip_is_private)</code> &&<br/>' +
'<code>(source_port || source_port_range)</code> &&<br/>' +
'<code>other fields</code>.<br/>' +
'Additionally, included rule sets can be considered merged rather than as a single rule sub-item.'));
so.value('default', _('Default'));
so.default = 'default';
so.rmempty = false;
so.readonly = true;
so.modalonly = true;
so = ss.taboption('field_other', form.ListValue, 'ip_version', _('IP version'));
so.value('4', _('IPv4'));
so.value('6', _('IPv6'));
so.value('', _('Both'));
so.modalonly = true;
// Match on DNS query record type (A, AAAA, ...).
so = ss.taboption('field_other', form.DynamicList, 'query_type', _('Query type'),
_('Match query type.'));
so.modalonly = true;
so = ss.taboption('field_other', form.ListValue, 'network', _('Network'));
so.value('tcp', _('TCP'));
so.value('udp', _('UDP'));
so.value('', _('Both'));
// Match on protocol sniffed from the originating connection.
so = ss.taboption('field_other', form.MultiValue, 'protocol', _('Protocol'),
_('Sniffed protocol, see <a target="_blank" href="https://sing-box.sagernet.org/configuration/route/sniff/">Sniff</a> for details.'));
so.value('bittorrent', _('BitTorrent'));
so.value('dtls', _('DTLS'));
so.value('http', _('HTTP'));
so.value('quic', _('QUIC'));
so.value('rdp', _('RDP'));
so.value('ssh', _('SSH'));
so.value('stun', _('STUN'));
so.value('tls', _('TLS'));
so = ss.taboption('field_other', form.DynamicList, 'user', _('User'),
_('Match user name.'));
so.modalonly = true;
// Reference enabled rule-set sections by name.
so = ss.taboption('field_other', hp.CBIStaticList, 'rule_set', _('Rule set'),
_('Match rule set.'));
so.load = function(section_id) {
delete this.keylist;
delete this.vallist;
uci.sections(data[0], 'ruleset', (res) => {
if (res.enabled === '1')
this.value(res['.name'], res.label);
});
return this.super('load', section_id);
}
so.modalonly = true;
so = ss.taboption('field_other', form.Flag, 'rule_set_ip_cidr_match_source', _('Rule set IP CIDR as source IP'),
_('Make IP CIDR in rule sets match the source IP.'));
so.modalonly = true;
so = ss.taboption('field_other', form.Flag, 'rule_set_ip_cidr_accept_empty', _('Accept empty query response'),
_('Make IP CIDR in rule-sets accept empty query response.'));
so.modalonly = true;
so = ss.taboption('field_other', form.Flag, 'invert', _('Invert'),
_('Invert match result.'));
so.modalonly = true;
// Action taken when the rule matches; most options below depend on it.
so = ss.taboption('field_other', form.ListValue, 'action', _('Action'));
so.value('route', _('Route'));
so.value('route-options', _('Route options'));
so.value('reject', _('Reject'));
so.value('predefined', _('Predefined'));
so.default = 'route';
so.rmempty = false;
so.editable = true;
// Target DNS server for the 'route' action.
so = ss.taboption('field_other', form.ListValue, 'server', _('Server'),
_('Tag of the target dns server.'));
so.load = function(section_id) {
delete this.keylist;
delete this.vallist;
this.value('default-dns', _('Default DNS (issued by WAN)'));
this.value('system-dns', _('System DNS'));
uci.sections(data[0], 'dns_server', (res) => {
if (res.enabled === '1')
this.value(res['.name'], res.label);
});
return this.super('load', section_id);
}
so.rmempty = false;
so.editable = true;
so.depends('action', 'route');
so = ss.taboption('field_other', form.ListValue, 'domain_strategy', _('Domain strategy'),
_('Set domain strategy for this query.'));
for (let i in hp.dns_strategy)
so.value(i, hp.dns_strategy[i]);
so.depends('action', 'route');
so.modalonly = true;
so = ss.taboption('field_other', form.Flag, 'dns_disable_cache', _('Disable dns cache'),
_('Disable cache and save cache in this query.'));
so.depends('action', 'route');
so.depends('action', 'route-options');
so.modalonly = true;
so = ss.taboption('field_other', form.Value, 'rewrite_ttl', _('Rewrite TTL'),
_('Rewrite TTL in DNS responses.'));
so.datatype = 'uinteger';
so.depends('action', 'route');
so.depends('action', 'route-options');
so.modalonly = true;
so = ss.taboption('field_other', form.Value, 'client_subnet', _('EDNS Client subnet'),
_('Append a <code>edns0-subnet</code> OPT extra record with the specified IP prefix to every query by default.<br/>' +
'If value is an IP address instead of prefix, <code>/32</code> or <code>/128</code> will be appended automatically.'));
so.datatype = 'or(cidr, ipaddr)';
so.depends('action', 'route');
so.depends('action', 'route-options');
so.modalonly = true;
// Options for the 'reject' action.
so = ss.taboption('field_other', form.ListValue, 'reject_method', _('Method'));
so.value('default', _('Reply with REFUSED'));
so.value('drop', _('Drop requests'));
so.default = 'default';
so.depends('action', 'reject');
so.modalonly = true;
so = ss.taboption('field_other', form.Flag, 'reject_no_drop', _('Don\'t drop requests'),
_('<code>%s</code> will be temporarily overwritten to <code>%s</code> after 50 triggers in 30s if not enabled.').format(
_('Method'), _('Drop requests')));
so.depends('reject_method', 'default');
so.modalonly = true;
// Options for the 'predefined' action: craft a static DNS response.
so = ss.taboption('field_other', form.ListValue, 'predefined_rcode', _('RCode'),
_('The response code.'));
so.value('NOERROR');
so.value('FORMERR');
so.value('SERVFAIL');
so.value('NXDOMAIN');
so.value('NOTIMP');
so.value('REFUSED');
so.default = 'NOERROR';
so.depends('action', 'predefined');
so.modalonly = true;
so = ss.taboption('field_other', form.DynamicList, 'predefined_answer', _('Answer'),
_('List of text DNS record to respond as answers.'));
so.depends('action', 'predefined');
so.modalonly = true;
so = ss.taboption('field_other', form.DynamicList, 'predefined_ns', _('NS'),
_('List of text DNS record to respond as name servers.'));
so.depends('action', 'predefined');
so.modalonly = true;
so = ss.taboption('field_other', form.DynamicList, 'predefined_extra', _('Extra records'),
_('List of text DNS record to respond as extra records.'));
so.depends('action', 'predefined');
so.modalonly = true;
// Host/IP match fields.
so = ss.taboption('field_host', form.DynamicList, 'domain', _('Domain name'),
_('Match full domain.'));
so.datatype = 'hostname';
so.modalonly = true;
so = ss.taboption('field_host', form.DynamicList, 'domain_suffix', _('Domain suffix'),
_('Match domain suffix.'));
so.modalonly = true;
so = ss.taboption('field_host', form.DynamicList, 'domain_keyword', _('Domain keyword'),
_('Match domain using keyword.'));
so.modalonly = true;
so = ss.taboption('field_host', form.DynamicList, 'domain_regex', _('Domain regex'),
_('Match domain using regular expression.'));
so.modalonly = true;
so = ss.taboption('field_host', form.DynamicList, 'source_ip_cidr', _('Source IP CIDR'),
_('Match source IP CIDR.'));
so.datatype = 'or(cidr, ipaddr)';
so.modalonly = true;
so = ss.taboption('field_host', form.Flag, 'source_ip_is_private', _('Match private source IP'));
so.modalonly = true;
// ip_cidr / ip_is_private match the query RESPONSE here, not the client.
so = ss.taboption('field_host', form.DynamicList, 'ip_cidr', _('IP CIDR'),
_('Match IP CIDR with query response. Current rule will be skipped if not match.'));
so.datatype = 'or(cidr, ipaddr)';
so.modalonly = true;
so = ss.taboption('field_host', form.Flag, 'ip_is_private', _('Match private IP'),
_('Match private IP with query response.'));
so.modalonly = true;
// Port match fields.
so = ss.taboption('field_port', form.DynamicList, 'source_port', _('Source port'),
_('Match source port.'));
so.datatype = 'port';
so.modalonly = true;
so = ss.taboption('field_port', form.DynamicList, 'source_port_range', _('Source port range'),
_('Match source port range. Format as START:/:END/START:END.'));
so.validate = hp.validatePortRange;
so.modalonly = true;
so = ss.taboption('field_port', form.DynamicList, 'port', _('Port'),
_('Match port.'));
so.datatype = 'port';
so.modalonly = true;
so = ss.taboption('field_port', form.DynamicList, 'port_range', _('Port range'),
_('Match port range. Format as START:/:END/START:END.'));
so.validate = hp.validatePortRange;
so.modalonly = true;
// Process match fields.
so = ss.taboption('fields_process', form.DynamicList, 'process_name', _('Process name'),
_('Match process name.'));
so.modalonly = true;
so = ss.taboption('fields_process', form.DynamicList, 'process_path', _('Process path'),
_('Match process path.'));
so.modalonly = true;
so = ss.taboption('fields_process', form.DynamicList, 'process_path_regex', _('Process path (regex)'),
_('Match process path using regular expression.'));
so.modalonly = true;
/* DNS rules end */
/* Custom routing settings end */
/* Rule set settings start */
s.tab('ruleset', _('Rule Set'));
o = s.taboption('ruleset', form.SectionValue, '_ruleset', form.GridSection, 'ruleset');
o.depends('routing_mode', 'custom');
ss = o.subsection;
ss.addremove = true;
ss.rowcolors = true;
ss.sortable = true;
ss.nodescriptions = true;
ss.modaltitle = L.bind(hp.loadModalTitle, this, _('Rule set'), _('Add a rule set'), data[0]);
ss.sectiontitle = L.bind(hp.loadDefaultLabel, this, data[0]);
ss.renderSectionAdd = L.bind(hp.renderSectionAdd, this, ss);
so = ss.option(form.Value, 'label', _('Label'));
so.load = L.bind(hp.loadDefaultLabel, this, data[0]);
so.validate = L.bind(hp.validateUniqueValue, this, data[0], 'ruleset', 'label');
so.modalonly = true;
so = ss.option(form.Flag, 'enabled', _('Enable'));
so.default = so.enabled;
so.rmempty = false;
so.editable = true;
so = ss.option(form.ListValue, 'type', _('Type'));
so.value('local', _('Local'));
so.value('remote', _('Remote'));
so.default = 'remote';
so.rmempty = false;
so = ss.option(form.ListValue, 'format', _('Format'));
so.value('binary', _('Binary file'));
so.value('source', _('Source file'));
so.default = 'binary';
so.rmempty = false;
so = ss.option(form.Value, 'path', _('Path'));
so.datatype = 'file';
so.placeholder = '/etc/homeproxy/ruleset/example.json';
so.rmempty = false;
so.depends('type', 'local');
so.modalonly = true;
so = ss.option(form.Value, 'url', _('Rule set URL'));
so.validate = function(section_id, value) {
if (section_id) {
if (!value)
return _('Expecting: %s').format(_('non-empty value'));
try {
let url = new URL(value);
if (!url.hostname)
return _('Expecting: %s').format(_('valid URL'));
}
catch(e) {
return _('Expecting: %s').format(_('valid URL'));
}
}
return true;
}
so.rmempty = false;
so.depends('type', 'remote');
so.modalonly = true;
so = ss.option(form.ListValue, 'outbound', _('Outbound'),
_('Tag of the outbound to download rule set.'));
so.load = function(section_id) {
delete this.keylist;
delete this.vallist;
this.value('', _('Default'));
this.value('direct-out', _('Direct'));
uci.sections(data[0], 'routing_node', (res) => {
if (res.enabled === '1')
this.value(res['.name'], res.label);
});
return this.super('load', section_id);
}
so.depends('type', 'remote');
so = ss.option(form.Value, 'update_interval', _('Update interval'),
_('Update interval of rule set.'));
so.placeholder = '1d';
so.depends('type', 'remote');
/* Rule set settings end */
/* ACL settings start */
s.tab('control', _('Access Control'));
o = s.taboption('control', form.SectionValue, '_control', form.NamedSection, 'control', 'homeproxy');
ss = o.subsection;
/* Interface control start */
ss.tab('interface', _('Interface Control'));
so = ss.taboption('interface', widgets.DeviceSelect, 'listen_interfaces', _('Listen interfaces'),
_('Only process traffic from specific interfaces. Leave empty for all.'));
so.multiple = true;
so.noaliases = true;
so = ss.taboption('interface', widgets.DeviceSelect, 'bind_interface', _('Bind interface'),
_('Bind outbound traffic to specific interface. Leave empty to auto detect.'));
so.multiple = false;
so.noaliases = true;
/* Interface control end */
/* LAN IP policy start */
ss.tab('lan_ip_policy', _('LAN IP Policy'));
so = ss.taboption('lan_ip_policy', form.ListValue, 'lan_proxy_mode', _('Proxy filter mode'));
so.value('disabled', _('Disable'));
so.value('listed_only', _('Proxy listed only'));
so.value('except_listed', _('Proxy all except listed'));
so.default = 'disabled';
so.rmempty = false;
so = fwtool.addIPOption(ss, 'lan_ip_policy', 'lan_direct_ipv4_ips', _('Direct IPv4 IP-s'), null, 'ipv4', hosts, true);
so.depends('lan_proxy_mode', 'except_listed');
so = fwtool.addIPOption(ss, 'lan_ip_policy', 'lan_direct_ipv6_ips', _('Direct IPv6 IP-s'), null, 'ipv6', hosts, true);
so.depends({'lan_proxy_mode': 'except_listed', 'homeproxy.config.ipv6_support': '1'});
so = fwtool.addMACOption(ss, 'lan_ip_policy', 'lan_direct_mac_addrs', _('Direct MAC-s'), null, hosts);
so.depends('lan_proxy_mode', 'except_listed');
so = fwtool.addIPOption(ss, 'lan_ip_policy', 'lan_proxy_ipv4_ips', _('Proxy IPv4 IP-s'), null, 'ipv4', hosts, true);
so.depends('lan_proxy_mode', 'listed_only');
so = fwtool.addIPOption(ss, 'lan_ip_policy', 'lan_proxy_ipv6_ips', _('Proxy IPv6 IP-s'), null, 'ipv6', hosts, true);
so.depends({'lan_proxy_mode': 'listed_only', 'homeproxy.config.ipv6_support': '1'});
so = fwtool.addMACOption(ss, 'lan_ip_policy', 'lan_proxy_mac_addrs', _('Proxy MAC-s'), null, hosts);
so.depends('lan_proxy_mode', 'listed_only');
so = fwtool.addIPOption(ss, 'lan_ip_policy', 'lan_gaming_mode_ipv4_ips', _('Gaming mode IPv4 IP-s'), null, 'ipv4', hosts, true);
so = fwtool.addIPOption(ss, 'lan_ip_policy', 'lan_gaming_mode_ipv6_ips', _('Gaming mode IPv6 IP-s'), null, 'ipv6', hosts, true);
so.depends('homeproxy.config.ipv6_support', '1');
so = fwtool.addMACOption(ss, 'lan_ip_policy', 'lan_gaming_mode_mac_addrs', _('Gaming mode MAC-s'), null, hosts);
so = fwtool.addIPOption(ss, 'lan_ip_policy', 'lan_global_proxy_ipv4_ips', _('Global proxy IPv4 IP-s'), null, 'ipv4', hosts, true);
so.depends({'homeproxy.config.routing_mode': 'custom', '!reverse': true});
so = fwtool.addIPOption(ss, 'lan_ip_policy', 'lan_global_proxy_ipv6_ips', _('Global proxy IPv6 IP-s'), null, 'ipv6', hosts, true);
so.depends({'homeproxy.config.routing_mode': /^((?!custom).)+$/, 'homeproxy.config.ipv6_support': '1'});
so = fwtool.addMACOption(ss, 'lan_ip_policy', 'lan_global_proxy_mac_addrs', _('Global proxy MAC-s'), null, hosts);
so.depends({'homeproxy.config.routing_mode': 'custom', '!reverse': true});
/* LAN IP policy end */
/* WAN IP policy start */
ss.tab('wan_ip_policy', _('WAN IP Policy'));
so = ss.taboption('wan_ip_policy', form.DynamicList, 'wan_proxy_ipv4_ips', _('Proxy IPv4 IP-s'));
so.datatype = 'or(ip4addr, cidr4)';
so = ss.taboption('wan_ip_policy', form.DynamicList, 'wan_proxy_ipv6_ips', _('Proxy IPv6 IP-s'));
so.datatype = 'or(ip6addr, cidr6)';
so.depends('homeproxy.config.ipv6_support', '1');
so = ss.taboption('wan_ip_policy', form.DynamicList, 'wan_direct_ipv4_ips', _('Direct IPv4 IP-s'));
so.datatype = 'or(ip4addr, cidr4)';
so = ss.taboption('wan_ip_policy', form.DynamicList, 'wan_direct_ipv6_ips', _('Direct IPv6 IP-s'));
so.datatype = 'or(ip6addr, cidr6)';
so.depends('homeproxy.config.ipv6_support', '1');
/* WAN IP policy end */
/* Proxy domain list start */
ss.tab('proxy_domain_list', _('Proxy Domain List'));
so = ss.taboption('proxy_domain_list', form.TextValue, '_proxy_domain_list');
so.rows = 10;
so.monospace = true;
so.datatype = 'hostname';
so.depends({'homeproxy.config.routing_mode': 'custom', '!reverse': true});
so.load = function(/* ... */) {
return L.resolveDefault(callReadDomainList('proxy_list')).then((res) => {
return res.content;
}, {});
}
so.write = function(_section_id, value) {
return callWriteDomainList('proxy_list', value);
}
so.remove = function(/* ... */) {
let routing_mode = this.section.formvalue('config', 'routing_mode');
if (routing_mode !== 'custom')
return callWriteDomainList('proxy_list', '');
return true;
}
so.validate = function(section_id, value) {
if (section_id && value)
for (let i of value.split('\n'))
if (i && !stubValidator.apply('hostname', i))
return _('Expecting: %s').format(_('valid hostname'));
return true;
}
/* Proxy domain list end */
/* Direct domain list start */
ss.tab('direct_domain_list', _('Direct Domain List'));
so = ss.taboption('direct_domain_list', form.TextValue, '_direct_domain_list');
so.rows = 10;
so.monospace = true;
so.datatype = 'hostname';
so.depends({'homeproxy.config.routing_mode': 'custom', '!reverse': true});
so.load = function(/* ... */) {
return L.resolveDefault(callReadDomainList('direct_list')).then((res) => {
return res.content;
}, {});
}
so.write = function(_section_id, value) {
return callWriteDomainList('direct_list', value);
}
so.remove = function(/* ... */) {
let routing_mode = this.section.formvalue('config', 'routing_mode');
if (routing_mode !== 'custom')
return callWriteDomainList('direct_list', '');
return true;
}
so.validate = function(section_id, value) {
if (section_id && value)
for (let i of value.split('\n'))
if (i && !stubValidator.apply('hostname', i))
return _('Expecting: %s').format(_('valid hostname'));
return true;
}
/* Direct domain list end */
/* ACL settings end */
return m.render();
}
});
|
2929004360/ruoyi-sign
| 1,312
|
ruoyi-common/src/main/java/com/ruoyi/common/core/domain/TreeEntity.java
|
package com.ruoyi.common.core.domain;
import java.util.ArrayList;
import java.util.List;
/**
* Tree基类
*
* @author ruoyi
*/
public class TreeEntity extends BaseEntity
{
    private static final long serialVersionUID = 1L;

    /** Display name of the parent node. */
    private String parentName;

    /** ID of the parent node. */
    private Long parentId;

    /** Display order among sibling nodes. */
    private Integer orderNum;

    /** Ancestor list of this node. */
    private String ancestors;

    /** Child nodes; initialized to an empty list so it is never null. */
    private List<?> children = new ArrayList<>();

    /** @return the parent node's display name */
    public String getParentName()
    {
        return parentName;
    }

    /** @param parentName the parent node's display name */
    public void setParentName(String parentName)
    {
        this.parentName = parentName;
    }

    /** @return the parent node's ID */
    public Long getParentId()
    {
        return parentId;
    }

    /** @param parentId the parent node's ID */
    public void setParentId(Long parentId)
    {
        this.parentId = parentId;
    }

    /** @return the display order among siblings */
    public Integer getOrderNum()
    {
        return orderNum;
    }

    /** @param orderNum the display order among siblings */
    public void setOrderNum(Integer orderNum)
    {
        this.orderNum = orderNum;
    }

    /** @return the ancestor list of this node */
    public String getAncestors()
    {
        return ancestors;
    }

    /** @param ancestors the ancestor list of this node */
    public void setAncestors(String ancestors)
    {
        this.ancestors = ancestors;
    }

    /** @return the child nodes (never null) */
    public List<?> getChildren()
    {
        return children;
    }

    /** @param children the child nodes */
    public void setChildren(List<?> children)
    {
        this.children = children;
    }
}
|
2929004360/ruoyi-sign
| 1,966
|
ruoyi-common/src/main/java/com/ruoyi/common/core/domain/TreeSelect.java
|
package com.ruoyi.common.core.domain;
import java.io.Serializable;
import java.util.List;
import java.util.stream.Collectors;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.ruoyi.common.constant.UserConstants;
import com.ruoyi.common.core.domain.entity.SysDept;
import com.ruoyi.common.core.domain.entity.SysMenu;
import com.ruoyi.common.utils.StringUtils;
/**
* Treeselect树结构实体类
*
* @author ruoyi
*/
public class TreeSelect implements Serializable
{
    private static final long serialVersionUID = 1L;

    /** Node ID. */
    private Long id;

    /** Node display label. */
    private String label;

    /** Whether the node is disabled in the tree widget. */
    private boolean disabled = false;

    /** Child nodes; omitted from JSON output when empty (NON_EMPTY). */
    @JsonInclude(JsonInclude.Include.NON_EMPTY)
    private List<TreeSelect> children;

    /** No-arg constructor for serialization frameworks. */
    public TreeSelect()
    {
    }

    /**
     * Build a tree node from a department, recursively converting its
     * children. The node is disabled when the department status equals
     * UserConstants.DEPT_DISABLE.
     *
     * @param dept source department
     */
    public TreeSelect(SysDept dept)
    {
        this.id = dept.getDeptId();
        this.label = dept.getDeptName();
        this.disabled = StringUtils.equals(UserConstants.DEPT_DISABLE, dept.getStatus());
        this.children = dept.getChildren().stream().map(TreeSelect::new).collect(Collectors.toList());
    }

    /**
     * Build a tree node from a menu, recursively converting its children.
     *
     * @param menu source menu
     */
    public TreeSelect(SysMenu menu)
    {
        this.id = menu.getMenuId();
        this.label = menu.getMenuName();
        this.children = menu.getChildren().stream().map(TreeSelect::new).collect(Collectors.toList());
    }

    /** @return the node ID */
    public Long getId()
    {
        return id;
    }

    /** @param id the node ID */
    public void setId(Long id)
    {
        this.id = id;
    }

    /** @return the node display label */
    public String getLabel()
    {
        return label;
    }

    /** @param label the node display label */
    public void setLabel(String label)
    {
        this.label = label;
    }

    /** @return true when the node is disabled */
    public boolean isDisabled()
    {
        return disabled;
    }

    /** @param disabled whether the node is disabled */
    public void setDisabled(boolean disabled)
    {
        this.disabled = disabled;
    }

    /** @return the child nodes; may be null before conversion */
    public List<TreeSelect> getChildren()
    {
        return children;
    }

    /** @param children the child nodes */
    public void setChildren(List<TreeSelect> children)
    {
        this.children = children;
    }
}
|
28harishkumar/blog
| 1,752
|
resources/views/auth/login.blade.php
|
@extends('app')

{{-- Login page: lists validation errors from the previous attempt and posts
     email/password (plus an optional remember-me flag) to /auth/login. --}}
@section('content')
<div class="container-fluid">
    <div class="row">
        <div class="col-md-8 col-md-offset-2">
            <div class="panel panel-default">
                <div class="panel-heading">Login</div>
                <div class="panel-body">
                    {{-- Show validation errors flashed by the previous request --}}
                    @if (count($errors) > 0)
                        <div class="alert alert-danger">
                            <strong>Whoops!</strong> There were some problems with your input.<br><br>
                            <ul>
                                @foreach ($errors->all() as $error)
                                    <li>{{ $error }}</li>
                                @endforeach
                            </ul>
                        </div>
                    @endif
                    {{-- Hidden _token field satisfies Laravel's CSRF middleware --}}
                    <form class="form-horizontal" role="form" method="POST" action="{{ url('/auth/login') }}">
                        <input type="hidden" name="_token" value="{{ csrf_token() }}">
                        <div class="form-group">
                            <label class="col-md-4 control-label">E-Mail Address</label>
                            <div class="col-md-6">
                                <input type="email" class="form-control" name="email" value="{{ old('email') }}">
                            </div>
                        </div>
                        <div class="form-group">
                            <label class="col-md-4 control-label">Password</label>
                            <div class="col-md-6">
                                <input type="password" class="form-control" name="password">
                            </div>
                        </div>
                        <div class="form-group">
                            <div class="col-md-6 col-md-offset-4">
                                <div class="checkbox">
                                    <label>
                                        <input type="checkbox" name="remember"> Remember Me
                                    </label>
                                </div>
                            </div>
                        </div>
                        <div class="form-group">
                            <div class="col-md-6 col-md-offset-4">
                                <button type="submit" class="btn btn-primary">Login</button>
                                <a class="btn btn-link" href="{{ url('/password/email') }}">Forgot Your Password?</a>
                            </div>
                        </div>
                    </form>
                </div>
            </div>
        </div>
    </div>
</div>
@endsection
|
2929004360/ruoyi-sign
| 3,912
|
ruoyi-common/src/main/java/com/ruoyi/common/core/domain/AjaxResult.java
|
package com.ruoyi.common.core.domain;
import java.util.HashMap;
import java.util.Objects;
import com.ruoyi.common.constant.HttpStatus;
import com.ruoyi.common.utils.StringUtils;
/**
* 操作消息提醒
*
* @author ruoyi
*/
public class AjaxResult extends HashMap<String, Object>
{
    private static final long serialVersionUID = 1L;

    /** Map key holding the status code. */
    public static final String CODE_TAG = "code";

    /** Map key holding the message text. */
    public static final String MSG_TAG = "msg";

    /** Map key holding the payload object. */
    public static final String DATA_TAG = "data";

    /**
     * Create an empty result (no code, message or data entries).
     */
    public AjaxResult()
    {
    }

    /**
     * Create a result with a status code and message.
     *
     * @param code status code
     * @param msg  message text
     */
    public AjaxResult(int code, String msg)
    {
        super.put(CODE_TAG, code);
        super.put(MSG_TAG, msg);
    }

    /**
     * Create a result with a status code, message and payload.
     * The payload entry is only stored when data is non-null.
     *
     * @param code status code
     * @param msg  message text
     * @param data payload object, may be null
     */
    public AjaxResult(int code, String msg, Object data)
    {
        super.put(CODE_TAG, code);
        super.put(MSG_TAG, msg);
        if (StringUtils.isNotNull(data))
        {
            super.put(DATA_TAG, data);
        }
    }

    /**
     * Build a success result with the default message.
     *
     * @return success result
     */
    public static AjaxResult success()
    {
        return AjaxResult.success("操作成功");
    }

    /**
     * Build a success result carrying a payload and the default message.
     *
     * @param data payload object
     * @return success result
     */
    public static AjaxResult success(Object data)
    {
        return AjaxResult.success("操作成功", data);
    }

    /**
     * Build a success result with a custom message and no payload.
     *
     * @param msg message text
     * @return success result
     */
    public static AjaxResult success(String msg)
    {
        return AjaxResult.success(msg, null);
    }

    /**
     * Build a success result with a custom message and payload.
     *
     * @param msg  message text
     * @param data payload object
     * @return success result
     */
    public static AjaxResult success(String msg, Object data)
    {
        return new AjaxResult(HttpStatus.SUCCESS, msg, data);
    }

    /**
     * Build a warning result with no payload.
     *
     * @param msg message text
     * @return warning result
     */
    public static AjaxResult warn(String msg)
    {
        return AjaxResult.warn(msg, null);
    }

    /**
     * Build a warning result with a payload.
     *
     * @param msg  message text
     * @param data payload object
     * @return warning result
     */
    public static AjaxResult warn(String msg, Object data)
    {
        return new AjaxResult(HttpStatus.WARN, msg, data);
    }

    /**
     * Build an error result with the default message.
     *
     * @return error result
     */
    public static AjaxResult error()
    {
        return AjaxResult.error("操作失败");
    }

    /**
     * Build an error result with a custom message and no payload.
     *
     * @param msg message text
     * @return error result
     */
    public static AjaxResult error(String msg)
    {
        return AjaxResult.error(msg, null);
    }

    /**
     * Build an error result with a custom message and payload.
     *
     * @param msg  message text
     * @param data payload object
     * @return error result
     */
    public static AjaxResult error(String msg, Object data)
    {
        return new AjaxResult(HttpStatus.ERROR, msg, data);
    }

    /**
     * Build an error result with an explicit status code.
     *
     * @param code status code
     * @param msg  message text
     * @return error result
     */
    public static AjaxResult error(int code, String msg)
    {
        return new AjaxResult(code, msg, null);
    }

    /**
     * @return true when the stored code equals HttpStatus.SUCCESS
     */
    public boolean isSuccess()
    {
        return Objects.equals(HttpStatus.SUCCESS, this.get(CODE_TAG));
    }

    /**
     * @return true when the stored code equals HttpStatus.WARN
     */
    public boolean isWarn()
    {
        return Objects.equals(HttpStatus.WARN, this.get(CODE_TAG));
    }

    /**
     * @return true when the stored code equals HttpStatus.ERROR
     */
    public boolean isError()
    {
        return Objects.equals(HttpStatus.ERROR, this.get(CODE_TAG));
    }

    /**
     * Fluent put: stores the entry and returns this result for chaining.
     *
     * @param key   entry key
     * @param value entry value
     * @return this result
     */
    @Override
    public AjaxResult put(String key, Object value)
    {
        super.put(key, value);
        return this;
    }
}
|
2929004360/ruoyi-sign
| 2,114
|
ruoyi-common/src/main/java/com/ruoyi/common/core/domain/R.java
|
package com.ruoyi.common.core.domain;
import java.io.Serializable;
import com.ruoyi.common.constant.HttpStatus;
/**
* 响应信息主体
*
* @author ruoyi
*/
public class R<T> implements Serializable
{
    private static final long serialVersionUID = 1L;

    /** Success status code (mirrors HttpStatus.SUCCESS). */
    public static final int SUCCESS = HttpStatus.SUCCESS;

    /** Failure status code (mirrors HttpStatus.ERROR). */
    public static final int FAIL = HttpStatus.ERROR;

    /** Business status code of this response. */
    private int code;

    /** Human-readable message. */
    private String msg;

    /** Response payload; may be null. */
    private T data;

    /** @return a success response with no payload and the default message */
    public static <T> R<T> ok()
    {
        return restResult(null, SUCCESS, "操作成功");
    }

    /**
     * @param data response payload
     * @return a success response carrying data and the default message
     */
    public static <T> R<T> ok(T data)
    {
        return restResult(data, SUCCESS, "操作成功");
    }

    /**
     * @param data response payload
     * @param msg  message text
     * @return a success response carrying data and a custom message
     */
    public static <T> R<T> ok(T data, String msg)
    {
        return restResult(data, SUCCESS, msg);
    }

    /** @return a failure response with no payload and the default message */
    public static <T> R<T> fail()
    {
        return restResult(null, FAIL, "操作失败");
    }

    /**
     * @param msg message text
     * @return a failure response with a custom message
     */
    public static <T> R<T> fail(String msg)
    {
        return restResult(null, FAIL, msg);
    }

    /**
     * @param data response payload
     * @return a failure response carrying data and the default message
     */
    public static <T> R<T> fail(T data)
    {
        return restResult(data, FAIL, "操作失败");
    }

    /**
     * @param data response payload
     * @param msg  message text
     * @return a failure response carrying data and a custom message
     */
    public static <T> R<T> fail(T data, String msg)
    {
        return restResult(data, FAIL, msg);
    }

    /**
     * @param code explicit status code
     * @param msg  message text
     * @return a failure response with an explicit code
     */
    public static <T> R<T> fail(int code, String msg)
    {
        return restResult(null, code, msg);
    }

    /** Assemble a response from its three fields. */
    private static <T> R<T> restResult(T data, int code, String msg)
    {
        R<T> apiResult = new R<>();
        apiResult.setCode(code);
        apiResult.setData(data);
        apiResult.setMsg(msg);
        return apiResult;
    }

    /** @return the business status code */
    public int getCode()
    {
        return code;
    }

    /** @param code the business status code */
    public void setCode(int code)
    {
        this.code = code;
    }

    /** @return the message text */
    public String getMsg()
    {
        return msg;
    }

    /** @param msg the message text */
    public void setMsg(String msg)
    {
        this.msg = msg;
    }

    /** @return the response payload, may be null */
    public T getData()
    {
        return data;
    }

    /** @param data the response payload */
    public void setData(T data)
    {
        this.data = data;
    }

    /**
     * @param ret response to inspect, may be null
     * @return true when ret is null or does not carry the SUCCESS code
     */
    public static <T> Boolean isError(R<T> ret)
    {
        return !isSuccess(ret);
    }

    /**
     * @param ret response to inspect, may be null
     * @return true only when ret is non-null and its code equals SUCCESS
     */
    public static <T> Boolean isSuccess(R<T> ret)
    {
        // Null-safe: previously a null argument threw NullPointerException;
        // a missing response is now treated as a failure instead.
        return ret != null && R.SUCCESS == ret.getCode();
    }
}
|
2929004360/ruoyi-sign
| 2,244
|
ruoyi-common/src/main/java/com/ruoyi/common/core/domain/BaseEntity.java
|
package com.ruoyi.common.core.domain;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.ruoyi.common.annotation.Excel;
import java.io.Serializable;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
/**
* Entity基类
*
* @author ruoyi
*/
public class BaseEntity implements Serializable
{
    private static final long serialVersionUID = 1L;

    /** Search keyword; excluded from JSON output. */
    @JsonIgnore
    private String searchValue;

    /** Creator account name. */
    private String createBy;

    /** Creation time, serialized as "yyyy-MM-dd HH:mm:ss". */
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
    private Date createTime;

    /** Last updater account name. */
    private String updateBy;

    /** Last update time, serialized as "yyyy-MM-dd HH:mm:ss". */
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
    private Date updateTime;

    /** Free-form remark. */
    private String remark;

    /** Extra request parameters; omitted from JSON when empty. */
    @JsonInclude(JsonInclude.Include.NON_EMPTY)
    private Map<String, Object> params;

    /** @return the search keyword */
    public String getSearchValue()
    {
        return searchValue;
    }

    /** @param searchValue the search keyword */
    public void setSearchValue(String searchValue)
    {
        this.searchValue = searchValue;
    }

    /** @return the creator account name */
    public String getCreateBy()
    {
        return createBy;
    }

    /** @param createBy the creator account name */
    public void setCreateBy(String createBy)
    {
        this.createBy = createBy;
    }

    /** @return the creation time */
    public Date getCreateTime()
    {
        return createTime;
    }

    /** @param createTime the creation time */
    public void setCreateTime(Date createTime)
    {
        this.createTime = createTime;
    }

    /** @return the last updater account name */
    public String getUpdateBy()
    {
        return updateBy;
    }

    /** @param updateBy the last updater account name */
    public void setUpdateBy(String updateBy)
    {
        this.updateBy = updateBy;
    }

    /** @return the last update time */
    public Date getUpdateTime()
    {
        return updateTime;
    }

    /** @param updateTime the last update time */
    public void setUpdateTime(Date updateTime)
    {
        this.updateTime = updateTime;
    }

    /** @return the free-form remark */
    public String getRemark()
    {
        return remark;
    }

    /** @param remark the free-form remark */
    public void setRemark(String remark)
    {
        this.remark = remark;
    }

    /**
     * Lazily initialize the params map so callers never receive null.
     *
     * @return the extra request parameters (never null)
     */
    public Map<String, Object> getParams()
    {
        if (params == null)
        {
            params = new HashMap<>();
        }
        return params;
    }

    /** @param params the extra request parameters */
    public void setParams(Map<String, Object> params)
    {
        this.params = params;
    }
}
|
28harishkumar/blog
| 1,912
|
resources/views/auth/register.blade.php
|
@extends('app')

{{-- Registration page: lists validation errors from the previous attempt and
     posts name/email/password/password_confirmation to /auth/register. --}}
@section('content')
<div class="container-fluid">
    <div class="row">
        <div class="col-md-8 col-md-offset-2">
            <div class="panel panel-default">
                <div class="panel-heading">Register</div>
                <div class="panel-body">
                    {{-- Show validation errors flashed by the previous request --}}
                    @if (count($errors) > 0)
                        <div class="alert alert-danger">
                            <strong>Whoops!</strong> There were some problems with your input.<br><br>
                            <ul>
                                @foreach ($errors->all() as $error)
                                    <li>{{ $error }}</li>
                                @endforeach
                            </ul>
                        </div>
                    @endif
                    {{-- Hidden _token field satisfies Laravel's CSRF middleware --}}
                    <form class="form-horizontal" role="form" method="POST" action="{{ url('/auth/register') }}">
                        <input type="hidden" name="_token" value="{{ csrf_token() }}">
                        <div class="form-group">
                            <label class="col-md-4 control-label">Name</label>
                            <div class="col-md-6">
                                <input type="text" class="form-control" name="name" value="{{ old('name') }}">
                            </div>
                        </div>
                        <div class="form-group">
                            <label class="col-md-4 control-label">E-Mail Address</label>
                            <div class="col-md-6">
                                <input type="email" class="form-control" name="email" value="{{ old('email') }}">
                            </div>
                        </div>
                        <div class="form-group">
                            <label class="col-md-4 control-label">Password</label>
                            <div class="col-md-6">
                                <input type="password" class="form-control" name="password">
                            </div>
                        </div>
                        <div class="form-group">
                            <label class="col-md-4 control-label">Confirm Password</label>
                            <div class="col-md-6">
                                <input type="password" class="form-control" name="password_confirmation">
                            </div>
                        </div>
                        <div class="form-group">
                            <div class="col-md-6 col-md-offset-4">
                                <button type="submit" class="btn btn-primary">
                                    Register
                                </button>
                            </div>
                        </div>
                    </form>
                </div>
            </div>
        </div>
    </div>
</div>
@endsection
|
28harishkumar/blog
| 1,753
|
resources/views/auth/reset.blade.php
|
@extends('app')

{{-- Password-reset page: carries the reset token as a hidden field and posts
     email/password/password_confirmation to /password/reset. --}}
@section('content')
<div class="container-fluid">
    <div class="row">
        <div class="col-md-8 col-md-offset-2">
            <div class="panel panel-default">
                <div class="panel-heading">Reset Password</div>
                <div class="panel-body">
                    {{-- Show validation errors flashed by the previous request --}}
                    @if (count($errors) > 0)
                        <div class="alert alert-danger">
                            <strong>Whoops!</strong> There were some problems with your input.<br><br>
                            <ul>
                                @foreach ($errors->all() as $error)
                                    <li>{{ $error }}</li>
                                @endforeach
                            </ul>
                        </div>
                    @endif
                    {{-- _token satisfies CSRF middleware; token identifies the reset request --}}
                    <form class="form-horizontal" role="form" method="POST" action="{{ url('/password/reset') }}">
                        <input type="hidden" name="_token" value="{{ csrf_token() }}">
                        <input type="hidden" name="token" value="{{ $token }}">
                        <div class="form-group">
                            <label class="col-md-4 control-label">E-Mail Address</label>
                            <div class="col-md-6">
                                <input type="email" class="form-control" name="email" value="{{ old('email') }}">
                            </div>
                        </div>
                        <div class="form-group">
                            <label class="col-md-4 control-label">Password</label>
                            <div class="col-md-6">
                                <input type="password" class="form-control" name="password">
                            </div>
                        </div>
                        <div class="form-group">
                            <label class="col-md-4 control-label">Confirm Password</label>
                            <div class="col-md-6">
                                <input type="password" class="form-control" name="password_confirmation">
                            </div>
                        </div>
                        <div class="form-group">
                            <div class="col-md-6 col-md-offset-4">
                                <button type="submit" class="btn btn-primary">
                                    Reset Password
                                </button>
                            </div>
                        </div>
                    </form>
                </div>
            </div>
        </div>
    </div>
</div>
@endsection
|
2929004360/ruoyi-sign
| 4,062
|
ruoyi-common/src/main/java/com/ruoyi/common/core/domain/entity/SysDictData.java
|
package com.ruoyi.common.core.domain.entity;
import javax.validation.constraints.NotBlank;
import javax.validation.constraints.Size;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import com.ruoyi.common.annotation.Excel;
import com.ruoyi.common.annotation.Excel.ColumnType;
import com.ruoyi.common.constant.UserConstants;
import com.ruoyi.common.core.domain.BaseEntity;
/**
* 字典数据表 sys_dict_data
*
* @author ruoyi
*/
public class SysDictData extends BaseEntity
{
    private static final long serialVersionUID = 1L;

    /** Dictionary entry code (primary key). */
    @Excel(name = "字典编码", cellType = ColumnType.NUMERIC)
    private Long dictCode;

    /** Sort order within the dictionary type. */
    @Excel(name = "字典排序", cellType = ColumnType.NUMERIC)
    private Long dictSort;

    /** Display label of the entry. */
    @Excel(name = "字典标签")
    private String dictLabel;

    /** Stored value of the entry. */
    @Excel(name = "字典键值")
    private String dictValue;

    /** Dictionary type this entry belongs to. */
    @Excel(name = "字典类型")
    private String dictType;

    /** CSS class for custom styling. */
    private String cssClass;

    /** CSS class used when rendering in tables. */
    private String listClass;

    /** Whether this is the default entry (Y = yes, N = no). */
    @Excel(name = "是否默认", readConverterExp = "Y=是,N=否")
    private String isDefault;

    /** Status (0 = normal, 1 = disabled). */
    @Excel(name = "状态", readConverterExp = "0=正常,1=停用")
    private String status;

    /** @return the dictionary entry code */
    public Long getDictCode()
    {
        return dictCode;
    }

    /** @param dictCode the dictionary entry code */
    public void setDictCode(Long dictCode)
    {
        this.dictCode = dictCode;
    }

    /** @return the sort order within the dictionary type */
    public Long getDictSort()
    {
        return dictSort;
    }

    /** @param dictSort the sort order within the dictionary type */
    public void setDictSort(Long dictSort)
    {
        this.dictSort = dictSort;
    }

    /** @return the display label (validated: non-blank, max 100 chars) */
    @NotBlank(message = "字典标签不能为空")
    @Size(min = 0, max = 100, message = "字典标签长度不能超过100个字符")
    public String getDictLabel()
    {
        return dictLabel;
    }

    /** @param dictLabel the display label */
    public void setDictLabel(String dictLabel)
    {
        this.dictLabel = dictLabel;
    }

    /** @return the stored value (validated: non-blank, max 100 chars) */
    @NotBlank(message = "字典键值不能为空")
    @Size(min = 0, max = 100, message = "字典键值长度不能超过100个字符")
    public String getDictValue()
    {
        return dictValue;
    }

    /** @param dictValue the stored value */
    public void setDictValue(String dictValue)
    {
        this.dictValue = dictValue;
    }

    /** @return the dictionary type (validated: non-blank, max 100 chars) */
    @NotBlank(message = "字典类型不能为空")
    @Size(min = 0, max = 100, message = "字典类型长度不能超过100个字符")
    public String getDictType()
    {
        return dictType;
    }

    /** @param dictType the dictionary type */
    public void setDictType(String dictType)
    {
        this.dictType = dictType;
    }

    /** @return the custom CSS class (validated: max 100 chars) */
    @Size(min = 0, max = 100, message = "样式属性长度不能超过100个字符")
    public String getCssClass()
    {
        return cssClass;
    }

    /** @param cssClass the custom CSS class */
    public void setCssClass(String cssClass)
    {
        this.cssClass = cssClass;
    }

    /** @return the table-rendering CSS class */
    public String getListClass()
    {
        return listClass;
    }

    /** @param listClass the table-rendering CSS class */
    public void setListClass(String listClass)
    {
        this.listClass = listClass;
    }

    /** @return true when isDefault equals UserConstants.YES */
    public boolean getDefault()
    {
        return UserConstants.YES.equals(this.isDefault);
    }

    /** @return the raw default flag ("Y" or "N") */
    public String getIsDefault()
    {
        return isDefault;
    }

    /** @param isDefault the raw default flag */
    public void setIsDefault(String isDefault)
    {
        this.isDefault = isDefault;
    }

    /** @return the status flag (0 = normal, 1 = disabled) */
    public String getStatus()
    {
        return status;
    }

    /** @param status the status flag */
    public void setStatus(String status)
    {
        this.status = status;
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this,ToStringStyle.MULTI_LINE_STYLE)
            .append("dictCode", getDictCode())
            .append("dictSort", getDictSort())
            .append("dictLabel", getDictLabel())
            .append("dictValue", getDictValue())
            .append("dictType", getDictType())
            .append("cssClass", getCssClass())
            .append("listClass", getListClass())
            .append("isDefault", getIsDefault())
            .append("status", getStatus())
            .append("createBy", getCreateBy())
            .append("createTime", getCreateTime())
            .append("updateBy", getUpdateBy())
            .append("updateTime", getUpdateTime())
            .append("remark", getRemark())
            .toString();
    }
}
|
2977094657/BilibiliHistoryFetcher
| 8,405
|
scripts/system_resource_check.py
|
import platform
try:
import psutil
except ImportError:
# 如果无法导入psutil,使用print而不是logger避免循环引用
print("警告: 未安装psutil模块,无法检查系统资源。如需使用语音转文字功能,请安装psutil: pip install psutil")
psutil = None
# 避免循环引用,使用print而不是logger
# 只有在明确需要记录到日志文件时才导入logger
# from loguru import logger
# Minimum resource requirements for running speech-to-text
MIN_MEMORY_GB = 4      # minimum RAM (GB)
MIN_FREE_DISK_GB = 2   # minimum free disk space (GB)
MIN_CPU_CORES = 2      # minimum CPU core count

# Recommended resource levels
REC_MEMORY_GB = 8      # recommended RAM (GB)
REC_FREE_DISK_GB = 5   # recommended free disk space (GB)
REC_CPU_CORES = 4      # recommended CPU core count

# Cached result of check_system_resources(); avoids re-probing hardware
_cached_resource_check = None
def _conservative_result(limitation, error=None):
    """Build a worst-case resource report.

    Used when the probe cannot run (psutil missing) or raised unexpectedly.
    All metrics are zeroed and every capability flag is False, so callers
    refuse to start the speech-to-text pipeline.

    Args:
        limitation: reason text stored under summary["resource_limitation"].
        error: optional error text; when given it is exposed under the
            top-level "error" key, matching the original report layout.

    Returns:
        dict: a report with the same schema as check_system_resources().
    """
    report = {
        "os_info": {
            "name": platform.system(),
            "version": platform.version(),
            "is_linux": platform.system().lower() == "linux"
        },
        "memory": {
            "total_gb": 0,
            "available_gb": 0,
            "meets_minimum": False,
            "meets_recommended": False
        },
        "cpu": {
            "physical_cores": 0,
            "logical_cores": 0,
            "usage_percent": 0,
            "meets_minimum": False,
            "meets_recommended": False
        },
        "disk": {
            "free_gb": 0,
            "meets_minimum": False,
            "meets_recommended": False
        },
        "summary": {
            "has_minimum_resources": False,
            "has_recommended_resources": False,
            "can_run_speech_to_text": False,
            "resource_limitation": limitation
        }
    }
    if error is not None:
        # Keep "error" as the first key, as the original reports did.
        report = {"error": error, **report}
    return report


def check_system_resources():
    """Check whether the system meets the speech-to-text model requirements.

    Probes memory, CPU and disk via psutil; each probe independently falls
    back to a conservative value on failure. The result is computed once
    per process and cached in _cached_resource_check.

    Returns:
        dict: report with "os_info", "memory", "cpu", "disk" and "summary"
        sections; summary["can_run_speech_to_text"] is the overall verdict.
    """
    global _cached_resource_check

    # Reuse the cached report so repeated callers don't re-probe hardware.
    if _cached_resource_check is not None:
        return _cached_resource_check

    # Without psutil nothing can be measured; report conservatively.
    if psutil is None:
        _cached_resource_check = _conservative_result(
            "psutil模块未安装", error="psutil模块未安装")
        return _cached_resource_check

    try:
        os_name = platform.system()
        os_version = platform.version()

        # Memory probe -- fall back to zeros so a psutil failure cannot crash us.
        try:
            memory = psutil.virtual_memory()
            total_memory_gb = memory.total / (1024**3)        # bytes -> GB
            available_memory_gb = memory.available / (1024**3)
        except Exception as e:
            print(f"内存检查失败: {str(e)}")
            total_memory_gb = 0
            available_memory_gb = 0

        # CPU probe -- on failure assume zero cores and a fully-loaded CPU.
        try:
            cpu_cores = psutil.cpu_count(logical=False)       # physical cores (may be None)
            cpu_logical_cores = psutil.cpu_count(logical=True)
            # Short sampling interval to avoid blocking the caller.
            cpu_usage = psutil.cpu_percent(interval=0.1)
        except Exception as e:
            print(f"CPU检查失败: {str(e)}")
            cpu_cores = 0
            cpu_logical_cores = 0
            cpu_usage = 100

        # Disk probe on the root filesystem.
        try:
            disk_usage = psutil.disk_usage('/')
            free_disk_gb = disk_usage.free / (1024**3)
        except Exception as e:
            print(f"磁盘检查失败: {str(e)}")
            free_disk_gb = 0

        # psutil.cpu_count() can legitimately return None; coerce to 0 so
        # the threshold comparisons below cannot raise TypeError (which
        # previously collapsed a partially-successful probe into the
        # generic error path).
        effective_cores = cpu_cores or cpu_logical_cores or 0

        has_min_resources = (
            total_memory_gb >= MIN_MEMORY_GB and
            free_disk_gb >= MIN_FREE_DISK_GB and
            effective_cores >= MIN_CPU_CORES
        )
        has_recommended_resources = (
            total_memory_gb >= REC_MEMORY_GB and
            free_disk_gb >= REC_FREE_DISK_GB and
            effective_cores >= REC_CPU_CORES
        )

        result = {
            "os_info": {
                "name": os_name,
                "version": os_version,
                "is_linux": os_name.lower() == "linux"
            },
            "memory": {
                "total_gb": round(total_memory_gb, 2),
                "available_gb": round(available_memory_gb, 2),
                "meets_minimum": total_memory_gb >= MIN_MEMORY_GB,
                "meets_recommended": total_memory_gb >= REC_MEMORY_GB
            },
            "cpu": {
                "physical_cores": cpu_cores,
                "logical_cores": cpu_logical_cores,
                "usage_percent": cpu_usage,
                "meets_minimum": effective_cores >= MIN_CPU_CORES,
                "meets_recommended": effective_cores >= REC_CPU_CORES
            },
            "disk": {
                "free_gb": round(free_disk_gb, 2),
                "meets_minimum": free_disk_gb >= MIN_FREE_DISK_GB,
                "meets_recommended": free_disk_gb >= REC_FREE_DISK_GB
            },
            "summary": {
                "has_minimum_resources": has_min_resources,
                "has_recommended_resources": has_recommended_resources,
                "can_run_speech_to_text": has_min_resources
            }
        }

        # Linux deployments get stricter run-time checks: currently
        # *available* memory and current CPU load, not just installed capacity.
        if os_name.lower() == "linux":
            result["summary"]["can_run_speech_to_text"] = (
                has_min_resources and available_memory_gb >= MIN_MEMORY_GB)

            if cpu_usage > 80:  # CPU too busy right now
                result["summary"]["can_run_speech_to_text"] = False
                result["summary"]["resource_limitation"] = "CPU负载过高"
            elif available_memory_gb < MIN_MEMORY_GB:
                result["summary"]["can_run_speech_to_text"] = False
                result["summary"]["resource_limitation"] = "可用内存不足"
            elif free_disk_gb < MIN_FREE_DISK_GB:
                result["summary"]["can_run_speech_to_text"] = False
                result["summary"]["resource_limitation"] = "可用磁盘空间不足"
            elif not has_min_resources:
                result["summary"]["can_run_speech_to_text"] = False
                result["summary"]["resource_limitation"] = "系统资源不满足最低要求"

        _cached_resource_check = result
        return result

    except Exception as e:
        # print (not logger) to avoid a circular import -- see module header.
        print(f"检查系统资源时出错: {str(e)}")
        result = _conservative_result("检查资源时出错", error=str(e))
        _cached_resource_check = result
        return result
def can_import_faster_whisper():
    """Check whether faster_whisper can (and should) be imported.

    On Linux the import is skipped entirely when check_system_resources()
    says the machine cannot run speech-to-text.

    Returns:
        bool: True only when resources allow speech-to-text and the module
        imports cleanly; False otherwise. Never raises.
    """
    try:
        resources = check_system_resources()

        # On resource-starved Linux hosts, don't even attempt the import.
        if resources["os_info"]["is_linux"] and not resources["summary"]["can_run_speech_to_text"]:
            print(f"系统资源不足,不导入faster_whisper模块。限制原因: {resources.get('summary', {}).get('resource_limitation', '未知')}")
            return False

        # Probe import. Note: the original code also had an outer
        # `except ImportError` handler, which was unreachable because this
        # inner handler already catches it; that dead branch was removed.
        try:
            import faster_whisper  # noqa: F401 -- import probe only
            return True
        except ImportError:
            print("无法导入faster_whisper模块")
            return False
    except Exception as e:
        print(f"检查faster_whisper导入时出错: {str(e)}")
        return False
# Kept for API compatibility; torch itself is no longer used by this module.
def can_import_torch():
    """
    Backward-compatible alias retained from the torch-based implementation.

    Returns:
        bool: the result of can_import_faster_whisper()
    """
    return can_import_faster_whisper()
|
2929004360/ruoyi-sign
| 4,130
|
ruoyi-common/src/main/java/com/ruoyi/common/core/domain/entity/SysDept.java
|
package com.ruoyi.common.core.domain.entity;
import java.util.ArrayList;
import java.util.List;
import javax.validation.constraints.Email;
import javax.validation.constraints.NotBlank;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import com.ruoyi.common.core.domain.BaseEntity;
/**
 * Department entity mapped to table sys_dept.
 *
 * Plain JavaBean consumed via its getter/setter properties (validation
 * annotations live on the getters); departments form a tree through
 * parentId / ancestors / children.
 *
 * @author ruoyi
 */
public class SysDept extends BaseEntity
{
    private static final long serialVersionUID = 1L;

    /** Department ID */
    private Long deptId;

    /** Parent department ID */
    private Long parentId;

    /** Ancestor list: comma-separated chain of parent IDs from the root */
    private String ancestors;

    /** Department name */
    private String deptName;

    /** Display order */
    private Integer orderNum;

    /** Person in charge */
    private String leader;

    /** Contact phone number */
    private String phone;

    /** E-mail address */
    private String email;

    /** Department status: 0 = normal, 1 = disabled */
    private String status;

    /** Deletion flag: 0 = present, 2 = deleted */
    private String delFlag;

    /** Parent department name (transient display field, not a column) */
    private String parentName;

    /** Child departments (tree building) */
    private List<SysDept> children = new ArrayList<SysDept>();

    public Long getDeptId()
    {
        return deptId;
    }

    public void setDeptId(Long deptId)
    {
        this.deptId = deptId;
    }

    public Long getParentId()
    {
        return parentId;
    }

    public void setParentId(Long parentId)
    {
        this.parentId = parentId;
    }

    public String getAncestors()
    {
        return ancestors;
    }

    public void setAncestors(String ancestors)
    {
        this.ancestors = ancestors;
    }

    // Bean Validation constraints are declared on the getters so they are
    // picked up when the entity is validated as a request body.
    @NotBlank(message = "部门名称不能为空")
    @Size(min = 0, max = 30, message = "部门名称长度不能超过30个字符")
    public String getDeptName()
    {
        return deptName;
    }

    public void setDeptName(String deptName)
    {
        this.deptName = deptName;
    }

    @NotNull(message = "显示顺序不能为空")
    public Integer getOrderNum()
    {
        return orderNum;
    }

    public void setOrderNum(Integer orderNum)
    {
        this.orderNum = orderNum;
    }

    public String getLeader()
    {
        return leader;
    }

    public void setLeader(String leader)
    {
        this.leader = leader;
    }

    @Size(min = 0, max = 11, message = "联系电话长度不能超过11个字符")
    public String getPhone()
    {
        return phone;
    }

    public void setPhone(String phone)
    {
        this.phone = phone;
    }

    @Email(message = "邮箱格式不正确")
    @Size(min = 0, max = 50, message = "邮箱长度不能超过50个字符")
    public String getEmail()
    {
        return email;
    }

    public void setEmail(String email)
    {
        this.email = email;
    }

    public String getStatus()
    {
        return status;
    }

    public void setStatus(String status)
    {
        this.status = status;
    }

    public String getDelFlag()
    {
        return delFlag;
    }

    public void setDelFlag(String delFlag)
    {
        this.delFlag = delFlag;
    }

    public String getParentName()
    {
        return parentName;
    }

    public void setParentName(String parentName)
    {
        this.parentName = parentName;
    }

    public List<SysDept> getChildren()
    {
        return children;
    }

    public void setChildren(List<SysDept> children)
    {
        this.children = children;
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this,ToStringStyle.MULTI_LINE_STYLE)
            .append("deptId", getDeptId())
            .append("parentId", getParentId())
            .append("ancestors", getAncestors())
            .append("deptName", getDeptName())
            .append("orderNum", getOrderNum())
            .append("leader", getLeader())
            .append("phone", getPhone())
            .append("email", getEmail())
            .append("status", getStatus())
            .append("delFlag", getDelFlag())
            .append("createBy", getCreateBy())
            .append("createTime", getCreateTime())
            .append("updateBy", getUpdateBy())
            .append("updateTime", getUpdateTime())
            .toString();
    }
}
|
281677160/openwrt-package
| 50,194
|
luci-app-homeproxy/htdocs/luci-static/resources/view/homeproxy/node.js
|
/*
* SPDX-License-Identifier: GPL-2.0-only
*
* Copyright (C) 2022-2025 ImmortalWrt.org
*/
'use strict';
'require form';
'require fs';
'require uci';
'require ui';
'require view';
'require homeproxy as hp';
'require tools.widgets as widgets';
/* Ask the user to confirm before enabling "Allow insecure"; if the
 * confirmation dialog is dismissed, revert the checkbox to unchecked. */
function allowInsecureConfirm(ev, _section_id, value) {
	if (value !== '1')
		return;

	if (!confirm(_('Are you sure to allow insecure?')))
		ev.target.firstElementChild.checked = null;
}
/* Parse a proxy share link into a homeproxy node configuration.
 *
 * uri:      the full share link, e.g. 'vless://...'.
 * features: sing-box feature flags (with_quic, with_utls, with_grpc, ...)
 *           used to reject protocols the local binary cannot handle.
 *
 * Returns a plain config object on success, or a falsy value (null /
 * undefined) when the link is malformed or unsupported.
 *
 * Fix: the socks case previously listed 'socsk5' (typo), so standard
 * 'socks5://' links were silently rejected; corrected to 'socks5'. */
function parseShareLink(uri, features) {
	let config, url, params;

	uri = uri.split('://');
	if (uri[0] && uri[1]) {
		switch (uri[0]) {
		case 'anytls':
			/* https://github.com/anytls/anytls-go/blob/v0.0.8/docs/uri_scheme.md */
			url = new URL('http://' + uri[1]);
			params = url.searchParams;

			/* Check if password exists */
			if (!url.username)
				return null;

			config = {
				label: url.hash ? decodeURIComponent(url.hash.slice(1)) : null,
				type: 'anytls',
				address: url.hostname,
				port: url.port || '80',
				password: url.username ? decodeURIComponent(url.username) : null,
				tls: '1',
				tls_sni: params.get('sni'),
				tls_insecure: (params.get('insecure') === '1') ? '1' : '0'
			};

			break;
		case 'http':
		case 'https':
			url = new URL('http://' + uri[1]);

			config = {
				label: url.hash ? decodeURIComponent(url.hash.slice(1)) : null,
				type: 'http',
				address: url.hostname,
				port: url.port || '80',
				username: url.username ? decodeURIComponent(url.username) : null,
				password: url.password ? decodeURIComponent(url.password) : null,
				tls: (uri[0] === 'https') ? '1' : '0'
			};

			break;
		case 'hysteria':
			/* https://github.com/HyNetwork/hysteria/wiki/URI-Scheme */
			url = new URL('http://' + uri[1]);
			params = url.searchParams;

			/* WeChat-Video / FakeTCP are unsupported by sing-box currently */
			if (!features.with_quic || (params.get('protocol') && params.get('protocol') !== 'udp'))
				return null;

			config = {
				label: url.hash ? decodeURIComponent(url.hash.slice(1)) : null,
				type: 'hysteria',
				address: url.hostname,
				port: url.port || '80',
				hysteria_protocol: params.get('protocol') || 'udp',
				hysteria_auth_type: params.get('auth') ? 'string' : null,
				hysteria_auth_payload: params.get('auth'),
				hysteria_obfs_password: params.get('obfsParam'),
				hysteria_down_mbps: params.get('downmbps'),
				hysteria_up_mbps: params.get('upmbps'),
				tls: '1',
				tls_sni: params.get('peer'),
				tls_alpn: params.get('alpn'),
				tls_insecure: (params.get('insecure') === '1') ? '1' : '0'
			};

			break;
		case 'hysteria2':
		case 'hy2':
			/* https://v2.hysteria.network/docs/developers/URI-Scheme/ */
			url = new URL('http://' + uri[1]);
			params = url.searchParams;

			if (!features.with_quic)
				return null;

			config = {
				label: url.hash ? decodeURIComponent(url.hash.slice(1)) : null,
				type: 'hysteria2',
				address: url.hostname,
				port: url.port || '80',
				/* userinfo may be 'password' or 'user:password'; keep both parts */
				password: url.username ? (
					decodeURIComponent(url.username + (url.password ? (':' + url.password) : ''))
				) : null,
				hysteria_obfs_type: params.get('obfs'),
				hysteria_obfs_password: params.get('obfs-password'),
				tls: '1',
				tls_sni: params.get('sni'),
				tls_insecure: params.get('insecure') ? '1' : '0'
			};

			break;
		case 'socks':
		case 'socks4':
		case 'socks4a':
		case 'socks5':	/* was 'socsk5' (typo) — socks5:// links were rejected */
		case 'socks5h':
			url = new URL('http://' + uri[1]);

			config = {
				label: url.hash ? decodeURIComponent(url.hash.slice(1)) : null,
				type: 'socks',
				address: url.hostname,
				port: url.port || '80',
				username: url.username ? decodeURIComponent(url.username) : null,
				password: url.password ? decodeURIComponent(url.password) : null,
				socks_version: (uri[0].includes('4')) ? '4' : '5'
			};

			break;
		case 'ss':
			try {
				/* "Lovely" Shadowrocket format */
				try {
					let suri = uri[1].split('#'), slabel = '';
					if (suri.length <= 2) {
						if (suri.length === 2)
							slabel = '#' + suri[1];
						uri[1] = hp.decodeBase64Str(suri[0]) + slabel;
					}
				} catch(e) { }

				/* SIP002 format https://shadowsocks.org/guide/sip002.html */
				url = new URL('http://' + uri[1]);

				let userinfo;
				if (url.username && url.password) {
					/* User info encoded with URIComponent */
					userinfo = [url.username, decodeURIComponent(url.password)];
				} else if (url.username) {
					/* User info encoded with base64 */
					userinfo = hp.decodeBase64Str(decodeURIComponent(url.username)).split(':');
					if (userinfo.length > 1)
						userinfo = [userinfo[0], userinfo.slice(1).join(':')]
				}

				/* If userinfo is missing this throws and we fall back to the
				 * legacy format parser in the catch block below. */
				if (!hp.shadowsocks_encrypt_methods.includes(userinfo[0]))
					return null;

				let plugin, plugin_opts;
				if (url.search && url.searchParams.get('plugin')) {
					let plugin_info = url.searchParams.get('plugin').split(';');
					plugin = plugin_info[0];
					plugin_opts = (plugin_info.length > 1) ? plugin_info.slice(1).join(';') : null;
				}

				config = {
					label: url.hash ? decodeURIComponent(url.hash.slice(1)) : null,
					type: 'shadowsocks',
					address: url.hostname,
					port: url.port || '80',
					shadowsocks_encrypt_method: userinfo[0],
					password: userinfo[1],
					shadowsocks_plugin: plugin,
					shadowsocks_plugin_opts: plugin_opts
				};
			} catch(e) {
				/* Legacy format https://github.com/shadowsocks/shadowsocks-org/commit/78ca46cd6859a4e9475953ed34a2d301454f579e */
				uri = uri[1].split('@');
				if (uri.length < 2)
					return null;
				else if (uri.length > 2)
					uri = [ uri.slice(0, -1).join('@'), uri.slice(-1).toString() ];

				config = {
					type: 'shadowsocks',
					address: uri[1].split(':')[0],
					port: uri[1].split(':')[1],
					shadowsocks_encrypt_method: uri[0].split(':')[0],
					password: uri[0].split(':').slice(1).join(':')
				};
			}

			break;
		case 'trojan':
			/* https://p4gefau1t.github.io/trojan-go/developer/url/ */
			url = new URL('http://' + uri[1]);
			params = url.searchParams;

			/* Check if password exists */
			if (!url.username)
				return null;

			config = {
				label: url.hash ? decodeURIComponent(url.hash.slice(1)) : null,
				type: 'trojan',
				address: url.hostname,
				port: url.port || '80',
				password: decodeURIComponent(url.username),
				transport: params.get('type') !== 'tcp' ? params.get('type') : null,
				tls: '1',
				tls_sni: params.get('sni')
			};
			switch (params.get('type')) {
			case 'grpc':
				config.grpc_servicename = params.get('serviceName');
				break;
			case 'ws':
				config.ws_host = params.get('host') ? decodeURIComponent(params.get('host')) : null;
				config.ws_path = params.get('path') ? decodeURIComponent(params.get('path')) : null;
				if (config.ws_path && config.ws_path.includes('?ed=')) {
					config.websocket_early_data_header = 'Sec-WebSocket-Protocol';
					config.websocket_early_data = config.ws_path.split('?ed=')[1];
					config.ws_path = config.ws_path.split('?ed=')[0];
				}
				break;
			}

			break;
		case 'tuic':
			/* https://github.com/daeuniverse/dae/discussions/182 */
			url = new URL('http://' + uri[1]);
			params = url.searchParams;

			/* Check if uuid exists */
			if (!url.username)
				return null;

			config = {
				label: url.hash ? decodeURIComponent(url.hash.slice(1)) : null,
				type: 'tuic',
				address: url.hostname,
				port: url.port || '80',
				uuid: url.username,
				password: url.password ? decodeURIComponent(url.password) : null,
				tuic_congestion_control: params.get('congestion_control'),
				tuic_udp_relay_mode: params.get('udp_relay_mode'),
				tls: '1',
				tls_sni: params.get('sni'),
				tls_alpn: params.get('alpn') ? decodeURIComponent(params.get('alpn')).split(',') : null
			};

			break;
		case 'vless':
			/* https://github.com/XTLS/Xray-core/discussions/716 */
			url = new URL('http://' + uri[1]);
			params = url.searchParams;

			/* Unsupported protocol */
			if (params.get('type') === 'kcp')
				return null;
			else if (params.get('type') === 'quic' && ((params.get('quicSecurity') && params.get('quicSecurity') !== 'none') || !features.with_quic))
				return null;

			/* Check if uuid and type exist */
			if (!url.username || !params.get('type'))
				return null;

			config = {
				label: url.hash ? decodeURIComponent(url.hash.slice(1)) : null,
				type: 'vless',
				address: url.hostname,
				port: url.port || '80',
				uuid: url.username,
				transport: params.get('type') !== 'tcp' ? params.get('type') : null,
				tls: ['tls', 'xtls', 'reality'].includes(params.get('security')) ? '1' : '0',
				tls_sni: params.get('sni'),
				tls_alpn: params.get('alpn') ? decodeURIComponent(params.get('alpn')).split(',') : null,
				tls_reality: (params.get('security') === 'reality') ? '1' : '0',
				tls_reality_public_key: params.get('pbk') ? decodeURIComponent(params.get('pbk')) : null,
				tls_reality_short_id: params.get('sid'),
				tls_utls: features.with_utls ? params.get('fp') : null,
				vless_flow: ['tls', 'reality'].includes(params.get('security')) ? params.get('flow') : null
			};
			switch (params.get('type')) {
			case 'grpc':
				config.grpc_servicename = params.get('serviceName');
				break;
			case 'http':
			case 'tcp':
				if (config.transport === 'http' || params.get('headerType') === 'http') {
					config.http_host = params.get('host') ? decodeURIComponent(params.get('host')).split(',') : null;
					config.http_path = params.get('path') ? decodeURIComponent(params.get('path')) : null;
				}
				break;
			case 'httpupgrade':
				config.httpupgrade_host = params.get('host') ? decodeURIComponent(params.get('host')) : null;
				config.http_path = params.get('path') ? decodeURIComponent(params.get('path')) : null;
				break;
			case 'ws':
				config.ws_host = params.get('host') ? decodeURIComponent(params.get('host')) : null;
				config.ws_path = params.get('path') ? decodeURIComponent(params.get('path')) : null;
				if (config.ws_path && config.ws_path.includes('?ed=')) {
					config.websocket_early_data_header = 'Sec-WebSocket-Protocol';
					config.websocket_early_data = config.ws_path.split('?ed=')[1];
					config.ws_path = config.ws_path.split('?ed=')[0];
				}
				break;
			}

			break;
		case 'vmess':
			/* "Lovely" shadowrocket format */
			/* NOTE(review): uri is an array here, so this only matches when a
			 * whole element equals '&' — presumably meant to detect '&' inside
			 * the payload; confirm intent upstream before changing. */
			if (uri.includes('&'))
				return null;

			/* https://github.com/2dust/v2rayN/wiki/Description-of-VMess-share-link */
			uri = JSON.parse(hp.decodeBase64Str(uri[1]));

			if (uri.v != '2')
				return null;
			/* Unsupported protocols */
			else if (uri.net === 'kcp')
				return null;
			else if (uri.net === 'quic' && ((uri.type && uri.type !== 'none') || !features.with_quic))
				return null;
			/* https://www.v2fly.org/config/protocols/vmess.html#vmess-md5-%E8%AE%A4%E8%AF%81%E4%BF%A1%E6%81%AF-%E6%B7%98%E6%B1%B0%E6%9C%BA%E5%88%B6
			 * else if (uri.aid && parseInt(uri.aid) !== 0)
			 * 	return null;
			 */

			config = {
				label: uri.ps,
				type: 'vmess',
				address: uri.add,
				port: uri.port,
				uuid: uri.id,
				vmess_alterid: uri.aid,
				vmess_encrypt: uri.scy || 'auto',
				transport: (uri.net !== 'tcp') ? uri.net : null,
				tls: uri.tls === 'tls' ? '1' : '0',
				tls_sni: uri.sni || uri.host,
				tls_alpn: uri.alpn ? uri.alpn.split(',') : null,
				tls_utls: features.with_utls ? uri.fp : null
			};
			switch (uri.net) {
			case 'grpc':
				config.grpc_servicename = uri.path;
				break;
			case 'h2':
			case 'tcp':
				if (uri.net === 'h2' || uri.type === 'http') {
					config.transport = 'http';
					config.http_host = uri.host ? uri.host.split(',') : null;
					config.http_path = uri.path;
				}
				break;
			case 'httpupgrade':
				config.httpupgrade_host = uri.host;
				config.http_path = uri.path;
				break;
			case 'ws':
				config.ws_host = uri.host;
				config.ws_path = uri.path;
				if (config.ws_path && config.ws_path.includes('?ed=')) {
					config.websocket_early_data_header = 'Sec-WebSocket-Protocol';
					config.websocket_early_data = config.ws_path.split('?ed=')[1];
					config.ws_path = config.ws_path.split('?ed=')[0];
				}
				break;
			}

			break;
		}
	}

	if (config) {
		/* A usable node needs at least a reachable endpoint */
		if (!config.address || !config.port)
			return null;
		else if (!config.label)
			config.label = config.address + ':' + config.port;

		/* Strip IPv6 brackets from the hostname */
		config.address = config.address.replace(/\[|\]/g, '');
	}

	return config;
}
function renderNodeSettings(section, data, features, main_node, routing_mode) {
let s = section, o;
s.rowcolors = true;
s.sortable = true;
s.nodescriptions = true;
s.modaltitle = L.bind(hp.loadModalTitle, this, _('Node'), _('Add a node'), data[0]);
s.sectiontitle = L.bind(hp.loadDefaultLabel, this, data[0]);
if (routing_mode !== 'custom') {
o = s.option(form.Button, '_apply', _('Apply'));
o.editable = true;
o.modalonly = false;
o.inputstyle = 'apply';
o.inputtitle = function(section_id) {
if (main_node == section_id) {
this.readonly = true;
return _('Applied');
} else {
this.readonly = false;
return _('Apply');
}
}
o.onclick = function(ev, section_id) {
uci.set(data[0], 'config', 'main_node', section_id);
return this.map.save(null, true).then(() => {
ui.changes.apply(true);
});
}
}
o = s.option(form.Value, 'label', _('Label'));
o.load = L.bind(hp.loadDefaultLabel, this, data[0]);
o.validate = L.bind(hp.validateUniqueValue, this, data[0], 'node', 'label');
o.modalonly = true;
o = s.option(form.ListValue, 'type', _('Type'));
o.value('direct', _('Direct'));
o.value('anytls', _('AnyTLS'));
o.value('http', _('HTTP'));
if (features.with_quic) {
o.value('hysteria', _('Hysteria'));
o.value('hysteria2', _('Hysteria2'));
}
o.value('shadowsocks', _('Shadowsocks'));
o.value('shadowtls', _('ShadowTLS'));
o.value('socks', _('Socks'));
o.value('ssh', _('SSH'));
o.value('trojan', _('Trojan'));
if (features.with_quic)
o.value('tuic', _('Tuic'));
if (features.with_wireguard && features.with_gvisor)
o.value('wireguard', _('WireGuard'));
o.value('vless', _('VLESS'));
o.value('vmess', _('VMess'));
o.rmempty = false;
o = s.option(form.Value, 'address', _('Address'));
o.datatype = 'host';
o.depends({'type': 'direct', '!reverse': true});
o.rmempty = false;
o = s.option(form.Value, 'port', _('Port'));
o.datatype = 'port';
o.depends({'type': 'direct', '!reverse': true});
o.rmempty = false;
o = s.option(form.Value, 'username', _('Username'));
o.depends('type', 'http');
o.depends('type', 'socks');
o.depends('type', 'ssh');
o.modalonly = true;
o = s.option(form.Value, 'password', _('Password'));
o.password = true;
o.depends('type', 'anytls');
o.depends('type', 'http');
o.depends('type', 'hysteria2');
o.depends('type', 'shadowsocks');
o.depends('type', 'ssh');
o.depends('type', 'trojan');
o.depends('type', 'tuic');
o.depends({'type': 'shadowtls', 'shadowtls_version': '2'});
o.depends({'type': 'shadowtls', 'shadowtls_version': '3'});
o.depends({'type': 'socks', 'socks_version': '5'});
o.validate = function(section_id, value) {
if (section_id) {
let type = this.section.formvalue(section_id, 'type');
let required_type = [ 'anytls', 'shadowsocks', 'shadowtls', 'trojan' ];
if (required_type.includes(type)) {
if (type === 'shadowsocks') {
let encmode = this.section.formvalue(section_id, 'shadowsocks_encrypt_method');
if (encmode === 'none')
return true;
}
if (!value)
return _('Expecting: %s').format(_('non-empty value'));
}
}
return true;
}
o.modalonly = true;
/* Direct config */
o = s.option(form.ListValue, 'proxy_protocol', _('Proxy protocol'),
_('Write proxy protocol in the connection header.'));
o.value('', _('Disable'));
o.value('1', _('v1'));
o.value('2', _('v2'));
o.depends('type', 'direct');
o.modalonly = true;
/* AnyTLS config start */
o = s.option(form.Value, 'anytls_idle_session_check_interval', _('Idle session check interval'),
_('Interval checking for idle sessions, in seconds.'));
o.datatype = 'uinteger';
o.placeholder = '30';
o.depends('type', 'anytls');
o.modalonly = true;
o = s.option(form.Value, 'anytls_idle_session_timeout', _('Idle session check timeout'),
_('In the check, close sessions that have been idle for longer than this, in seconds.'));
o.datatype = 'uinteger';
o.placeholder = '30';
o.depends('type', 'anytls');
o.modalonly = true;
o = s.option(form.Value, 'anytls_min_idle_session', _('Minimum idle sessions'),
_('In the check, at least the first <code>n</code> idle sessions are kept open.'));
o.datatype = 'uinteger';
o.placeholder = '0';
o.depends('type', 'anytls');
o.modalonly = true;
/* AnyTLS config end */
/* Hysteria (2) config start */
o = s.option(form.DynamicList, 'hysteria_hopping_port', _('Hopping port'));
o.depends('type', 'hysteria');
o.depends('type', 'hysteria2');
o.validate = hp.validatePortRange;
o.modalonly = true;
o = s.option(form.Value, 'hysteria_hop_interval', _('Hop interval'),
_('Port hopping interval in seconds.'));
o.datatype = 'uinteger';
o.placeholder = '30';
o.depends({'type': 'hysteria', 'hysteria_hopping_port': /[\s\S]/});
o.depends({'type': 'hysteria2', 'hysteria_hopping_port': /[\s\S]/});
o.modalonly = true;
o = s.option(form.ListValue, 'hysteria_protocol', _('Protocol'));
o.value('udp');
/* WeChat-Video / FakeTCP are unsupported by sing-box currently
* o.value('wechat-video');
* o.value('faketcp');
*/
o.default = 'udp';
o.depends('type', 'hysteria');
o.rmempty = false;
o.modalonly = true;
o = s.option(form.ListValue, 'hysteria_auth_type', _('Authentication type'));
o.value('', _('Disable'));
o.value('base64', _('Base64'));
o.value('string', _('String'));
o.depends('type', 'hysteria');
o.modalonly = true;
o = s.option(form.Value, 'hysteria_auth_payload', _('Authentication payload'));
o.password = true
o.depends({'type': 'hysteria', 'hysteria_auth_type': /[\s\S]/});
o.rmempty = false;
o.modalonly = true;
o = s.option(form.ListValue, 'hysteria_obfs_type', _('Obfuscate type'));
o.value('', _('Disable'));
o.value('salamander', _('Salamander'));
o.depends('type', 'hysteria2');
o.modalonly = true;
o = s.option(form.Value, 'hysteria_obfs_password', _('Obfuscate password'));
o.password = true;
o.depends('type', 'hysteria');
o.depends({'type': 'hysteria2', 'hysteria_obfs_type': /[\s\S]/});
o.modalonly = true;
o = s.option(form.Value, 'hysteria_down_mbps', _('Max download speed'),
_('Max download speed in Mbps.'));
o.datatype = 'uinteger';
o.depends('type', 'hysteria');
o.depends('type', 'hysteria2');
o.modalonly = true;
o = s.option(form.Value, 'hysteria_up_mbps', _('Max upload speed'),
_('Max upload speed in Mbps.'));
o.datatype = 'uinteger';
o.depends('type', 'hysteria');
o.depends('type', 'hysteria2');
o.modalonly = true;
o = s.option(form.Value, 'hysteria_recv_window_conn', _('QUIC stream receive window'),
_('The QUIC stream-level flow control window for receiving data.'));
o.datatype = 'uinteger';
o.depends('type', 'hysteria');
o.modalonly = true;
o = s.option(form.Value, 'hysteria_revc_window', _('QUIC connection receive window'),
_('The QUIC connection-level flow control window for receiving data.'));
o.datatype = 'uinteger';
o.depends('type', 'hysteria');
o.modalonly = true;
o = s.option(form.Flag, 'hysteria_disable_mtu_discovery', _('Disable Path MTU discovery'),
_('Disables Path MTU Discovery (RFC 8899). Packets will then be at most 1252 (IPv4) / 1232 (IPv6) bytes in size.'));
o.depends('type', 'hysteria');
o.modalonly = true;
/* Hysteria (2) config end */
/* Shadowsocks config start */
o = s.option(form.ListValue, 'shadowsocks_encrypt_method', _('Encrypt method'));
for (let i of hp.shadowsocks_encrypt_methods)
o.value(i);
/* Stream ciphers */
o.value('aes-128-ctr');
o.value('aes-192-ctr');
o.value('aes-256-ctr');
o.value('aes-128-cfb');
o.value('aes-192-cfb');
o.value('aes-256-cfb');
o.value('chacha20');
o.value('chacha20-ietf');
o.value('rc4-md5');
o.default = 'aes-128-gcm';
o.depends('type', 'shadowsocks');
o.rmempty = false;
o.modalonly = true;
o = s.option(form.ListValue, 'shadowsocks_plugin', _('Plugin'));
o.value('', _('none'));
o.value('obfs-local');
o.value('v2ray-plugin');
o.depends('type', 'shadowsocks');
o.modalonly = true;
o = s.option(form.Value, 'shadowsocks_plugin_opts', _('Plugin opts'));
o.depends('shadowsocks_plugin', 'obfs-local');
o.depends('shadowsocks_plugin', 'v2ray-plugin');
o.modalonly = true;
/* Shadowsocks config end */
/* ShadowTLS config */
o = s.option(form.ListValue, 'shadowtls_version', _('ShadowTLS version'));
o.value('1', _('v1'));
o.value('2', _('v2'));
o.value('3', _('v3'));
o.default = '1';
o.depends('type', 'shadowtls');
o.rmempty = false;
o.modalonly = true;
/* Socks config */
o = s.option(form.ListValue, 'socks_version', _('Socks version'));
o.value('4', _('Socks4'));
o.value('4a', _('Socks4A'));
o.value('5', _('Socks5'));
o.default = '5';
o.depends('type', 'socks');
o.rmempty = false;
o.modalonly = true;
/* SSH config start */
o = s.option(form.Value, 'ssh_client_version', _('Client version'),
_('Random version will be used if empty.'));
o.depends('type', 'ssh');
o.modalonly = true;
o = s.option(form.DynamicList, 'ssh_host_key', _('Host key'),
_('Accept any if empty.'));
o.depends('type', 'ssh');
o.modalonly = true;
o = s.option(form.DynamicList, 'ssh_host_key_algo', _('Host key algorithms'))
o.depends('type', 'ssh');
o.modalonly = true;
o = s.option(form.DynamicList, 'ssh_priv_key', _('Private key'));
o.password = true;
o.depends('type', 'ssh');
o.modalonly = true;
o = s.option(form.Value, 'ssh_priv_key_pp', _('Private key passphrase'));
o.password = true;
o.depends('type', 'ssh');
o.modalonly = true;
/* SSH config end */
/* TUIC config start */
o = s.option(form.Value, 'uuid', _('UUID'));
o.password = true;
o.depends('type', 'tuic');
o.depends('type', 'vless');
o.depends('type', 'vmess');
o.validate = hp.validateUUID;
o.modalonly = true;
o = s.option(form.ListValue, 'tuic_congestion_control', _('Congestion control algorithm'),
_('QUIC congestion control algorithm.'));
o.value('cubic', _('CUBIC'));
o.value('new_reno', _('New Reno'));
o.value('bbr', _('BBR'));
o.default = 'cubic';
o.depends('type', 'tuic');
o.rmempty = false;
o.modalonly = true;
o = s.option(form.ListValue, 'tuic_udp_relay_mode', _('UDP relay mode'),
_('UDP packet relay mode.'));
o.value('', _('Default'));
o.value('native', _('Native'));
o.value('quic', _('QUIC'));
o.depends('type', 'tuic');
o.modalonly = true;
o = s.option(form.Flag, 'tuic_udp_over_stream', _('UDP over stream'),
_('This is the TUIC port of the UDP over TCP protocol, designed to provide a QUIC stream based UDP relay mode that TUIC does not provide.'));
o.depends({'type': 'tuic','tuic_udp_relay_mode': ''});
o.modalonly = true;
o = s.option(form.Flag, 'tuic_enable_zero_rtt', _('Enable 0-RTT handshake'),
_('Enable 0-RTT QUIC connection handshake on the client side. This is not impacting much on the performance, as the protocol is fully multiplexed.<br/>' +
'Disabling this is highly recommended, as it is vulnerable to replay attacks.'));
o.depends('type', 'tuic');
o.modalonly = true;
o = s.option(form.Value, 'tuic_heartbeat', _('Heartbeat interval'),
_('Interval for sending heartbeat packets for keeping the connection alive (in seconds).'));
o.datatype = 'uinteger';
o.default = '10';
o.depends('type', 'tuic');
o.modalonly = true;
/* Tuic config end */
/* VMess / VLESS config start */
o = s.option(form.ListValue, 'vless_flow', _('Flow'));
o.value('', _('None'));
o.value('xtls-rprx-vision');
o.depends('type', 'vless');
o.modalonly = true;
o = s.option(form.Value, 'vmess_alterid', _('Alter ID'),
_('Legacy protocol support (VMess MD5 Authentication) is provided for compatibility purposes only, use of alterId > 1 is not recommended.'));
o.datatype = 'uinteger';
o.depends('type', 'vmess');
o.modalonly = true;
o = s.option(form.ListValue, 'vmess_encrypt', _('Encrypt method'));
o.value('auto');
o.value('none');
o.value('zero');
o.value('aes-128-gcm');
o.value('chacha20-poly1305');
o.default = 'auto';
o.depends('type', 'vmess');
o.rmempty = false;
o.modalonly = true;
o = s.option(form.Flag, 'vmess_global_padding', _('Global padding'),
_('Protocol parameter. Will waste traffic randomly if enabled (enabled by default in v2ray and cannot be disabled).'));
o.default = o.enabled;
o.depends('type', 'vmess');
o.rmempty = false;
o.modalonly = true;
o = s.option(form.Flag, 'vmess_authenticated_length', _('Authenticated length'),
_('Protocol parameter. Enable length block encryption.'));
o.depends('type', 'vmess');
o.modalonly = true;
/* VMess config end */
/* Transport config start */
o = s.option(form.ListValue, 'transport', _('Transport'),
_('No TCP transport, plain HTTP is merged into the HTTP transport.'));
o.value('', _('None'));
o.value('grpc', _('gRPC'));
o.value('http', _('HTTP'));
o.value('httpupgrade', _('HTTPUpgrade'));
o.value('quic', _('QUIC'));
o.value('ws', _('WebSocket'));
o.depends('type', 'trojan');
o.depends('type', 'vless');
o.depends('type', 'vmess');
o.onchange = function(ev, section_id, value) {
let desc = this.map.findElement('id', 'cbid.homeproxy.%s.transport'.format(section_id)).nextElementSibling;
if (value === 'http')
desc.innerHTML = _('TLS is not enforced. If TLS is not configured, plain HTTP 1.1 is used.');
else if (value === 'quic')
desc.innerHTML = _('No additional encryption support: It\'s basically duplicate encryption.');
else
desc.innerHTML = _('No TCP transport, plain HTTP is merged into the HTTP transport.');
let tls = this.map.findElement('id', 'cbid.homeproxy.%s.tls'.format(section_id)).firstElementChild;
if ((value === 'http' && tls.checked) || (value === 'grpc' && !features.with_grpc)) {
this.map.findElement('id', 'cbid.homeproxy.%s.http_idle_timeout'.format(section_id)).nextElementSibling.innerHTML =
_('Specifies the period of time (in seconds) after which a health check will be performed using a ping frame if no frames have been received on the connection.<br/>' +
'Please note that a ping response is considered a received frame, so if there is no other traffic on the connection, the health check will be executed every interval.');
this.map.findElement('id', 'cbid.homeproxy.%s.http_ping_timeout'.format(section_id)).nextElementSibling.innerHTML =
_('Specifies the timeout duration (in seconds) after sending a PING frame, within which a response must be received.<br/>' +
'If a response to the PING frame is not received within the specified timeout duration, the connection will be closed.');
} else if (value === 'grpc' && features.with_grpc) {
this.map.findElement('id', 'cbid.homeproxy.%s.http_idle_timeout'.format(section_id)).nextElementSibling.innerHTML =
_('If the transport doesn\'t see any activity after a duration of this time (in seconds), it pings the client to check if the connection is still active.');
this.map.findElement('id', 'cbid.homeproxy.%s.http_ping_timeout'.format(section_id)).nextElementSibling.innerHTML =
_('The timeout (in seconds) that after performing a keepalive check, the client will wait for activity. If no activity is detected, the connection will be closed.');
}
}
o.modalonly = true;
/* gRPC config start */
o = s.option(form.Value, 'grpc_servicename', _('gRPC service name'));
o.depends('transport', 'grpc');
o.modalonly = true;
if (features.with_grpc) {
o = s.option(form.Flag, 'grpc_permit_without_stream', _('gRPC permit without stream'),
_('If enabled, the client transport sends keepalive pings even with no active connections.'));
o.depends('transport', 'grpc');
o.modalonly = true;
}
/* gRPC config end */
/* HTTP(Upgrade) config start */
o = s.option(form.DynamicList, 'http_host', _('Host'));
o.datatype = 'hostname';
o.depends('transport', 'http');
o.modalonly = true;
o = s.option(form.Value, 'httpupgrade_host', _('Host'));
o.datatype = 'hostname';
o.depends('transport', 'httpupgrade');
o.modalonly = true;
o = s.option(form.Value, 'http_path', _('Path'));
o.depends('transport', 'http');
o.depends('transport', 'httpupgrade');
o.modalonly = true;
o = s.option(form.Value, 'http_method', _('Method'));
o.value('GET', _('GET'));
o.value('PUT', _('PUT'));
o.depends('transport', 'http');
o.modalonly = true;
o = s.option(form.Value, 'http_idle_timeout', _('Idle timeout'),
_('Specifies the period of time (in seconds) after which a health check will be performed using a ping frame if no frames have been received on the connection.<br/>' +
'Please note that a ping response is considered a received frame, so if there is no other traffic on the connection, the health check will be executed every interval.'));
o.datatype = 'uinteger';
o.depends('transport', 'grpc');
o.depends({'transport': 'http', 'tls': '1'});
o.modalonly = true;
o = s.option(form.Value, 'http_ping_timeout', _('Ping timeout'),
_('Specifies the timeout duration (in seconds) after sending a PING frame, within which a response must be received.<br/>' +
'If a response to the PING frame is not received within the specified timeout duration, the connection will be closed.'));
o.datatype = 'uinteger';
o.depends('transport', 'grpc');
o.depends({'transport': 'http', 'tls': '1'});
o.modalonly = true;
/* HTTP config end */
/* WebSocket config start */
o = s.option(form.Value, 'ws_host', _('Host'));
o.depends('transport', 'ws');
o.modalonly = true;
o = s.option(form.Value, 'ws_path', _('Path'));
o.depends('transport', 'ws');
o.modalonly = true;
o = s.option(form.Value, 'websocket_early_data', _('Early data'),
_('Allowed payload size is in the request.'));
o.datatype = 'uinteger';
o.value('2048');
o.depends('transport', 'ws');
o.modalonly = true;
o = s.option(form.Value, 'websocket_early_data_header', _('Early data header name'));
o.value('Sec-WebSocket-Protocol');
o.depends('transport', 'ws');
o.modalonly = true;
/* WebSocket config end */
o = s.option(form.ListValue, 'packet_encoding', _('Packet encoding'));
o.value('', _('none'));
o.value('packetaddr', _('packet addr (v2ray-core v5+)'));
o.value('xudp', _('Xudp (Xray-core)'));
o.depends('type', 'vless');
o.depends('type', 'vmess');
o.modalonly = true;
/* Transport config end */
/* Wireguard config start */
o = s.option(form.DynamicList, 'wireguard_local_address', _('Local address'),
_('List of IP (v4 or v6) addresses prefixes to be assigned to the interface.'));
o.datatype = 'cidr';
o.depends('type', 'wireguard');
o.rmempty = false;
o.modalonly = true;
o = s.option(form.Value, 'wireguard_private_key', _('Private key'),
_('WireGuard requires base64-encoded private keys.'));
o.password = true;
o.depends('type', 'wireguard');
o.validate = L.bind(hp.validateBase64Key, this, 44);
o.rmempty = false;
o.modalonly = true;
o = s.option(form.Value, 'wireguard_peer_public_key', _('Peer pubkic key'),
_('WireGuard peer public key.'));
o.depends('type', 'wireguard');
o.validate = L.bind(hp.validateBase64Key, this, 44);
o.rmempty = false;
o.modalonly = true;
o = s.option(form.Value, 'wireguard_pre_shared_key', _('Pre-shared key'),
_('WireGuard pre-shared key.'));
o.password = true;
o.depends('type', 'wireguard');
o.validate = L.bind(hp.validateBase64Key, this, 44);
o.modalonly = true;
o = s.option(form.DynamicList, 'wireguard_reserved', _('Reserved field bytes'));
o.datatype = 'integer';
o.depends('type', 'wireguard');
o.modalonly = true;
o = s.option(form.Value, 'wireguard_mtu', _('MTU'));
o.datatype = 'range(0,9000)';
o.placeholder = '1408';
o.depends('type', 'wireguard');
o.modalonly = true;
o = s.option(form.Value, 'wireguard_persistent_keepalive_interval', _('Persistent keepalive interval'),
_('In seconds. Disabled by default.'));
o.datatype = 'uinteger';
o.depends('type', 'wireguard');
o.modalonly = true;
/* Wireguard config end */
/* Mux config start */
o = s.option(form.Flag, 'multiplex', _('Multiplex'));
o.depends('type', 'shadowsocks');
o.depends('type', 'trojan');
o.depends('type', 'vless');
o.depends('type', 'vmess');
o.modalonly = true;
o = s.option(form.ListValue, 'multiplex_protocol', _('Protocol'),
_('Multiplex protocol.'));
o.value('h2mux');
o.value('smux');
o.value('yamux');
o.default = 'h2mux';
o.depends('multiplex', '1');
o.rmempty = false;
o.modalonly = true;
o = s.option(form.Value, 'multiplex_max_connections', _('Maximum connections'));
o.datatype = 'uinteger';
o.depends('multiplex', '1');
o.modalonly = true;
o = s.option(form.Value, 'multiplex_min_streams', _('Minimum streams'),
_('Minimum multiplexed streams in a connection before opening a new connection.'));
o.datatype = 'uinteger';
o.depends('multiplex', '1');
o.modalonly = true;
o = s.option(form.Value, 'multiplex_max_streams', _('Maximum streams'),
_('Maximum multiplexed streams in a connection before opening a new connection.<br/>' +
'Conflict with <code>%s</code> and <code>%s</code>.').format(
_('Maximum connections'), _('Minimum streams')));
o.datatype = 'uinteger';
o.depends({'multiplex': '1', 'multiplex_max_connections': '', 'multiplex_min_streams': ''});
o.modalonly = true;
o = s.option(form.Flag, 'multiplex_padding', _('Enable padding'));
o.depends('multiplex', '1');
o.modalonly = true;
o = s.option(form.Flag, 'multiplex_brutal', _('Enable TCP Brutal'),
_('Enable TCP Brutal congestion control algorithm'));
o.depends('multiplex', '1');
o.modalonly = true;
o = s.option(form.Value, 'multiplex_brutal_down', _('Download bandwidth'),
_('Download bandwidth in Mbps.'));
o.datatype = 'uinteger';
o.depends('multiplex_brutal', '1');
o.modalonly = true;
o = s.option(form.Value, 'multiplex_brutal_up', _('Upload bandwidth'),
_('Upload bandwidth in Mbps.'));
o.datatype = 'uinteger';
o.depends('multiplex_brutal', '1');
o.modalonly = true;
/* Mux config end */
/* TLS config start */
o = s.option(form.Flag, 'tls', _('TLS'));
o.depends('type', 'anytls');
o.depends('type', 'http');
o.depends('type', 'hysteria');
o.depends('type', 'hysteria2');
o.depends('type', 'shadowtls');
o.depends('type', 'trojan');
o.depends('type', 'tuic');
o.depends('type', 'vless');
o.depends('type', 'vmess');
o.validate = function(section_id, _value) {
if (section_id) {
let type = this.map.lookupOption('type', section_id)[0].formvalue(section_id);
let tls = this.map.findElement('id', 'cbid.homeproxy.%s.tls'.format(section_id)).firstElementChild;
if (['anytls', 'hysteria', 'hysteria2', 'shadowtls', 'tuic'].includes(type)) {
tls.checked = true;
tls.disabled = true;
} else {
tls.disabled = null;
}
}
return true;
}
o.modalonly = true;
o = s.option(form.Value, 'tls_sni', _('TLS SNI'),
_('Used to verify the hostname on the returned certificates unless insecure is given.'));
o.depends('tls', '1');
o.modalonly = true;
o = s.option(form.DynamicList, 'tls_alpn', _('TLS ALPN'),
_('List of supported application level protocols, in order of preference.'));
o.depends('tls', '1');
o.modalonly = true;
o = s.option(form.Flag, 'tls_insecure', _('Allow insecure'),
_('Allow insecure connection at TLS client.') +
'<br/>' +
_('This is <strong>DANGEROUS</strong>, your traffic is almost like <strong>PLAIN TEXT</strong>! Use at your own risk!'));
o.depends('tls', '1');
o.onchange = allowInsecureConfirm;
o.modalonly = true;
o = s.option(form.ListValue, 'tls_min_version', _('Minimum TLS version'),
_('The minimum TLS version that is acceptable.'));
o.value('', _('default'));
for (let i of hp.tls_versions)
o.value(i);
o.depends('tls', '1');
o.modalonly = true;
o = s.option(form.ListValue, 'tls_max_version', _('Maximum TLS version'),
_('The maximum TLS version that is acceptable.'));
o.value('', _('default'));
for (let i of hp.tls_versions)
o.value(i);
o.depends('tls', '1');
o.modalonly = true;
o = s.option(hp.CBIStaticList, 'tls_cipher_suites', _('Cipher suites'),
_('The elliptic curves that will be used in an ECDHE handshake, in preference order. If empty, the default will be used.'));
for (let i of hp.tls_cipher_suites)
o.value(i);
o.depends('tls', '1');
o.optional = true;
o.modalonly = true;
o = s.option(form.Flag, 'tls_self_sign', _('Append self-signed certificate'),
_('If you have the root certificate, use this option instead of allowing insecure.'));
o.depends('tls_insecure', '0');
o.modalonly = true;
o = s.option(form.Value, 'tls_cert_path', _('Certificate path'),
_('The path to the server certificate, in PEM format.'));
o.value('/etc/homeproxy/certs/client_ca.pem');
o.depends('tls_self_sign', '1');
o.validate = hp.validateCertificatePath;
o.rmempty = false;
o.modalonly = true;
o = s.option(form.Button, '_upload_cert', _('Upload certificate'),
_('<strong>Save your configuration before uploading files!</strong>'));
o.inputstyle = 'action';
o.inputtitle = _('Upload...');
o.depends({'tls_self_sign': '1', 'tls_cert_path': '/etc/homeproxy/certs/client_ca.pem'});
o.onclick = L.bind(hp.uploadCertificate, this, _('certificate'), 'client_ca');
o.modalonly = true;
o = s.option(form.Flag, 'tls_ech', _('Enable ECH'),
_('ECH (Encrypted Client Hello) is a TLS extension that allows a client to encrypt the first part of its ClientHello message.'));
o.depends('tls', '1');
o.modalonly = true;
o = s.option(form.Value, 'tls_ech_config_path', _('ECH config path'),
_('The path to the ECH config, in PEM format. If empty, load from DNS will be attempted.'));
o.value('/etc/homeproxy/certs/client_ech_conf.pem');
o.depends('tls_ech', '1');
o.modalonly = true;
o = s.option(form.Button, '_upload_ech_config', _('Upload ECH config'),
_('<strong>Save your configuration before uploading files!</strong>'));
o.inputstyle = 'action';
o.inputtitle = _('Upload...');
o.depends({'tls_ech': '1', 'tls_ech_config_path': '/etc/homeproxy/certs/client_ech_conf.pem'});
o.onclick = L.bind(hp.uploadCertificate, this, _('ECH config'), 'client_ech_conf');
o.modalonly = true;
if (features.with_utls) {
o = s.option(form.ListValue, 'tls_utls', _('uTLS fingerprint'),
_('uTLS is a fork of "crypto/tls", which provides ClientHello fingerprinting resistance.'));
o.value('', _('Disable'));
o.value('360');
o.value('android');
o.value('chrome');
o.value('edge');
o.value('firefox');
o.value('ios');
o.value('qq');
o.value('random');
o.value('randomized');
o.value('safari');
o.depends({'tls': '1', 'type': /^((?!hysteria2?|tuic$).)+$/});
o.validate = function(section_id, value) {
if (section_id) {
let tls_reality = this.map.findElement('id', 'cbid.homeproxy.%s.tls_reality'.format(section_id)).firstElementChild;
if (tls_reality.checked && !value)
return _('Expecting: %s').format(_('non-empty value'));
let vless_flow = this.map.lookupOption('vless_flow', section_id)[0].formvalue(section_id);
if ((tls_reality.checked || vless_flow) && ['360', 'android'].includes(value))
return _('Unsupported fingerprint!');
}
return true;
}
o.modalonly = true;
o = s.option(form.Flag, 'tls_reality', _('REALITY'));
o.depends({'tls': '1', 'type': 'anytls'});
o.depends({'tls': '1', 'type': 'vless'});
o.modalonly = true;
o = s.option(form.Value, 'tls_reality_public_key', _('REALITY public key'));
o.password = true;
o.depends('tls_reality', '1');
o.rmempty = false;
o.modalonly = true;
o = s.option(form.Value, 'tls_reality_short_id', _('REALITY short ID'));
o.password = true;
o.depends('tls_reality', '1');
o.modalonly = true;
}
/* TLS config end */
/* Extra settings start */
o = s.option(form.Flag, 'tcp_fast_open', _('TCP fast open'));
o.modalonly = true;
o = s.option(form.Flag, 'tcp_multi_path', _('MultiPath TCP'));
o.modalonly = true;
o = s.option(form.Flag, 'udp_fragment', _('UDP Fragment'),
_('Enable UDP fragmentation.'));
o.modalonly = true;
o = s.option(form.Flag, 'udp_over_tcp', _('UDP over TCP'),
_('Enable the SUoT protocol, requires server support. Conflict with multiplex.'));
o.depends('type', 'socks');
o.depends({'type': 'shadowsocks', 'multiplex': '0'});
o.modalonly = true;
o = s.option(form.ListValue, 'udp_over_tcp_version', _('SUoT version'));
o.value('1', _('v1'));
o.value('2', _('v2'));
o.default = '2';
o.depends('udp_over_tcp', '1');
o.modalonly = true;
/* Extra settings end */
return s;
}
/* Page entry point: LuCI view for managing homeproxy nodes and subscriptions. */
return view.extend({
load() {
/* Load the homeproxy UCI config and probe sing-box build features in parallel. */
return Promise.all([
uci.load('homeproxy'),
hp.getBuiltinFeatures()
]);
},
render(data) {
/* data[0]: loaded UCI config name; data[1]: feature map from getBuiltinFeatures(). */
let m, s, o, ss, so;
let main_node = uci.get(data[0], 'config', 'main_node');
let routing_mode = uci.get(data[0], 'config', 'routing_mode');
let features = data[1];
/* Cache subscription information, it will be called multiple times */
let subinfo = [];
for (let suburl of (uci.get(data[0], 'subscription', 'subscription_url') || [])) {
const url = new URL(suburl);
/* Hash the URL with its #fragment stripped, so renaming the fragment keeps the same group hash. */
const urlhash = hp.calcStringMD5(suburl.replace(/#.*$/, ''));
/* The fragment (if any) is the user-facing subscription title; fall back to the hostname. */
const title = url.hash ? decodeURIComponent(url.hash.slice(1)) : url.hostname;
subinfo.push({ 'hash': urlhash, 'title': title });
}
m = new form.Map('homeproxy', _('Edit nodes'));
s = m.section(form.NamedSection, 'subscription', 'homeproxy');
/* Node settings start */
/* User nodes start */
s.tab('node', _('Nodes'));
o = s.taboption('node', form.SectionValue, '_node', form.GridSection, 'node');
ss = renderNodeSettings(o.subsection, data, features, main_node, routing_mode);
ss.addremove = true;
/* Show only manually added nodes here: hide nodes whose grouphash matches any subscription. */
ss.filter = function(section_id) {
for (let info of subinfo)
if (info.hash === uci.get(data[0], section_id, 'grouphash'))
return false;
return true;
}
/* Import subscription links start */
/* Thanks to luci-app-shadowsocks-libev */
ss.handleLinkImport = function() {
let textarea = new ui.Textarea();
ui.showModal(_('Import share links'), [
E('p', _('Support Hysteria, Shadowsocks, Trojan, v2rayN (VMess), and XTLS (VLESS) online configuration delivery standard.')),
textarea.render(),
E('div', { class: 'right' }, [
E('button', {
class: 'btn',
click: ui.hideModal
}, [ _('Cancel') ]),
'',
E('button', {
class: 'btn cbi-button-action',
click: ui.createHandlerFn(this, () => {
let input_links = textarea.getValue().trim().split('\n');
if (input_links && input_links[0]) {
/* Remove duplicate lines */
input_links = input_links.reduce((pre, cur) =>
(!pre.includes(cur) && pre.push(cur), pre), []);
let allow_insecure = uci.get(data[0], 'subscription', 'allow_insecure');
let packet_encoding = uci.get(data[0], 'subscription', 'packet_encoding');
let imported_node = 0;
input_links.forEach((l) => {
let config = parseShareLink(l, features);
if (config) {
/* Apply the subscription page defaults to each freshly imported node. */
if (config.tls === '1' && allow_insecure === '1')
config.tls_insecure = '1'
if (['vless', 'vmess'].includes(config.type))
config.packet_encoding = packet_encoding
/* Section ID is the MD5 of the node label, so re-importing the same link overwrites in place. */
let nameHash = hp.calcStringMD5(config.label);
let sid = uci.add(data[0], 'node', nameHash);
Object.keys(config).forEach((k) => {
uci.set(data[0], sid, k, config[k]);
});
imported_node++;
}
});
if (imported_node === 0)
ui.addNotification(null, E('p', _('No valid share link found.')));
else
ui.addNotification(null, E('p', _('Successfully imported %s nodes of total %s.').format(
imported_node, input_links.length)));
return uci.save()
.then(L.bind(this.map.load, this.map))
.then(L.bind(this.map.reset, this.map))
.then(L.ui.hideModal)
.catch(() => {});
} else {
return ui.hideModal();
}
})
}, [ _('Import') ])
])
])
}
/* Extend the stock "add section" row: validate the new section name and append an import button. */
ss.renderSectionAdd = function(/* ... */) {
let el = form.GridSection.prototype.renderSectionAdd.apply(this, arguments),
nameEl = el.querySelector('.cbi-section-create-name');
ui.addValidator(nameEl, 'uciname', true, (v) => {
let button = el.querySelector('.cbi-section-create > .cbi-button-add');
let uciconfig = this.uciconfig || this.map.config;
if (!v) {
button.disabled = true;
return true;
} else if (uci.get(uciconfig, v)) {
/* Reject IDs that collide with an existing UCI section. */
button.disabled = true;
return _('Expecting: %s').format(_('unique UCI identifier'));
} else {
button.disabled = null;
return true;
}
}, 'blur', 'keyup');
el.appendChild(E('button', {
'class': 'cbi-button cbi-button-add',
'title': _('Import share links'),
'click': ui.createHandlerFn(this, 'handleLinkImport')
}, [ _('Import share links') ]));
return el;
}
/* Import subscription links end */
/* User nodes end */
/* Subscription nodes start */
/* One tab per subscription, filtered down to nodes carrying that subscription's group hash. */
for (const info of subinfo) {
s.tab('sub_' + info.hash, _('Sub (%s)').format(info.title));
o = s.taboption('sub_' + info.hash, form.SectionValue, '_sub_' + info.hash, form.GridSection, 'node');
ss = renderNodeSettings(o.subsection, data, features, main_node, routing_mode);
ss.filter = function(section_id) {
return (uci.get(data[0], section_id, 'grouphash') === info.hash);
}
}
/* Subscription nodes end */
/* Node settings end */
/* Subscriptions settings start */
s.tab('subscription', _('Subscriptions'));
o = s.taboption('subscription', form.Flag, 'auto_update', _('Auto update'),
_('Auto update subscriptions and geodata.'));
o.rmempty = false;
o = s.taboption('subscription', form.ListValue, 'auto_update_time', _('Update time'));
for (let i = 0; i < 24; i++)
o.value(i, i + ':00');
o.default = '2';
o.depends('auto_update', '1');
o = s.taboption('subscription', form.Flag, 'update_via_proxy', _('Update via proxy'),
_('Update subscriptions via proxy.'));
o.rmempty = false;
o = s.taboption('subscription', form.DynamicList, 'subscription_url', _('Subscription URL-s'),
_('Support Hysteria, Shadowsocks, Trojan, v2rayN (VMess), and XTLS (VLESS) online configuration delivery standard.'));
/* Accept only strings the URL constructor parses and that carry a hostname. */
o.validate = function(section_id, value) {
if (section_id && value) {
try {
let url = new URL(value);
if (!url.hostname)
return _('Expecting: %s').format(_('valid URL'));
}
catch(e) {
return _('Expecting: %s').format(_('valid URL'));
}
}
return true;
}
o = s.taboption('subscription', form.ListValue, 'filter_nodes', _('Filter nodes'),
_('Drop/keep specific nodes from subscriptions.'));
o.value('disabled', _('Disable'));
o.value('blacklist', _('Blacklist mode'));
o.value('whitelist', _('Whitelist mode'));
o.default = 'disabled';
o.rmempty = false;
o = s.taboption('subscription', form.DynamicList, 'filter_keywords', _('Filter keywords'),
_('Drop/keep nodes that contain the specific keywords. <a target="_blank" href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions">Regex</a> is supported.'));
o.depends({'filter_nodes': 'disabled', '!reverse': true});
o.rmempty = false;
o = s.taboption('subscription', form.Value, 'user_agent', _('User-Agent'));
o.placeholder = 'Wget/1.21 (HomeProxy, like v2rayN)';
o = s.taboption('subscription', form.Flag, 'allow_insecure', _('Allow insecure'),
_('Allow insecure connection by default when add nodes from subscriptions.') +
'<br/>' +
_('This is <strong>DANGEROUS</strong>, your traffic is almost like <strong>PLAIN TEXT</strong>! Use at your own risk!'));
o.rmempty = false;
o.onchange = allowInsecureConfirm;
o = s.taboption('subscription', form.ListValue, 'packet_encoding', _('Default packet encoding'));
o.value('', _('none'));
o.value('packetaddr', _('packet addr (v2ray-core v5+)'));
o.value('xudp', _('Xudp (Xray-core)'));
o = s.taboption('subscription', form.Button, '_save_subscriptions', _('Save subscriptions settings'),
_('NOTE: Save current settings before updating subscriptions.'));
o.inputstyle = 'apply';
o.inputtitle = _('Save current settings');
o.onclick = function() {
return this.map.save(null, true).then(() => {
ui.changes.apply(true);
});
}
o = s.taboption('subscription', form.Button, '_update_subscriptions', _('Update nodes from subscriptions'));
o.inputstyle = 'apply';
/* Button label doubles as a status display; disabled when no subscription URL is configured. */
o.inputtitle = function(section_id) {
let sublist = uci.get(data[0], section_id, 'subscription_url') || [];
if (sublist.length > 0) {
return _('Update %s subscriptions').format(sublist.length);
} else {
this.readonly = true;
return _('No subscription available')
}
}
o.onclick = function() {
/* Runs the updater script on the device, then reloads the page so new nodes appear. */
return fs.exec_direct('/etc/homeproxy/scripts/update_subscriptions.uc').then((res) => {
return location.reload();
}).catch((err) => {
ui.addNotification(null, E('p', _('An error occurred during updating subscriptions: %s').format(err)));
return this.map.reset();
});
}
o = s.taboption('subscription', form.Button, '_remove_subscriptions', _('Remove all nodes from subscriptions'));
o.inputstyle = 'reset';
o.inputtitle = function() {
let subnodes = [];
uci.sections(data[0], 'node', (res) => {
if (res.grouphash)
subnodes = subnodes.concat(res['.name'])
});
if (subnodes.length > 0) {
return _('Remove %s nodes').format(subnodes.length);
} else {
this.readonly = true;
return _('No subscription node');
}
}
o.onclick = function() {
/* Remove every node that came from a subscription (i.e. has a grouphash)... */
let subnodes = [];
uci.sections(data[0], 'node', (res) => {
if (res.grouphash)
subnodes = subnodes.concat(res['.name'])
});
for (let i in subnodes)
uci.remove(data[0], subnodes[i]);
/* ...and reset the main node selections if they pointed at a removed node. */
if (subnodes.includes(uci.get(data[0], 'config', 'main_node')))
uci.set(data[0], 'config', 'main_node', 'nil');
if (subnodes.includes(uci.get(data[0], 'config', 'main_udp_node')))
uci.set(data[0], 'config', 'main_udp_node', 'nil');
this.inputtitle = _('%s nodes removed').format(subnodes.length);
this.readonly = true;
return this.map.save(null, true);
}
/* Subscriptions settings end */
return m.render();
}
});
|
28harishkumar/blog
| 1,593
|
resources/views/posts/edit.blade.php
|
{{-- Post edit form: loads TinyMCE for the body editor and pre-fills fields from
     the post, letting old() input win after a failed validation round-trip. --}}
@extends('app')
@section('title')
Edit Post
@endsection
@section('content')
<script type="text/javascript" src="{{ asset('/js/tinymce/tinymce.min.js') }}"></script>
<script type="text/javascript">
tinymce.init({
selector : "textarea",
plugins : ["advlist autolink lists link image charmap print preview anchor", "searchreplace visualblocks code fullscreen", "insertdatetime media table contextmenu paste jbimages"],
toolbar : "insertfile undo redo | styleselect | bold italic | alignleft aligncenter alignright alignjustify | bullist numlist outdent indent | link image jbimages"
});
</script>
{{-- One form serves publish/update and draft saving; the controller presumably
     distinguishes them by which submit button's name arrives — verify at /update. --}}
<form method="post" action='{{ url("/update") }}'>
<input type="hidden" name="_token" value="{{ csrf_token() }}">
<input type="hidden" name="post_id" value="{{ $post->id }}{{ old('post_id') }}">
<div class="form-group">
<input required="required" placeholder="Enter title here" type="text" name = "title" class="form-control" value="@if(!old('title')){{$post->title}}@endif{{ old('title') }}"/>
</div>
<div class="form-group">
<textarea name='body'class="form-control">
@if(!old('body'))
{!! $post->body !!}
@endif
{!! old('body') !!}
</textarea>
</div>
{{-- Published posts show "Update", drafts show "Publish" for the same action. --}}
@if($post->active == '1')
<input type="submit" name='publish' class="btn btn-success" value = "Update"/>
@else
<input type="submit" name='publish' class="btn btn-success" value = "Publish"/>
@endif
<input type="submit" name='save' class="btn btn-default" value = "Save As Draft" />
{{-- NOTE(review): deletion is a state-changing GET link with the CSRF token in the
     query string; confirm the /delete route actually validates the token. --}}
<a href="{{ url('delete/'.$post->id.'?_token='.csrf_token()) }}" class="btn btn-danger">Delete</a>
</form>
@endsection
|
28harishkumar/blog
| 1,711
|
resources/views/posts/show.blade.php
|
{{-- Post detail page: renders the post body, a comment form for authenticated
     users, and the list of existing comments. --}}
@extends('app')
@section('title')
@if($post)
{{ $post->title }}
{{-- Edit button only for the author or an admin. --}}
@if(!Auth::guest() && ($post->author_id == Auth::user()->id || Auth::user()->is_admin()))
<button class="btn" style="float: right"><a href="{{ url('edit/'.$post->slug)}}">Edit Post</a></button>
@endif
@else
Page does not exist
@endif
@endsection
{{-- NOTE(review): this section dereferences $post without the null guard used in
     the other sections — confirm $post can never be null when this renders. --}}
@section('title-meta')
<p>{{ $post->created_at->format('M d,Y \a\t h:i a') }} By <a href="{{ url('/user/'.$post->author_id)}}">{{ $post->author->name }}</a></p>
@endsection
@section('content')
@if($post)
<div>
{!! $post->body !!}
</div>
<div>
<h2>Leave a comment</h2>
</div>
@if(Auth::guest())
<p>Login to Comment</p>
@else
<div class="panel-body">
<form method="post" action="/comment/add">
<input type="hidden" name="_token" value="{{ csrf_token() }}">
<input type="hidden" name="on_post" value="{{ $post->id }}">
<input type="hidden" name="slug" value="{{ $post->slug }}">
<div class="form-group">
<textarea required="required" placeholder="Enter comment here" name = "body" class="form-control"></textarea>
</div>
<input type="submit" name='post_comment' class="btn btn-success" value = "Post"/>
</form>
</div>
@endif
{{-- Existing comments, in the order the controller supplied them. --}}
<div>
@if($comments)
<ul style="list-style: none; padding: 0">
@foreach($comments as $comment)
<li class="panel-body">
<div class="list-group">
<div class="list-group-item">
<h3>{{ $comment->author->name }}</h3>
<p>{{ $comment->created_at->format('M d,Y \a\t h:i a') }}</p>
</div>
<div class="list-group-item">
<p>{{ $comment->body }}</p>
</div>
</div>
</li>
@endforeach
</ul>
@endif
</div>
@else
404 error
@endif
@endsection
|
28harishkumar/blog
| 1,176
|
resources/views/posts/create.blade.php
|
{{-- New-post form: loads TinyMCE for the body editor; fields are re-filled from
     old() input after a failed validation round-trip. --}}
@extends('app')
@section('title')
Add New Post
@endsection
@section('content')
<script type="text/javascript" src="{{ asset('/js/tinymce/tinymce.min.js') }}"></script>
<script type="text/javascript">
tinymce.init({
selector : "textarea",
plugins : ["advlist autolink lists link image charmap print preview anchor", "searchreplace visualblocks code fullscreen", "insertdatetime media table contextmenu paste jbimages"],
toolbar : "insertfile undo redo | styleselect | bold italic | alignleft aligncenter alignright alignjustify | bullist numlist outdent indent | link image jbimages",
});
</script>
{{-- Two submit buttons share the form; the controller presumably branches on
     whether 'publish' or 'save' is present — verify at /new-post. --}}
<form action="/new-post" method="post">
<input type="hidden" name="_token" value="{{ csrf_token() }}">
<div class="form-group">
<input required="required" value="{{ old('title') }}" placeholder="Enter title here" type="text" name = "title"class="form-control" />
</div>
<div class="form-group">
<textarea name='body'class="form-control">{{ old('body') }}</textarea>
</div>
<input type="submit" name='publish' class="btn btn-success" value = "Publish"/>
<input type="submit" name='save' class="btn btn-default" value = "Save Draft" />
</form>
@endsection
|
2929004360/ruoyi-sign
| 5,208
|
ruoyi-common/src/main/java/com/ruoyi/common/core/domain/entity/SysRole.java
|
package com.ruoyi.common.core.domain.entity;

import java.util.Set;
import javax.validation.constraints.NotBlank;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import com.ruoyi.common.annotation.Excel;
import com.ruoyi.common.annotation.Excel.ColumnType;
import com.ruoyi.common.core.domain.BaseEntity;

/**
 * Role entity, mapped to table sys_role.
 *
 * Bean-style: validation constraints live on the getters so they apply when
 * the framework validates a bound instance.
 *
 * @author ruoyi
 */
public class SysRole extends BaseEntity
{
    private static final long serialVersionUID = 1L;

    /** Role ID (primary key) */
    @Excel(name = "角色序号", cellType = ColumnType.NUMERIC)
    private Long roleId;

    /** Role name */
    @Excel(name = "角色名称")
    private String roleName;

    /** Role permission key (string identifier used in permission checks) */
    @Excel(name = "角色权限")
    private String roleKey;

    /** Display order */
    @Excel(name = "角色排序")
    private Integer roleSort;

    /** Data scope (1: all data; 2: custom; 3: own department; 4: department and below; 5: own data only) */
    @Excel(name = "数据范围", readConverterExp = "1=所有数据权限,2=自定义数据权限,3=本部门数据权限,4=本部门及以下数据权限,5=仅本人数据权限")
    private String dataScope;

    /** Whether menu tree selection links parent and child nodes (0: not linked, 1: linked) */
    private boolean menuCheckStrictly;

    /** Whether department tree selection links parent and child nodes (0: not linked, 1: linked) */
    private boolean deptCheckStrictly;

    /** Role status (0 = normal, 1 = disabled) */
    @Excel(name = "角色状态", readConverterExp = "0=正常,1=停用")
    private String status;

    /** Deletion flag (0 = exists, 2 = deleted) */
    private String delFlag;

    /** Whether the current user holds this role; defaults to false */
    private boolean flag = false;

    /** Selected menu IDs */
    private Long[] menuIds;

    /** Selected department IDs (custom data scope) */
    private Long[] deptIds;

    /** Menu permission strings granted to this role */
    private Set<String> permissions;

    public SysRole()
    {
    }

    public SysRole(Long roleId)
    {
        this.roleId = roleId;
    }

    public Long getRoleId()
    {
        return roleId;
    }

    public void setRoleId(Long roleId)
    {
        this.roleId = roleId;
    }

    /** Whether this role is the built-in administrator role. */
    public boolean isAdmin()
    {
        return isAdmin(this.roleId);
    }

    /** Role ID 1 is treated as the built-in administrator role. */
    public static boolean isAdmin(Long roleId)
    {
        return roleId != null && 1L == roleId;
    }

    @NotBlank(message = "角色名称不能为空")
    @Size(min = 0, max = 30, message = "角色名称长度不能超过30个字符")
    public String getRoleName()
    {
        return roleName;
    }

    public void setRoleName(String roleName)
    {
        this.roleName = roleName;
    }

    @NotBlank(message = "权限字符不能为空")
    @Size(min = 0, max = 100, message = "权限字符长度不能超过100个字符")
    public String getRoleKey()
    {
        return roleKey;
    }

    public void setRoleKey(String roleKey)
    {
        this.roleKey = roleKey;
    }

    @NotNull(message = "显示顺序不能为空")
    public Integer getRoleSort()
    {
        return roleSort;
    }

    public void setRoleSort(Integer roleSort)
    {
        this.roleSort = roleSort;
    }

    public String getDataScope()
    {
        return dataScope;
    }

    public void setDataScope(String dataScope)
    {
        this.dataScope = dataScope;
    }

    public boolean isMenuCheckStrictly()
    {
        return menuCheckStrictly;
    }

    public void setMenuCheckStrictly(boolean menuCheckStrictly)
    {
        this.menuCheckStrictly = menuCheckStrictly;
    }

    public boolean isDeptCheckStrictly()
    {
        return deptCheckStrictly;
    }

    public void setDeptCheckStrictly(boolean deptCheckStrictly)
    {
        this.deptCheckStrictly = deptCheckStrictly;
    }

    public String getStatus()
    {
        return status;
    }

    public void setStatus(String status)
    {
        this.status = status;
    }

    public String getDelFlag()
    {
        return delFlag;
    }

    public void setDelFlag(String delFlag)
    {
        this.delFlag = delFlag;
    }

    public boolean isFlag()
    {
        return flag;
    }

    public void setFlag(boolean flag)
    {
        this.flag = flag;
    }

    public Long[] getMenuIds()
    {
        return menuIds;
    }

    public void setMenuIds(Long[] menuIds)
    {
        this.menuIds = menuIds;
    }

    public Long[] getDeptIds()
    {
        return deptIds;
    }

    public void setDeptIds(Long[] deptIds)
    {
        this.deptIds = deptIds;
    }

    public Set<String> getPermissions()
    {
        return permissions;
    }

    public void setPermissions(Set<String> permissions)
    {
        this.permissions = permissions;
    }

    /** Multi-line dump of the persisted fields, for logging/debugging. */
    @Override
    public String toString() {
        return new ToStringBuilder(this,ToStringStyle.MULTI_LINE_STYLE)
            .append("roleId", getRoleId())
            .append("roleName", getRoleName())
            .append("roleKey", getRoleKey())
            .append("roleSort", getRoleSort())
            .append("dataScope", getDataScope())
            .append("menuCheckStrictly", isMenuCheckStrictly())
            .append("deptCheckStrictly", isDeptCheckStrictly())
            .append("status", getStatus())
            .append("delFlag", getDelFlag())
            .append("createBy", getCreateBy())
            .append("createTime", getCreateTime())
            .append("updateBy", getUpdateBy())
            .append("updateTime", getUpdateTime())
            .append("remark", getRemark())
            .toString();
    }
}
|
2929004360/ruoyi-sign
| 5,851
|
ruoyi-common/src/main/java/com/ruoyi/common/core/domain/entity/SysUser.java
|
package com.ruoyi.common.core.domain.entity;

import java.util.Date;
import java.util.List;
import javax.validation.constraints.*;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import com.ruoyi.common.annotation.Excel;
import com.ruoyi.common.annotation.Excel.ColumnType;
import com.ruoyi.common.annotation.Excel.Type;
import com.ruoyi.common.annotation.Excels;
import com.ruoyi.common.core.domain.BaseEntity;
import com.ruoyi.common.xss.Xss;

/**
 * User entity, mapped to table sys_user.
 *
 * Bean-style: validation/XSS constraints live on the getters so they apply when
 * the framework validates a bound instance.
 *
 * @author ruoyi
 */
public class SysUser extends BaseEntity
{
    private static final long serialVersionUID = 1L;

    /** User ID (primary key) */
    @Excel(name = "用户序号", type = Type.EXPORT, cellType = ColumnType.NUMERIC, prompt = "用户编号")
    private Long userId;

    /** Department ID */
    @Excel(name = "部门编号", type = Type.IMPORT)
    private Long deptId;

    /** Login account name */
    @Excel(name = "登录名称")
    private String userName;

    /** Display nickname */
    @Excel(name = "用户名称")
    private String nickName;

    /** Email address */
    @Excel(name = "用户邮箱")
    private String email;

    /** Phone number */
    @Excel(name = "手机号码", cellType = ColumnType.TEXT)
    private String phonenumber;

    /** Gender (0 = male, 1 = female, 2 = unknown) */
    @Excel(name = "用户性别", readConverterExp = "0=男,1=女,2=未知")
    private String sex;

    /** User avatar */
    private String avatar;

    /** Password (not exported to Excel) */
    private String password;

    /** WeChat openid */
    private String openid;

    /** Account status (0 = normal, 1 = disabled) */
    @Excel(name = "帐号状态", readConverterExp = "0=正常,1=停用")
    private String status;

    /** Deletion flag (0 = exists, 2 = deleted) */
    private String delFlag;

    /** Last login IP address */
    @Excel(name = "最后登录IP", type = Type.EXPORT)
    private String loginIp;

    /** Last login time */
    @Excel(name = "最后登录时间", width = 30, dateFormat = "yyyy-MM-dd HH:mm:ss", type = Type.EXPORT)
    private Date loginDate;

    /** Department object (name and leader are flattened into Excel exports) */
    @Excels({
        @Excel(name = "部门名称", targetAttr = "deptName", type = Type.EXPORT),
        @Excel(name = "部门负责人", targetAttr = "leader", type = Type.EXPORT)
    })
    private SysDept dept;

    /** Roles assigned to this user */
    private List<SysRole> roles;

    /** Selected role IDs */
    private Long[] roleIds;

    /** Selected post IDs */
    private Long[] postIds;

    /** Role ID */
    private Long roleId;

    public SysUser()
    {
    }

    public SysUser(Long userId)
    {
        this.userId = userId;
    }

    public Long getUserId()
    {
        return userId;
    }

    public void setUserId(Long userId)
    {
        this.userId = userId;
    }

    /** Whether this user is the built-in administrator account. */
    public boolean isAdmin()
    {
        return isAdmin(this.userId);
    }

    /** User ID 1 is treated as the built-in administrator account. */
    public static boolean isAdmin(Long userId)
    {
        return userId != null && 1L == userId;
    }

    public Long getDeptId()
    {
        return deptId;
    }

    public void setDeptId(Long deptId)
    {
        this.deptId = deptId;
    }

    @Xss(message = "用户昵称不能包含脚本字符")
    @Size(min = 0, max = 30, message = "用户昵称长度不能超过30个字符")
    public String getNickName()
    {
        return nickName;
    }

    public void setNickName(String nickName)
    {
        this.nickName = nickName;
    }

    @Xss(message = "用户账号不能包含脚本字符")
    @NotBlank(message = "用户账号不能为空")
    @Size(min = 0, max = 30, message = "用户账号长度不能超过30个字符")
    public String getUserName()
    {
        return userName;
    }

    public void setUserName(String userName)
    {
        this.userName = userName;
    }

    @Email(message = "邮箱格式不正确")
    @Size(min = 0, max = 50, message = "邮箱长度不能超过50个字符")
    public String getEmail()
    {
        return email;
    }

    public void setEmail(String email)
    {
        this.email = email;
    }

    @Size(min = 0, max = 11, message = "手机号码长度不能超过11个字符")
    @NotBlank(message = "手机号码不能为空")
    public String getPhonenumber()
    {
        return phonenumber;
    }

    public void setPhonenumber(String phonenumber)
    {
        this.phonenumber = phonenumber;
    }

    public String getSex()
    {
        return sex;
    }

    public void setSex(String sex)
    {
        this.sex = sex;
    }

    public String getAvatar()
    {
        return avatar;
    }

    public void setAvatar(String avatar)
    {
        this.avatar = avatar;
    }

    public String getPassword()
    {
        return password;
    }

    public void setPassword(String password)
    {
        this.password = password;
    }

    public String getStatus()
    {
        return status;
    }

    public void setStatus(String status)
    {
        this.status = status;
    }

    public String getDelFlag()
    {
        return delFlag;
    }

    public void setDelFlag(String delFlag)
    {
        this.delFlag = delFlag;
    }

    public String getLoginIp()
    {
        return loginIp;
    }

    public void setLoginIp(String loginIp)
    {
        this.loginIp = loginIp;
    }

    public Date getLoginDate()
    {
        return loginDate;
    }

    public void setLoginDate(Date loginDate)
    {
        this.loginDate = loginDate;
    }

    public SysDept getDept()
    {
        return dept;
    }

    public void setDept(SysDept dept)
    {
        this.dept = dept;
    }

    public List<SysRole> getRoles()
    {
        return roles;
    }

    public void setRoles(List<SysRole> roles)
    {
        this.roles = roles;
    }

    public Long[] getRoleIds()
    {
        return roleIds;
    }

    public void setRoleIds(Long[] roleIds)
    {
        this.roleIds = roleIds;
    }

    public Long[] getPostIds()
    {
        return postIds;
    }

    public void setPostIds(Long[] postIds)
    {
        this.postIds = postIds;
    }

    public Long getRoleId()
    {
        return roleId;
    }

    public void setRoleId(Long roleId)
    {
        this.roleId = roleId;
    }

    public String getOpenid() {
        return openid;
    }

    public void setOpenid(String openid) {
        this.openid = openid;
    }
}
|
2977094657/BilibiliHistoryFetcher
| 17,695
|
scripts/sync_db_json.py
|
import json
import logging
import os
import shutil
import sqlite3
from datetime import datetime
# Logging configuration
# Make sure the output directory exists before the FileHandler below opens its log file there
os.makedirs("output/check", exist_ok=True)
# Custom log formatter so that Chinese characters are rendered correctly
class EncodingFormatter(logging.Formatter):
    """Log formatter intended to keep non-ASCII (e.g. Chinese) text readable.

    ``logging.Formatter.format`` already returns ``str``, so no ``str()``
    conversion is required; the previous ``isinstance`` fallback branch was
    unreachable and has been removed. The subclass is kept to preserve the
    documented intent and as an extension point.
    """

    def format(self, record):
        """Format *record* using the configured format string."""
        # Base class always returns str; nothing to convert or escape here.
        return super().format(record)
# Configure the log handlers
# Shared formatter: timestamp, level and message (EncodingFormatter is defined above)
formatter = EncodingFormatter('%(asctime)s - %(levelname)s - %(message)s')
# Append-mode UTF-8 file log so Chinese text is preserved on disk
file_handler = logging.FileHandler("output/check/sync_db_json.log", mode='a', encoding='utf-8')
file_handler.setFormatter(formatter)
# Mirror everything to the console with the same format
console_handler = logging.StreamHandler()
console_handler.setFormatter(formatter)
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
logger.handlers = []  # clear any pre-existing handlers to avoid duplicated output
logger.addHandler(file_handler)
logger.addHandler(console_handler)
logger.propagate = False  # keep messages from propagating to the root logger
def get_json_files(json_root_path):
    """Walk the ``<root>/<year>/<month>/<day>.json`` tree and list every day file.

    Only directories whose names are all digits are descended into, and only
    files ending in ``.json`` are collected. Returns a list of dicts with keys
    ``path``, ``year``, ``month`` and ``day`` (numbers parsed from the names).
    """
    collected = []
    for year_name in os.listdir(json_root_path):
        year_dir = os.path.join(json_root_path, year_name)
        if not (year_name.isdigit() and os.path.isdir(year_dir)):
            continue
        for month_name in os.listdir(year_dir):
            month_dir = os.path.join(year_dir, month_name)
            if not (month_name.isdigit() and os.path.isdir(month_dir)):
                continue
            collected.extend(
                {
                    'path': os.path.join(month_dir, file_name),
                    'year': int(year_name),
                    'month': int(month_name),
                    'day': int(file_name.split('.')[0]),
                }
                for file_name in os.listdir(month_dir)
                if file_name.endswith('.json')
            )
    return collected
def get_db_tables(db_path):
    """Return the names of all tables in the SQLite database at *db_path*.

    Best-effort: any failure is logged and an empty list is returned.
    """
    try:
        conn = sqlite3.connect(db_path)
        try:
            cursor = conn.cursor()
            cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
            return [row[0] for row in cursor.fetchall()]
        finally:
            # Previously the connection leaked when execute() raised
            # (the broad except caught the error but never closed it).
            conn.close()
    except Exception as e:
        logger.error(f"获取数据库表时出错: {e}")
        return []
def load_json_file(file_path):
    """Parse a UTF-8 JSON file and return its content; ``[]`` on any failure."""
    try:
        with open(file_path, 'r', encoding='utf-8') as handle:
            parsed = json.load(handle)
    except Exception as e:
        logger.error(f"读取JSON文件 {file_path} 时出错: {e}")
        return []
    return parsed
def save_json_file(file_path, data):
    """Write *data* to *file_path* as pretty-printed UTF-8 JSON.

    Any existing file is first copied to ``output/check/backups`` with a
    timestamp prefix. Returns True on success, False on failure (logged).
    """
    # getLogger is idempotent, so this is the same logger object the module
    # configures at import time; it also keeps the function self-contained.
    log = logging.getLogger(__name__)
    try:
        # Ensure the target directory exists. A bare filename has no parent
        # directory and os.makedirs('') raises, so guard against the empty case.
        parent_dir = os.path.dirname(file_path)
        if parent_dir:
            os.makedirs(parent_dir, exist_ok=True)
        # Back up the original file (if any) before overwriting it
        if os.path.exists(file_path):
            backup_dir = os.path.join('output', 'check', 'backups')
            os.makedirs(backup_dir, exist_ok=True)
            file_name = os.path.basename(file_path)
            backup_path = os.path.join(backup_dir, f"{datetime.now().strftime('%Y%m%d%H%M%S')}_{file_name}")
            shutil.copy2(file_path, backup_path)
            log.info(f"原文件已备份到 {backup_path}")
        # Write the new content
        with open(file_path, 'w', encoding='utf-8') as f:
            json.dump(data, f, ensure_ascii=False, indent=4)
        log.info(f"数据已保存到 {file_path}")
        return True
    except Exception as e:
        log.error(f"保存JSON文件 {file_path} 时出错: {e}")
        return False
def get_records_from_db(db_path, year, month, day):
    """Fetch all history rows for one (local-time) day and convert them to the
    nested JSON layout used by the exported history files.

    Returns a list of dicts ordered newest-first; ``[]`` on any error (logged).
    """
    # getLogger is idempotent, so this is the same logger object the module
    # configures at import time; it also keeps the function self-contained.
    log = logging.getLogger(__name__)
    try:
        # Timestamp bounds of the target day, in local time.
        # NOTE(review): the upper bound is 23:59:59, so the fractional last
        # second of the day is excluded — confirm view_at is always an int.
        start_date = datetime(year, month, day).timestamp()
        end_date = datetime(year, month, day, 23, 59, 59).timestamp()
        conn = sqlite3.connect(db_path)
        try:
            conn.row_factory = sqlite3.Row  # rows addressable by column name
            cursor = conn.cursor()
            # History is stored in one table per year, e.g. bilibili_history_2024
            table_name = f"bilibili_history_{year}"
            cursor.execute(f"""
                SELECT * FROM {table_name}
                WHERE view_at >= ? AND view_at <= ?
                ORDER BY view_at DESC
            """, (start_date, end_date))
            records_list = [dict(record) for record in cursor.fetchall()]
        finally:
            # Previously the connection leaked when execute() raised
            # (e.g. missing yearly table); always close it.
            conn.close()
        # Re-shape the flat DB rows into the nested JSON structure
        json_records = []
        for record in records_list:
            json_records.append({
                "title": record["title"],
                "long_title": record["long_title"],
                "cover": record["cover"],
                # 'covers' is stored as a JSON string; NULL/empty maps to None
                "covers": json.loads(record["covers"]) if record["covers"] else None,
                "uri": record["uri"],
                "history": {
                    "oid": record["oid"],
                    "epid": record["epid"],
                    "bvid": record["bvid"],
                    "page": record["page"],
                    "cid": record["cid"],
                    "part": record["part"],
                    "business": record["business"],
                    "dt": record["dt"]
                },
                "videos": record["videos"],
                "author_name": record["author_name"],
                "author_face": record["author_face"],
                "author_mid": record["author_mid"],
                "view_at": record["view_at"],
                "progress": record["progress"],
                "badge": record["badge"],
                "show_title": record["show_title"],
                "duration": record["duration"],
                "current": record["current"],
                "total": record["total"],
                "new_desc": record["new_desc"],
                "is_finish": record["is_finish"],
                "is_fav": record["is_fav"],
                "kid": record["kid"],
                "tag_name": record["tag_name"],
                "live_status": record["live_status"]
            })
        log.info(f"从数据库中获取了 {len(json_records)} 条 {year}年{month}月{day}日的记录")
        return json_records
    except Exception as e:
        log.error(f"从数据库获取记录时出错: {e}")
        return []
def import_records_to_db(db_path, records, year):
    """Insert JSON history records into table ``bilibili_history_<year>``.

    Records already present (same ``view_at`` + ``bvid`` pair) are skipped,
    so repeated calls are idempotent.

    Args:
        db_path: Path to the SQLite database file.
        records: Iterable of record dicts in API JSON shape.
        year: Year selecting the target per-year table.

    Returns:
        Number of rows actually inserted (0 on error or empty input).
    """
    if not records:
        return 0

    conn = None
    try:
        conn = sqlite3.connect(db_path)
        cursor = conn.cursor()

        # Bail out early when the per-year table has not been created yet.
        table_name = f"bilibili_history_{year}"
        cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name=?", (table_name,))
        if not cursor.fetchone():
            logger.error(f"数据库表 {table_name} 不存在")
            return 0

        # Only insert into columns the table actually has.
        cursor.execute(f"PRAGMA table_info({table_name})")
        columns = [column[1] for column in cursor.fetchall()]

        imported_count = 0
        for record in records:
            # Duplicate check: a record is identified by (view_at, bvid).
            view_at = record.get('view_at', 0)
            bvid = record.get('history', {}).get('bvid', '')
            cursor.execute(f"SELECT id FROM {table_name} WHERE view_at = ? AND bvid = ?", (view_at, bvid))
            if cursor.fetchone():
                logger.debug(f"记录已存在: {record.get('title')} ({bvid}, {view_at})")
                continue

            # Flatten the nested JSON structure into DB columns.
            data = {
                "id": None,  # auto-increment primary key
                "title": record.get('title', ''),
                "long_title": record.get('long_title', ''),
                "cover": record.get('cover', ''),
                "covers": json.dumps(record.get('covers', [])),
                "uri": record.get('uri', ''),
                "oid": record.get('history', {}).get('oid', 0),
                "epid": record.get('history', {}).get('epid', 0),
                "bvid": record.get('history', {}).get('bvid', ''),
                "page": record.get('history', {}).get('page', 1),
                "cid": record.get('history', {}).get('cid', 0),
                "part": record.get('history', {}).get('part', ''),
                "business": record.get('history', {}).get('business', ''),
                "dt": record.get('history', {}).get('dt', 0),
                "videos": record.get('videos', 1),
                "author_name": record.get('author_name', ''),
                "author_face": record.get('author_face', ''),
                "author_mid": record.get('author_mid', 0),
                "view_at": view_at,
                "progress": record.get('progress', 0),
                "badge": record.get('badge', ''),
                "show_title": record.get('show_title', ''),
                "duration": record.get('duration', 0),
                "current": record.get('current', ''),
                "total": record.get('total', 0),
                "new_desc": record.get('new_desc', ''),
                "is_finish": record.get('is_finish', 0),
                "is_fav": record.get('is_fav', 0),
                "kid": record.get('kid', 0),
                "tag_name": record.get('tag_name', ''),
                "live_status": record.get('live_status', 0),
                "main_category": None  # classified later; defaults to NULL
            }

            # Build the INSERT dynamically from the intersection of our
            # fields and the table's real columns.
            valid_columns = [col for col in data.keys() if col in columns]
            placeholders = ', '.join(['?' for _ in valid_columns])
            sql = f"INSERT INTO {table_name} ({', '.join(valid_columns)}) VALUES ({placeholders})"
            values = [data[col] for col in valid_columns]
            cursor.execute(sql, values)
            imported_count += 1

        conn.commit()
        logger.info(f"成功导入 {imported_count} 条记录到表 {table_name}")
        return imported_count
    except Exception as e:
        logger.error(f"导入记录到数据库时出错: {e}")
        return 0
    finally:
        # Fix: always release the connection; the original leaked it on any
        # exception raised after connect(). Closing with an uncommitted
        # transaction also discards the partial batch.
        if conn is not None:
            conn.close()
def sync_json_to_db(db_path, json_root_path):
    """Import the records of every per-day JSON file into the database.

    Returns:
        Tuple ``(total_imported, synced_days)`` where ``synced_days`` is a
        list of per-day summary dicts for days that received new rows.
    """
    total_imported = 0
    synced_days = []

    for file_info in get_json_files(json_root_path):
        y, m, d = file_info['year'], file_info['month'], file_info['day']
        file_path = file_info['path']

        day_records = load_json_file(file_path)
        if not day_records:
            continue

        count = import_records_to_db(db_path, day_records, y)
        total_imported += count
        if count <= 0:
            continue

        # Keep at most ten sample titles for the sync report.
        sample_titles = [rec.get('title', '未知标题') for rec in day_records[:10]]
        synced_days.append({
            "date": f"{y}-{m:02d}-{d:02d}",
            "imported_count": count,
            "source": "json_to_db",
            "titles": sample_titles
        })
        logger.info(f"从 {file_path} 导入了 {count} 条记录到数据库")

    return total_imported, synced_days
def sync_db_to_json(db_path, json_root_path):
    """Export database records that are missing from the per-day JSON files.

    For every distinct day found in the ``bilibili_history_<year>`` tables,
    the day's DB records are merged into the matching JSON file (the file
    is created when it does not exist yet).

    Returns:
        Tuple ``(total_restored, synced_days)``: number of records written
        to JSON files, and a per-day summary list.
    """
    # Collect the per-year history tables present in the database.
    db_tables = get_db_tables(db_path)
    history_tables = [table for table in db_tables if table.startswith('bilibili_history_')]
    total_restored = 0
    synced_days = []

    # Map "year-month-day" -> path for JSON files that already exist.
    # NOTE(review): these keys are built from get_json_files() fields;
    # confirm they are zero-padded like the '%Y-%m-%d' strings produced
    # below, otherwise existing files are never matched via this dict.
    json_files = get_json_files(json_root_path)
    json_file_dict = {}
    for file_info in json_files:
        key = f"{file_info['year']}-{file_info['month']}-{file_info['day']}"
        json_file_dict[key] = file_info['path']

    # Walk each year table, then each distinct day inside it.
    for table in history_tables:
        year = int(table.split('_')[-1])
        try:
            conn = sqlite3.connect(db_path)
            cursor = conn.cursor()
            # Distinct days present in this table, derived from the unix
            # `view_at` timestamps.
            cursor.execute(f"""
                SELECT strftime('%Y-%m-%d', datetime(view_at, 'unixepoch')) as date_str,
                       strftime('%Y', datetime(view_at, 'unixepoch')) as year,
                       strftime('%m', datetime(view_at, 'unixepoch')) as month,
                       strftime('%d', datetime(view_at, 'unixepoch')) as day
                FROM {table}
                GROUP BY date_str
                ORDER BY date_str
            """)
            dates = cursor.fetchall()
            conn.close()

            for date in dates:
                date_str, db_year, db_month, db_day = date
                db_year, db_month, db_day = int(db_year), int(db_month), int(db_day)

                # Resolve (or derive) the JSON file path for this day.
                json_path = json_file_dict.get(date_str)
                if not json_path:
                    # No existing file recorded: build the canonical path.
                    json_path = os.path.join(json_root_path, str(db_year), f"{db_month:02d}", f"{db_day:02d}.json")

                # All DB records for this day, already in JSON shape.
                db_records = get_records_from_db(db_path, db_year, db_month, db_day)

                # If the day's JSON file exists, merge the records.
                if os.path.exists(json_path):
                    json_records = load_json_file(json_path)
                    # De-duplicate by (view_at, bvid).
                    existing_keys = set((record.get('view_at', 0), record.get('history', {}).get('bvid', ''))
                                        for record in json_records)
                    # Keep only DB records not already in the file.
                    new_records = []
                    for db_record in db_records:
                        key = (db_record.get('view_at', 0), db_record.get('history', {}).get('bvid', ''))
                        if key not in existing_keys:
                            new_records.append(db_record)
                            existing_keys.add(key)
                    # Merge and rewrite the file when anything is new.
                    if new_records:
                        combined_records = json_records + new_records
                        # Keep the file ordered newest-first by view time.
                        combined_records.sort(key=lambda x: x.get('view_at', 0), reverse=True)
                        if save_json_file(json_path, combined_records):
                            # Record sync summary (at most ten titles).
                            titles = [record.get('title', '未知标题') for record in new_records[:10]]
                            synced_days.append({
                                "date": f"{db_year}-{db_month:02d}-{db_day:02d}",
                                "imported_count": len(new_records),
                                "source": "db_to_json",
                                "titles": titles if len(titles) <= 10 else titles[:10]
                            })
                            logger.info(f"已新增 {json_path} 了 {len(new_records)} 条记录")
                            total_restored += len(new_records)
                else:
                    # No JSON file yet for this day: create it from scratch.
                    if db_records and save_json_file(json_path, db_records):
                        # Record sync summary (at most ten titles).
                        titles = [record.get('title', '未知标题') for record in db_records[:10]]
                        synced_days.append({
                            "date": f"{db_year}-{db_month:02d}-{db_day:02d}",
                            "imported_count": len(db_records),
                            "source": "db_to_json",
                            "titles": titles if len(titles) <= 10 else titles[:10]
                        })
                        logger.info(f"已创建 {json_path} 了 {len(db_records)} 条记录")
                        total_restored += len(db_records)
        except Exception as e:
            logger.error(f"将 {year} 年的数据库中的记录导入JSON文件时出错: {e}")

    return total_restored, synced_days
def sync_data(db_path=None, json_root_path=None):
    """Run a bidirectional sync between the SQLite database and the
    per-day JSON files, then persist a summary report.

    Args:
        db_path: Database path (defaults to ``output/bilibili_history.db``).
        json_root_path: JSON tree root (defaults to ``output/history_by_date``).

    Returns:
        The summary dict that is also written to
        ``output/check/sync_result.json``.
    """
    # Fall back to the default locations when no paths are supplied.
    if db_path is None:
        db_path = os.path.join('output', 'bilibili_history.db')
    if json_root_path is None:
        json_root_path = os.path.join('output', 'history_by_date')

    # Directory that receives the sync report.
    output_dir = os.path.join('output', 'check')
    os.makedirs(output_dir, exist_ok=True)

    logger.info("===== 开始同步数据库和不同的JSON文件 =====")
    logger.info(f"数据库路径: {db_path}")
    logger.info(f"JSON文件路径: {json_root_path}")

    # Phase 1: JSON files -> database.
    json_to_db_count, json_to_db_days = sync_json_to_db(db_path, json_root_path)
    logger.info(f"从JSON文件中获取了 {json_to_db_count} 条记录导入到数据库")

    # Phase 2: database -> JSON files.
    db_to_json_count, db_to_json_days = sync_db_to_json(db_path, json_root_path)
    logger.info(f"从数据库中获取了 {db_to_json_count} 条记录导入到JSON文件")

    # Combine both phases' per-day summaries, newest date first.
    all_synced_days = sorted(json_to_db_days + db_to_json_days,
                             key=lambda day: day['date'], reverse=True)

    logger.info("===== 同步成功。 =====")
    logger.info(f"总条记录: 从JSON文件导入到数据库 {json_to_db_count} 条,从数据库导入到JSON文件 {db_to_json_count} 条")

    sync_result = {
        "success": True,
        "json_to_db_count": json_to_db_count,
        "db_to_json_count": db_to_json_count,
        "total_synced": json_to_db_count + db_to_json_count,
        "synced_days": all_synced_days,
        "timestamp": datetime.now().isoformat()
    }

    # Persist the report so other tools can inspect the last sync.
    sync_result_file = os.path.join(output_dir, "sync_result.json")
    with open(sync_result_file, 'w', encoding='utf-8') as f:
        json.dump(sync_result, f, ensure_ascii=False, indent=4)
    logger.info(f"同步结果已保存到 {sync_result_file}")

    return sync_result
def main():
    """Command-line entry point: run a sync with the default paths."""
    result = sync_data(
        os.path.join('output', 'bilibili_history.db'),
        os.path.join('output', 'history_by_date'),
    )
    print(f"同步完成,总共同步 {result['total_synced']} 条记录")


if __name__ == '__main__':
    main()
|
281677160/openwrt-package
| 7,664
|
luci-app-homeproxy/htdocs/luci-static/resources/view/homeproxy/status.js
|
/*
* SPDX-License-Identifier: GPL-2.0-only
*
* Copyright (C) 2022-2025 ImmortalWrt.org
*/
'use strict';
'require dom';
'require form';
'require fs';
'require poll';
'require rpc';
'require uci';
'require ui';
'require view';
/* Thanks to luci-app-aria2 */
/* Inline stylesheet injected by the log view (log area + pre styling,
 * highlighted option descriptions). */
const css = ' \
#log_textarea { \
padding: 10px; \
text-align: left; \
} \
#log_textarea pre { \
padding: .5rem; \
word-break: break-all; \
margin: 0; \
} \
.description { \
background-color: #33ccff; \
}';

/* Runtime directory where homeproxy writes its log files. */
const hp_dir = '/var/run/homeproxy';
/* Build a connectivity-check widget for `site` and store it on `o.default`
 * (used as a DummyValue option). Clicking the button invokes the
 * `luci.homeproxy connection_check` ubus method and recolors the adjacent
 * label green ("passed") or red ("failed") from `ret.result`. */
function getConnStat(o, site) {
	const callConnStat = rpc.declare({
		object: 'luci.homeproxy',
		method: 'connection_check',
		params: ['site'],
		expect: { '': {} }
	});

	o.default = E('div', { 'style': 'cbi-value-field' }, [
		E('button', {
			'class': 'btn cbi-button cbi-button-action',
			'click': ui.createHandlerFn(this, () => {
				return L.resolveDefault(callConnStat(site), {}).then((ret) => {
					/* The status <strong> is the element right after the button. */
					let ele = o.default.firstElementChild.nextElementSibling;
					if (ret.result) {
						ele.style.setProperty('color', 'green');
						ele.innerHTML = _('passed');
					} else {
						ele.style.setProperty('color', 'red');
						ele.innerHTML = _('failed');
					}
				});
			})
		}, [ _('Check') ]),
		' ',
		/* Placeholder until the first check runs. */
		E('strong', { 'style': 'color:gray' }, _('unchecked')),
	]);
}
/* Populate `o.default` with the current version of rule-set resource `type`
 * plus a "Check update" button. Version comes from the
 * `resources_get_version` ubus call; the button triggers `resources_update`
 * and surfaces its status code through the option description after a map
 * reset. Returns the promise of the initial version fetch. */
function getResVersion(o, type) {
	const callResVersion = rpc.declare({
		object: 'luci.homeproxy',
		method: 'resources_get_version',
		params: ['type'],
		expect: { '': {} }
	});

	const callResUpdate = rpc.declare({
		object: 'luci.homeproxy',
		method: 'resources_update',
		params: ['type'],
		expect: { '': {} }
	});

	return L.resolveDefault(callResVersion(type), {}).then((res) => {
		let spanTemp = E('div', { 'style': 'cbi-value-field' }, [
			E('button', {
				'class': 'btn cbi-button cbi-button-action',
				'click': ui.createHandlerFn(this, () => {
					return L.resolveDefault(callResUpdate(type), {}).then((res) => {
						/* Map the backend status code to a user message. */
						switch (res.status) {
						case 0:
							o.description = _('Successfully updated.');
							break;
						case 1:
							o.description = _('Update failed.');
							break;
						case 2:
							o.description = _('Already in updating.');
							break;
						case 3:
							o.description = _('Already at the latest version.');
							break;
						default:
							o.description = _('Unknown error.');
							break;
						}
						/* Re-render the map so the new description shows. */
						return o.map.reset();
					});
				})
			}, [ _('Check update') ]),
			' ',
			E('strong', { 'style': (res.error ? 'color:red' : 'color:green') },
				[ res.error ? 'not found' : res.version ]
			),
		]);
		o.default = spanTemp;
	});
}
/* Render a live log viewer for one log file. Option ids look like
 * `_<file>_logview`, so index 1 of the '_'-split is the log file stem under
 * /var/run/homeproxy. For sing-box logs a log-level <select> is added that
 * writes `log_level` back to the matching uci section. The log area is
 * refreshed on every poll tick, and a button clears the file via the
 * `log_clean` ubus method. */
function getRuntimeLog(o, name, _option_index, section_id, _in_table) {
	const filename = o.option.split('_')[1];
	let section, log_level_el;

	/* Which uci section (if any) owns the log level for this file. */
	switch (filename) {
	case 'homeproxy':
		section = null;
		break;
	case 'sing-box-c':
		section = 'config';
		break;
	case 'sing-box-s':
		section = 'server';
		break;
	}

	if (section) {
		const selected = uci.get('homeproxy', section, 'log_level') || 'warn';
		const choices = {
			trace: _('Trace'),
			debug: _('Debug'),
			info: _('Info'),
			warn: _('Warn'),
			error: _('Error'),
			fatal: _('Fatal'),
			panic: _('Panic')
		};

		/* Log-level selector; saving immediately applies the uci change. */
		log_level_el = E('select', {
			'id': o.cbid(section_id),
			'class': 'cbi-input-select',
			'style': 'margin-left: 4px; width: 6em;',
			'change': ui.createHandlerFn(this, (ev) => {
				uci.set('homeproxy', section, 'log_level', ev.target.value);
				return o.map.save(null, true).then(() => {
					ui.changes.apply(true);
				});
			})
		});
		Object.keys(choices).forEach((v) => {
			log_level_el.appendChild(E('option', {
				'value': v,
				'selected': (v === selected) ? '' : null
			}, [ choices[v] ]));
		});
	}

	const callLogClean = rpc.declare({
		object: 'luci.homeproxy',
		method: 'log_clean',
		params: ['type'],
		expect: { '': {} }
	});

	/* Shown until the first poll replaces it with real content. */
	const log_textarea = E('div', { 'id': 'log_textarea' },
		E('img', {
			'src': L.resource('icons/loading.svg'),
			'alt': _('Loading'),
			'style': 'vertical-align:middle'
		}, _('Collecting data...'))
	);

	let log;
	/* Re-read the log file on every poll interval. */
	poll.add(L.bind(() => {
		return fs.read_direct(String.format('%s/%s.log', hp_dir, filename), 'text')
		.then((res) => {
			log = E('pre', { 'wrap': 'pre' }, [
				res.trim() || _('Log is empty.')
			]);

			dom.content(log_textarea, log);
		}).catch((err) => {
			/* Missing file is expected (service not started yet). */
			if (err.toString().includes('NotFoundError'))
				log = E('pre', { 'wrap': 'pre' }, [
					_('Log file does not exist.')
				]);
			else
				log = E('pre', { 'wrap': 'pre' }, [
					_('Unknown error: %s').format(err)
				]);

			dom.content(log_textarea, log);
		});
	}));

	return E([
		E('style', [ css ]),
		E('div', {'class': 'cbi-map'}, [
			E('h3', {'name': 'content', 'style': 'align-items: center; display: flex;'}, [
				_('%s log').format(name),
				log_level_el || '',
				E('button', {
					'class': 'btn cbi-button cbi-button-action',
					'style': 'margin-left: 4px;',
					'click': ui.createHandlerFn(this, () => {
						return L.resolveDefault(callLogClean(filename), {});
					})
				}, [ _('Clean log') ])
			]),
			E('div', {'class': 'cbi-section'}, [
				log_textarea,
				E('div', {'style': 'text-align:right'},
					E('small', {}, _('Refresh every %s seconds.').format(L.env.pollinterval))
				)
			])
		])
	]);
}
/* Status page: connectivity checks, rule-set resource management, and the
 * three runtime log viewers. */
return view.extend({
	render() {
		let m, s, o;

		m = new form.Map('homeproxy');

		/* Connectivity probes against well-known sites. */
		s = m.section(form.NamedSection, 'config', 'homeproxy', _('Connection check'));
		s.anonymous = true;

		o = s.option(form.DummyValue, '_check_baidu', _('BaiDu'));
		o.cfgvalue = L.bind(getConnStat, this, o, 'baidu');

		o = s.option(form.DummyValue, '_check_google', _('Google'));
		o.cfgvalue = L.bind(getConnStat, this, o, 'google');

		/* Versions + update buttons for the bundled rule-set resources. */
		s = m.section(form.NamedSection, 'config', 'homeproxy', _('Resources management'));
		s.anonymous = true;

		o = s.option(form.DummyValue, '_china_ip4_version', _('China IPv4 list version'));
		o.cfgvalue = L.bind(getResVersion, this, o, 'china_ip4');
		o.rawhtml = true;

		o = s.option(form.DummyValue, '_china_ip6_version', _('China IPv6 list version'));
		o.cfgvalue = L.bind(getResVersion, this, o, 'china_ip6');
		o.rawhtml = true;

		o = s.option(form.DummyValue, '_china_list_version', _('China list version'));
		o.cfgvalue = L.bind(getResVersion, this, o, 'china_list');
		o.rawhtml = true;

		o = s.option(form.DummyValue, '_gfw_list_version', _('GFW list version'));
		o.cfgvalue = L.bind(getResVersion, this, o, 'gfw_list');
		o.rawhtml = true;

		/* GitHub token used for resource downloads; the custom widget adds
		 * an inline Save button that applies the change immediately. */
		o = s.option(form.Value, 'github_token', _('GitHub token'));
		o.password = true;
		o.renderWidget = function() {
			let node = form.Value.prototype.renderWidget.apply(this, arguments);
			(node.querySelector('.control-group') || node).appendChild(E('button', {
				'class': 'cbi-button cbi-button-apply',
				'title': _('Save'),
				'click': ui.createHandlerFn(this, () => {
					return this.map.save(null, true).then(() => {
						ui.changes.apply(true);
					});
				}, this.option)
			}, [ _('Save') ]));
			return node;
		}

		/* Log viewers; option names encode the log file stem. */
		s = m.section(form.NamedSection, 'config', 'homeproxy');
		s.anonymous = true;

		o = s.option(form.DummyValue, '_homeproxy_logview');
		o.render = L.bind(getRuntimeLog, this, o, _('HomeProxy'));

		o = s.option(form.DummyValue, '_sing-box-c_logview');
		o.render = L.bind(getRuntimeLog, this, o, _('sing-box client'));

		o = s.option(form.DummyValue, '_sing-box-s_logview');
		o.render = L.bind(getRuntimeLog, this, o, _('sing-box server'));

		return m.render();
	},

	/* Pure status page: no save/apply/reset footer. */
	handleSaveApply: null,
	handleSave: null,
	handleReset: null
});
|
2977094657/BilibiliHistoryFetcher
| 58,090
|
scripts/scheduler_manager.py
|
import asyncio
import calendar
import os
import threading
import traceback
from datetime import datetime, timedelta
from typing import Optional, List
import httpx
import schedule
import yaml
from loguru import logger
from scripts.scheduler_db_enhanced import EnhancedSchedulerDB # 修改为导入增强版数据库
from scripts.utils import get_base_path, load_config, get_config_path, setup_logger
# 确保日志系统已初始化
setup_logger()
class SchedulerManager:
_instance = None
_lock = threading.Lock() # 添加线程锁
@classmethod
def get_instance(cls, app=None) -> 'SchedulerManager':
    """Return the process-wide SchedulerManager singleton.

    The first call must supply the FastAPI ``app`` instance; subsequent
    callers may omit it.

    Raises:
        ValueError: when no singleton exists yet and ``app`` is None.
    """
    if not cls._instance:
        with cls._lock:  # serialize concurrent first-time construction
            if not cls._instance:  # double-checked locking
                if app is None:
                    raise ValueError("First initialization requires app instance")
                cls._instance = cls(app)
    return cls._instance
def __init__(self, app):
    """Initialize the scheduler manager (effective only once per process).

    Args:
        app: The application instance the scheduler belongs to.
    """
    # Guard against re-initialization of the singleton.
    if hasattr(self, '_initialized'):
        return

    self.app = app
    self.tasks = {}
    self.daily_tasks = {}
    self.task_chains = {}  # successor map built from task dependencies
    self.scheduler = None
    self.task_status = {}
    self.chain_status = {}
    self.is_running = False
    self.log_capture = None
    self.current_log_file = None

    # Read the API server host/port from config.yaml so scheduled tasks
    # can call back into our own HTTP endpoints.
    config = load_config()
    server_config = config.get('server', {})
    host = server_config.get('host', '0.0.0.0')
    port = server_config.get('port', 8899)

    # Build base_url from the server configuration.
    self.base_url = f"http://{host}:{port}"
    print(f"从主配置文件读取服务器配置: {host}:{port}")
    print(f"设置base_url为: {self.base_url}")

    # Enhanced scheduler database (task definitions + run history).
    self.db = EnhancedSchedulerDB.get_instance()

    # Load the scheduler configuration (base_url no longer comes from it).
    self.load_scheduler_config()

    self._initialized = True  # mark as initialized
def load_scheduler_config(self):
    """Load scheduler options and task definitions.

    On first run (empty database) tasks are seeded from
    ``config/scheduler_config.yaml``; afterwards the database is
    authoritative, with tasks newly added to the config file merged in and
    the login-monitor task overridden from the main ``config.yaml``.
    """
    try:
        base_path = get_base_path()
        config_file = os.path.join(base_path, 'config', 'scheduler_config.yaml')

        if not os.path.exists(config_file):
            logger.warning(f"调度器配置文件不存在: {config_file}")
            # base_url already comes from the main config file, so there is
            # nothing else to log here.
            return

        with open(config_file, 'r', encoding='utf-8') as f:
            config = yaml.safe_load(f)

        # base_url is no longer read from scheduler_config.yaml; it was set
        # in __init__ from the main configuration.

        # Error-handling options.
        if 'error_handling' in config:
            self.notify_on_failure = config['error_handling'].get('notify_on_failure', True)
            self.stop_on_failure = config['error_handling'].get('stop_on_failure', True)

        # Scheduler-wide options (log level, retry policy).
        if 'scheduler' in config:
            scheduler_config = config['scheduler']
            self.log_level = scheduler_config.get('log_level', 'INFO')
            if 'retry' in scheduler_config:
                self.retry_delay = scheduler_config['retry'].get('delay', 60)
                self.max_retry_attempts = scheduler_config['retry'].get('max_attempts', 3)

        # Reset the in-memory task registry before (re)loading.
        self.tasks = {}

        # Detect first run: no main tasks stored in the database yet.
        main_tasks = self.db.get_all_main_tasks()

        if not main_tasks and 'tasks' in config:
            # Database not initialized: seed it from the config file.
            # NOTE(review): this branch writes tasks to the DB but does not
            # populate self.tasks — confirm a reload is expected afterwards.
            logger.info("数据库未初始化,从配置文件加载任务")
            for task_id, task_config in config['tasks'].items():
                # Basic task fields.
                task_name = task_config.get('name', task_id)
                endpoint = task_config.get('endpoint', '')
                method = task_config.get('method', 'GET')
                params = task_config.get('params', {})

                # Schedule configuration.
                schedule_config = task_config.get('schedule', {})
                schedule_type = schedule_config.get('type', 'daily')
                schedule_time = schedule_config.get('time') if schedule_type == 'daily' else None
                schedule_delay = schedule_config.get('delay') if schedule_type == 'once' else None

                # Interval-specific settings.
                interval_value = None
                interval_unit = None
                if schedule_type == 'interval':
                    # Prefer interval_value/interval_unit, fall back to
                    # the shorter value/unit keys.
                    interval_value = schedule_config.get('interval_value', schedule_config.get('value'))
                    interval_unit = schedule_config.get('interval_unit', schedule_config.get('unit'))
                    print(f"从配置文件读取间隔任务: {task_id}, 间隔值={interval_value}, 单位={interval_unit}")

                # Config-file tasks are always top-level main tasks.
                task_type = 'main'
                parent_id = None
                sequence_number = None

                # Dependency list (task ids this task requires).
                requires = task_config.get('requires', [])

                # Persist the task definition.
                task_data = {
                    'name': task_name,
                    'endpoint': endpoint,
                    'method': method,
                    'params': params,
                    'task_type': task_type,
                    'parent_id': parent_id,
                    'sequence_number': sequence_number,
                    'schedule_type': schedule_type,
                    'schedule_time': schedule_time,
                    'schedule_delay': schedule_delay,
                    'interval_value': interval_value,
                    'interval_unit': interval_unit,
                    'enabled': True
                }
                self.db.create_main_task(task_id, task_data)

                # Persist dependency edges.
                for dep in requires:
                    self.db.add_task_dependency(task_id, dep)
        else:
            # Database already initialized: load tasks straight from it.
            logger.info("从数据库加载任务")
            for task_data in main_tasks:
                task_id = task_data['task_id']
                self.tasks[task_id] = task_data

            # Merge tasks that exist in the config file but not yet in the
            # database (lets newly added tasks take effect).
            try:
                if 'tasks' in config:
                    existing_ids = {t['task_id'] for t in main_tasks}
                    for new_task_id, task_cfg in config['tasks'].items():
                        if new_task_id in existing_ids:
                            continue
                        # Parse the schedule configuration.
                        schedule_cfg = task_cfg.get('schedule', {})
                        schedule_type = schedule_cfg.get('type', 'daily')
                        schedule_time = schedule_cfg.get('time') if schedule_type == 'daily' else None
                        schedule_delay = schedule_cfg.get('delay') if schedule_type == 'once' else None
                        interval_value = None
                        interval_unit = None
                        if schedule_type == 'interval':
                            interval_value = schedule_cfg.get('interval_value', schedule_cfg.get('value'))
                            interval_unit = schedule_cfg.get('interval_unit', schedule_cfg.get('unit'))
                        merged_task_data = {
                            'name': task_cfg.get('name', new_task_id),
                            'endpoint': task_cfg.get('endpoint', ''),
                            'method': task_cfg.get('method', 'GET'),
                            'params': task_cfg.get('params', {}),
                            'task_type': 'main',
                            'parent_id': None,
                            'sequence_number': None,
                            'schedule_type': schedule_type,
                            'schedule_time': schedule_time,
                            'schedule_delay': schedule_delay,
                            'interval_value': interval_value,
                            'interval_unit': interval_unit,
                            'enabled': True
                        }
                        self.db.create_main_task(new_task_id, merged_task_data)
                        self.tasks[new_task_id] = merged_task_data
                        # Persist dependency edges for the merged task.
                        for dep in task_cfg.get('requires', []):
                            self.db.add_task_dependency(new_task_id, dep)
                    logger.info("已合并配置文件中的新增任务到数据库")
            except Exception as merge_ex:
                logger.warning(f"合并配置任务时出错(忽略): {merge_ex}")

        # Apply the login-monitor override (enabled flag + interval) from
        # the main config.yaml.
        try:
            main_cfg = load_config()
            monitor_cfg = (main_cfg.get('server') or {}).get('login_monitor') or {}
            monitor_enabled = bool(monitor_cfg.get('enabled', True))
            monitor_interval = int(monitor_cfg.get('interval_minutes', 10))
            task_id = 'sessdata_health_check'
            base_task = {
                'name': 'SESSDATA健康检查',
                'endpoint': '/login/check-and-notify',
                'method': 'GET',
                'params': {},
                'task_type': 'main',
                'parent_id': None,
                'sequence_number': None,
                'schedule_type': 'interval',
                'schedule_time': None,
                'schedule_delay': None,
                'interval_value': monitor_interval,
                'interval_unit': 'minutes',
                'enabled': 1 if monitor_enabled else 0
            }
            if task_id in self.tasks:
                # Task already known: overwrite its schedule in memory...
                self.tasks[task_id].update({
                    'schedule_type': 'interval',
                    'schedule_time': None,
                    'interval_value': monitor_interval,
                    'interval_unit': 'minutes',
                    'enabled': 1 if monitor_enabled else 0
                })
                # ...and best-effort in the database.
                try:
                    self.db.update_main_task(task_id, {
                        'schedule_type': 'interval',
                        'schedule_time': None,
                        'interval_value': monitor_interval,
                        'interval_unit': 'minutes',
                        'enabled': 1 if monitor_enabled else 0
                    })
                except Exception:
                    pass
            else:
                # Create the monitor task only when the feature is enabled.
                if monitor_enabled:
                    self.db.create_main_task(task_id, base_task)
                    self.tasks[task_id] = base_task
                    logger.info("已根据配置创建登录监控任务 sessdata_health_check")
        except Exception as e:
            logger.warning(f"应用登录监控覆盖失败(忽略): {e}")

        # Build the dependency fan-out map.
        self._build_task_chains()

        # Register the daily tasks with the schedule library.
        self._setup_daily_tasks()

        logger.info(f"成功加载 {len(self.tasks)} 个任务,{len(self.daily_tasks)} 个每日任务,{len(self.task_chains)} 个任务链")
        self._initialized = True
    except Exception as e:
        logger.error(f"加载调度器配置失败: {str(e)}")
        traceback_str = traceback.format_exc()
        logger.debug(f"错误详情: {traceback_str}")
def _init_task_status_in_db(self):
    """Ensure every in-memory task has a status row in the database."""
    for task_id, task_cfg in self.tasks.items():
        # Skip tasks that already have a status record.
        if self.db.get_task_status(task_id):
            continue
        # New task: create its status row, enabled by default.
        self.db.update_task_status(task_id, {
            'name': task_cfg.get('name', task_id),
            'enabled': 1
        })
def _build_task_chains(self):
    """Rebuild the dependency fan-out map (predecessor -> successors)
    from the dependencies stored in the enhanced scheduler database."""
    chains = {}
    for task_id in self.tasks:
        # Every dependency of task_id gains task_id as a follow-up task.
        for dep in self.db.get_task_dependencies(task_id):
            successors = chains.setdefault(dep, [])
            if task_id not in successors:
                successors.append(task_id)

    self.task_chains = chains
    logger.info(f"构建了 {len(chains)} 个任务链")
    # Log every chain for debugging.
    for source, targets in chains.items():
        logger.info(f"任务链: {source} -> {', '.join(targets)}")
def _setup_daily_tasks(self):
    """(Re)register all enabled daily tasks with the `schedule` library."""
    now = datetime.now()  # NOTE(review): unused in this method; kept for parity with schedule_tasks
    # Drop every previously registered job before re-adding.
    schedule.clear()
    schedule.jobs.clear()
    # Reset the in-memory task status.
    self.task_status.clear()

    # Bridge: `schedule` invokes sync callables, but task execution is
    # async, so run the chain inside an event loop.
    def sync_execute_task(task_name):
        try:
            loop = asyncio.get_running_loop()
        except RuntimeError:
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
        try:
            task = loop.create_task(self.execute_task_chain(task_name))
            loop.run_until_complete(asyncio.gather(task))
        except Exception as e:
            print(f"执行任务时发生错误: {str(e)}")
        finally:
            # Close only loops we own (a running loop is shared).
            if loop and not loop.is_running():
                loop.close()

    for task_name, task in self.tasks.items():
        if task.get('schedule_type') == 'daily':
            # The enabled flag comes from the database; missing record
            # means disabled.
            task_data = self.db.get_main_task_by_id(task_name)
            is_enabled = bool(task_data.get('enabled', False)) if task_data else False
            # Only schedule enabled tasks.
            if is_enabled:
                schedule_time = task.get('schedule_time')
                if not schedule_time:
                    print(f"警告: 任务 {task_name} 没有设置调度时间")
                    continue
                # Persist the computed next-run time for status displays.
                next_run = self._calculate_next_run_time(schedule_time)
                if next_run:
                    next_run_str = next_run.strftime('%Y-%m-%d %H:%M:%S')
                    self.db.update_task_status(task_name, {'next_run_time': next_run_str})
                try:
                    job = schedule.every().day.at(schedule_time).do(sync_execute_task, task_name)
                    print(f"已设置任务 {task_name} 的调度时间: {schedule_time}")
                except Exception as e:
                    print(f"设置任务调度失败: {str(e)}")
def add_main_task(self, task_id, task_data):
    """Create a new main task; returns True on success, False otherwise."""
    # Refuse duplicate ids.
    if self.db.is_main_task(task_id):
        logger.warning(f"主任务 {task_id} 已存在,无法添加")
        return False

    if not self.db.create_main_task(task_id, task_data):
        return False

    # Mirror the persisted task into the in-memory registry.
    stored = self.db.get_main_task_by_id(task_id)
    if stored:
        self.tasks[task_id] = stored
        # Daily tasks require the schedule to be rebuilt to take effect.
        if stored.get('schedule_type') == 'daily':
            self._setup_daily_tasks()

    logger.info(f"成功添加主任务: {task_id}")
    return True
def add_sub_task(self, parent_id, task_id, sub_task_data):
    """Attach a sub task to an existing main task; returns success flag."""
    # The parent must exist and actually be a main task.
    if not self.db.is_main_task(parent_id):
        logger.warning(f"父任务 {parent_id} 不存在或不是主任务,无法添加子任务")
        return False

    # The DB layer expects the sub task's id inside its payload.
    sub_task_data.setdefault('task_id', task_id)

    if not self.db.create_sub_task(parent_id, sub_task_data):
        return False
    logger.info(f"成功为主任务 {parent_id} 添加子任务 {task_id}")
    return True
def update_task_dependencies(self, task_id, dependencies):
    """Replace a task's dependency list and rebuild the task chains."""
    # Drop the old edges, then write the new ones.
    self.db.remove_all_task_dependencies(task_id)
    for dependency in dependencies:
        self.db.add_task_dependency(task_id, dependency)

    # Dependency fan-out must be recomputed after the change.
    self._build_task_chains()
    logger.info(f"成功更新任务 {task_id} 的依赖关系: {dependencies}")
    return True
async def execute_task(self, task_id: str) -> bool:
    """Execute one task (delegating to its full chain); True on success."""
    print(f"\n=== 执行任务: {task_id} ===")
    print(f"当前时间: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
    try:
        task = self.tasks.get(task_id)
        if task is None:
            print(f"错误: 任务 {task_id} 不存在")
            return False

        print(f"开始执行任务: {task['name']}")
        # Run the whole chain so dependent tasks execute too.
        chain_ok = await self.execute_task_chain(task_id)
        print(f"任务 {task_id} 执行完成")
        return chain_ok
    except Exception as e:
        print(f"执行任务时发生错误: {str(e)}")
        return False
async def execute_task_chain(self, task_id: str) -> bool:
    """Execute a task chain: the main task, its sub tasks, and dependents.

    Main-task failures abort the chain; sub-task failures are logged but
    the remaining sub tasks still run.

    Returns:
        True when the whole chain completed, False otherwise.
    """
    print(f"\n=== 执行任务链: {task_id} ===")
    print(f"当前时间: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
    try:
        # Unknown task id: nothing to do.
        if task_id not in self.tasks:
            print(f"错误: 任务 {task_id} 不存在")
            return False

        # A disabled root task disables the whole chain.
        task_data = self.db.get_main_task_by_id(task_id)
        if not task_data or not task_data.get('enabled', False):
            print(f"主任务 {task_id} 已禁用,跳过执行")
            return False

        # Resolve the ordered list of tasks to run.
        task_chain = self._build_chain_from_task(task_id)
        if not task_chain:
            print(f"错误: 无法构建任务 {task_id} 的执行链")
            return False

        print(f"任务链: {' -> '.join(task_chain)}")

        # Execute each task in chain order.
        for chain_task_id in task_chain:
            print(f"\n执行链中的任务: {chain_task_id}")
            # Mark which chain is currently running (cleared in finally).
            self.current_chain = task_id

            # Main tasks run first, then their sub tasks.
            if self.db.is_main_task(chain_task_id):
                # Skip disabled main tasks but keep the chain going.
                main_task_data = self.db.get_main_task_by_id(chain_task_id)
                if not main_task_data or not main_task_data.get('enabled', False):
                    print(f"主任务 {chain_task_id} 已禁用,跳过执行")
                    continue

                # Main-task failure aborts the whole chain.
                success = await self._execute_single_task(chain_task_id)
                if not success:
                    print(f"主任务 {chain_task_id} 执行失败")
                    return False

                # Run the sub tasks in their configured order.
                sub_tasks = self.db.get_sub_tasks(chain_task_id)
                if sub_tasks:
                    print(f"\n开始执行主任务 {chain_task_id} 的子任务")
                    sub_tasks.sort(key=lambda x: x.get('sequence_number', 0))
                    for sub_task in sub_tasks:
                        sub_task_id = sub_task['task_id']
                        if sub_task.get('enabled', False):
                            print(f"\n执行子任务: {sub_task_id}")
                            sub_success = await self._execute_single_task(sub_task_id, is_sub_task=True)
                            if not sub_success:
                                print(f"子任务 {sub_task_id} 执行失败")
                                # Sub-task failure is non-fatal: continue
                                # with the remaining sub tasks.
                                continue
                        else:
                            print(f"子任务 {sub_task_id} 已禁用,跳过执行")
                            continue
            else:
                # Plain (non-main) task: failure aborts the chain.
                success = await self._execute_single_task(chain_task_id)
                if not success:
                    print(f"任务 {chain_task_id} 执行失败")
                    return False

        print(f"\n任务链执行完成: {task_id}")
        return True
    except Exception as e:
        print(f"执行任务链时发生错误: {str(e)}")
        return False
    finally:
        # Always clear the currently-running-chain marker.
        self.current_chain = None
def find_next_task(self, current_task: str) -> Optional[str]:
    """Return the first task whose ``requires`` list names
    ``current_task``, or None when nothing depends on it."""
    return next(
        (name for name, cfg in self.tasks.items()
         if current_task in cfg.get('requires', ())),
        None,
    )
def schedule_tasks(self):
"""设置任务调度"""
print("\n=== 开始设置任务调度 ===")
now = datetime.now()
print(f"当前时间: {now.strftime('%Y-%m-%d %H:%M:%S')}")
# 先清除所有现有的调度任务
schedule.clear()
schedule.jobs.clear() # 确保完全清除所有任务
print("已清除所有现有调度任务")
# 重置所有任务的状态
self.task_status.clear()
print(f"当前已配置的任务: {list(self.tasks.keys())}")
# 创建一个同步的执行函数
def sync_execute_task(task_name):
print(f"\n=== 调度器触发任务执行 ===")
print(f"当前时间: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
print(f"任务名称: {task_name}")
# 获取或创建事件循环
try:
loop = asyncio.get_event_loop()
except RuntimeError:
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
# 在事件循环中执行异步任务
try:
# 创建任务并等待其完成
task = loop.create_task(self.execute_task_chain(task_name))
loop.run_until_complete(asyncio.gather(task))
except Exception as e:
print(f"执行任务时发生错误: {str(e)}")
finally:
# 如果我们创建了新的事件循环,需要关闭它
if loop and not loop.is_running():
loop.close()
for task_name, task in self.tasks.items():
print(f"\n--- 处理任务: {task_name} ---")
# 打印任务详细信息
print(f"任务配置: {task}")
if task.get('schedule_type') == 'daily':
# 获取任务的启用状态
task_status = self.db.get_task_status(task_name)
is_enabled = True
if task_status and 'enabled' in task_status:
is_enabled = bool(task_status['enabled'])
print(f"任务状态: {'启用' if is_enabled else '禁用'}")
# 只调度启用的任务
if is_enabled:
schedule_time = task.get('schedule_time')
if not schedule_time:
print(f"警告: 任务 {task_name} 没有设置调度时间")
continue
print(f"调度时间: {schedule_time}")
# 计算下次执行时间
next_run = self._calculate_next_run_time(schedule_time)
if next_run:
next_run_str = next_run.strftime('%Y-%m-%d %H:%M:%S')
self.db.update_task_status(task_name, {'next_run_time': next_run_str})
print(f"计算的下次执行时间: {next_run_str}")
time_diff = (next_run - now).total_seconds() / 60
print(f"距离现在: {time_diff:.1f} 分钟")
# 使用schedule库设置任务
try:
# 确保时间格式正确
if ':' not in schedule_time or len(schedule_time.split(':')) != 2:
raise ValueError(f"时间格式不正确: {schedule_time}, 应为 HH:MM 格式")
print(f"正在设置schedule任务...")
# 使用同步函数包装异步执行
job = schedule.every().day.at(schedule_time).do(
sync_execute_task, task_name
)
# 验证任务是否已正确设置
if job in schedule.jobs:
print(f"任务已成功添加到调度队列")
print(f"任务详情: {job}")
if hasattr(job, 'next_run'):
print(f"Schedule库计算的下次执行时间: {job.next_run.strftime('%Y-%m-%d %H:%M:%S')}")
else:
print(f"警告: 任务可能未成功添加到调度队列")
except Exception as e:
print(f"设置任务调度失败: {str(e)}")
else:
print(f"警告: 无法计算下次执行时间")
else:
print(f"任务已禁用,跳过调度")
elif task.get('schedule_type') == 'once':
# 检查任务是否已执行过
task_status = self.db.get_task_status(task_name)
if task_status and task_status.get('last_run_time'):
print(f"一次性任务 {task_name} 已执行过,跳过")
continue
delay = task.get('schedule_delay', 0)
print(f"设置一次性任务: {task_name}, {delay}秒后执行")
# 使用同步函数包装异步执行
def delayed_sync_execute(task_name, delay):
import time
print(f"等待{delay}秒后执行任务: {task_name}")
time.sleep(delay)
sync_execute_task(task_name)
# 在新线程中执行延迟任务
import threading
thread = threading.Thread(
target=delayed_sync_execute,
args=(task_name, delay)
)
thread.daemon = True
thread.start()
elif task.get('schedule_type') == 'interval':
# 获取任务的启用状态
task_status = self.db.get_task_status(task_name)
is_enabled = task.get('enabled', True)
if task_status and 'enabled' in task_status:
is_enabled = bool(task_status['enabled'])
print(f"任务状态: {'启用' if is_enabled else '禁用'}")
# 只调度启用的任务
if is_enabled:
interval_value = task.get('interval_value')
interval_unit = task.get('interval_unit')
if not interval_value or not interval_unit:
print(f"警告: 任务 {task_name} 没有设置有效的间隔值或单位")
continue
print(f"间隔设置: 每 {interval_value} {interval_unit}")
# 使用schedule库设置任务
try:
print(f"正在设置interval调度任务...")
# 根据interval_unit选择合适的调度方法
job = None
if interval_unit == 'minutes':
job = schedule.every(interval_value).minutes.do(sync_execute_task, task_name)
elif interval_unit == 'hours':
job = schedule.every(interval_value).hours.do(sync_execute_task, task_name)
elif interval_unit == 'days':
job = schedule.every(interval_value).days.do(sync_execute_task, task_name)
elif interval_unit == 'weeks':
job = schedule.every(interval_value).weeks.do(sync_execute_task, task_name)
else:
print(f"警告: 不支持的间隔单位: {interval_unit}")
continue
# 验证任务是否已正确设置
if job in schedule.jobs:
print(f"间隔任务已成功添加到调度队列")
print(f"任务详情: {job}")
if hasattr(job, 'next_run'):
next_run = job.next_run
print(f"Schedule库计算的下次执行时间: {next_run.strftime('%Y-%m-%d %H:%M:%S')}")
# 更新数据库中的下次执行时间
next_run_str = next_run.strftime('%Y-%m-%d %H:%M:%S')
self.db.update_task_status(task_name, {'next_run_time': next_run_str})
# 计算距离现在的时间
time_diff = (next_run - now).total_seconds() / 60
print(f"距离现在: {time_diff:.1f} 分钟")
else:
print(f"警告: 间隔任务可能未成功添加到调度队列")
except Exception as e:
print(f"设置间隔任务调度失败: {str(e)}")
import traceback
traceback.print_exc()
else:
print(f"间隔任务已禁用,跳过调度")
# 打印最终的调度状态
print("\n=== 当前调度状态 ===")
if not schedule.jobs:
print("没有已调度的任务")
else:
for job in schedule.jobs:
print(f"- {job}")
if hasattr(job, 'next_run') and job.next_run:
time_diff = (job.next_run - now).total_seconds() / 60
print(f" 下次执行时间: {job.next_run.strftime('%Y-%m-%d %H:%M:%S')}")
print(f" 距离现在: {time_diff:.1f} 分钟")
print("\n=== 任务调度设置完成 ===")
def _calculate_next_run_time(self, time_str, allow_today=True):
"""计算下次执行时间"""
try:
hour, minute = map(int, time_str.split(':'))
now = datetime.now()
today_run_time = now.replace(hour=hour, minute=minute, second=0, microsecond=0)
time_diff_minutes = (today_run_time - now).total_seconds() / 60
if time_diff_minutes > 0:
next_run = today_run_time
else:
from datetime import timedelta
next_run = today_run_time + timedelta(days=1)
return next_run
except Exception as e:
error_msg = f"计算下次执行时间失败: {str(e)}"
logger.error(error_msg, exception=e)
return None
async def run_scheduler(self):
    """Run the scheduler loop.

    Reloads the task configuration, rebuilds every schedule entry, then
    polls `schedule.run_pending()` once per wall-clock minute until
    `self.is_running` is cleared.  On an unexpected error the loop sleeps
    60 seconds and reloads the whole scheduler before continuing.
    """
    print(f"\n=== 开始运行调度器 [{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}] ===")
    # Reload configuration and register all tasks from scratch.
    self.load_scheduler_config()
    # Drop previously registered jobs and cached state so the freshly
    # loaded config is the single source of truth.
    schedule.clear()
    schedule.jobs.clear()
    self.task_status.clear()
    self.chain_status.clear()
    self.schedule_tasks()
    # Print the initial schedule for operator visibility.
    if schedule.jobs:
        print("\n当前已调度的任务:")
        for job in schedule.jobs:
            if hasattr(job, 'next_run') and job.next_run:
                print(f"- {job}")
                print(f"  下次执行时间: {job.next_run.strftime('%Y-%m-%d %H:%M:%S')}")
    else:
        print("\n没有已调度的任务")
    self.is_running = True
    last_check_minute = -1
    while self.is_running:
        try:
            now = datetime.now()
            current_minute = now.minute
            # Only call run_pending() when the minute changes; the 1s sleep
            # below keeps the loop responsive to stop requests in between.
            if current_minute != last_check_minute:
                last_check_minute = current_minute
                # Run any jobs that are due.
                schedule.run_pending()
            # Yield for one second.
            await asyncio.sleep(1)
        except Exception as e:
            error_msg = f"调度器运行错误: {str(e)}"
            print(f"\n!!! 调度器错误: {error_msg}")
            logger.error(error_msg, exception=e)
            await asyncio.sleep(60)  # wait 60s after an error before retrying
            self.reload_scheduler()
def stop_scheduler(self):
    """Signal the scheduler loop to exit and release the database handle."""
    # Flipping this flag makes run_scheduler's while-loop terminate.
    self.is_running = False
    # The db attribute may be absent if initialisation failed part-way.
    db = getattr(self, 'db', None)
    if db is not None:
        db.close()
def reload_scheduler(self):
    """Rebuild all scheduled jobs from the on-disk configuration.

    Clears the global `schedule` registry and this instance's cached task
    state, re-reads the config, re-registers every task, then prints a
    summary of the resulting schedule.
    """
    now_text = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    print("\n=== 重新加载调度配置 ===")
    print(f"当前时间: {now_text}")

    # Wipe both the schedule library's job list and our cached state so
    # nothing from the previous configuration can linger.
    schedule.clear()
    schedule.jobs.clear()
    print("已清除所有现有调度任务")
    self.task_status.clear()
    self.chain_status.clear()

    # Re-read the config file and register every task again.
    self.load_scheduler_config()
    self.schedule_tasks()

    # Report the resulting schedule for operator visibility.
    print("\n当前调度状态:")
    for registered in schedule.jobs:
        print(f"- {registered}")
        next_run = getattr(registered, 'next_run', None)
        if not next_run:
            continue
        minutes_away = (next_run - datetime.now()).total_seconds() / 60
        print(f"  下次执行时间: {next_run.strftime('%Y-%m-%d %H:%M:%S')}")
        print(f"  距离现在: {minutes_away:.1f} 分钟")
    print("\n调度配置重新加载完成")
def update_task_enabled_status(self, task_id: str, enabled: bool):
    """Persist a task's enabled flag and re-register all schedules.

    Args:
        task_id: Key of the task in self.tasks.
        enabled: Desired enabled state.

    Returns:
        False when the task id is unknown; True after the flag has been
        written and the scheduler reloaded.
    """
    # Guard: only tasks present in the loaded configuration can be toggled.
    if task_id not in self.tasks:
        return False
    # The DB stores the flag as an integer column (1 = enabled, 0 = disabled).
    flag = int(bool(enabled))
    self.db.update_task_status(task_id, {'enabled': flag})
    # Reload so the new flag takes effect immediately.
    self.reload_scheduler()
    return True
def update_task_schedule_time(self, task_id: str, new_time: str):
    """Change a daily task's HH:MM schedule and rebuild the scheduler.

    Writes the new time into the in-memory config and the YAML file, then
    tears down and re-registers every job so the change takes effect.

    Args:
        task_id: Key of the task in self.tasks.
        new_time: New schedule time in "HH:MM" (24h) format.

    Returns:
        True when the task was rescheduled and found in the new job list,
        False on any validation or rebuild failure.
    """
    print(f"\n=== 更新任务调度时间 ===")
    print(f"任务ID: {task_id}")
    print(f"新的调度时间: {new_time}")
    try:
        # 1. The task must already exist in the loaded configuration.
        if task_id not in self.tasks:
            print(f"错误: 任务 {task_id} 不存在")
            return False
        # 2. Validate the HH:MM format by round-tripping through strptime.
        try:
            datetime.strptime(new_time, "%H:%M")
        except ValueError:
            print(f"错误: 无效的时间格式 {new_time},应为 HH:MM")
            return False
        # 3. Update the in-memory task configuration.
        # NOTE(review): this writes tasks[task_id]['schedule']['time'], while
        # schedule_tasks() reads task.get('schedule_time') — confirm the two
        # key layouts are kept in sync elsewhere.
        print("更新内存中的任务配置...")
        self.tasks[task_id]['schedule']['time'] = new_time
        # 4. Persist the change to the YAML config file.
        print("保存配置到文件...")
        self._save_config_to_file()
        # 5. Pause the scheduler and drop all registered jobs.
        print("停止当前调度器...")
        old_running = self.is_running
        self.is_running = False
        schedule.clear()
        schedule.jobs.clear()
        # 6. Re-read the configuration from disk.
        print("重新加载配置...")
        self.load_scheduler_config()
        # 7. Re-register every task with the schedule library.
        print("重新设置任务调度...")
        self.schedule_tasks()
        # 8. Restore the scheduler's previous running state.
        print("恢复调度器状态...")
        self.is_running = old_running
        # 9. Verify the task is present in the rebuilt job list.
        print("\n=== 检查新的调度设置 ===")
        found = False
        now = datetime.now()
        for job in schedule.jobs:
            # String matching is the only handle the schedule library exposes
            # for identifying which job belongs to which task.
            if task_id in str(job):
                found = True
                next_run = job.next_run
                time_diff = (next_run - now).total_seconds() / 60
                print(f"找到任务: {job}")
                print(f"下次执行时间: {next_run.strftime('%Y-%m-%d %H:%M:%S')}")
                print(f"距离现在: {time_diff:.1f} 分钟")
        if not found:
            print(f"警告: 未找到任务 {task_id} 的新调度设置")
            return False
        print("\n任务调度时间更新成功")
        return True
    except Exception as e:
        print(f"更新任务调度时间时发生错误: {str(e)}")
        return False
def _save_config_to_file(self):
    """Write the current task definitions back into scheduler_config.yaml.

    Existing top-level keys in the file are preserved; only 'tasks' is
    replaced with the in-memory task table.

    Raises:
        Exception: re-raised after logging when the file cannot be written.
    """
    try:
        # Resolve the config location through the shared utils helper.
        config_path = get_config_path('scheduler_config.yaml')
        # Load the full document first so unrelated settings survive the rewrite.
        with open(config_path, 'r', encoding='utf-8') as fh:
            document = yaml.safe_load(fh)
        document['tasks'] = self.tasks
        with open(config_path, 'w', encoding='utf-8') as fh:
            yaml.dump(document, fh, allow_unicode=True, default_flow_style=False)
        print(f"配置已保存到: {config_path}")
        return True
    except Exception as e:
        logger.error(f"保存配置文件失败: {str(e)}")
        raise
def sync_execute_task(self, task_name):
    """Synchronous wrapper that runs an async task chain from schedule's thread.

    The `schedule` library invokes plain callables, so this bridges into
    asyncio: it reuses the running event loop when one exists, otherwise
    creates (and later closes) a private one.

    Args:
        task_name: Identifier of the task whose chain should be executed.

    Returns:
        The task chain's result, or False on any error.
    """
    print(f"\n=== 同步执行任务: {task_name} ===")
    print(f"当前时间: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
    try:
        # Get the current event loop; create a new one when none exists.
        try:
            loop = asyncio.get_running_loop()
        except RuntimeError:
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
            should_close_loop = True
        else:
            should_close_loop = False
        # Execute the task inside the event loop.
        try:
            # Create the coroutine task and wait for it to finish.
            if loop.is_running():
                try:
                    # Loop already running: hand the coroutine over thread-safely.
                    # NOTE(review): future.result() blocks — assumes we are NOT on
                    # the loop's own thread, otherwise this would deadlock; confirm.
                    future = asyncio.run_coroutine_threadsafe(
                        self.execute_task_chain(task_name),
                        loop
                    )
                    result = future.result()
                except RuntimeError as re:
                    # Handle the "This event loop is already running" error.
                    if "This event loop is already running" in str(re):
                        print(f"执行任务时发生事件循环错误: {str(re)},任务将被跳过")
                        # Record the failure so it shows up in the history table.
                        self._record_task_failure(
                            task_name,
                            datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
                            f"事件循环错误: {str(re)}",
                            "scheduler"
                        )
                        return False
                    else:
                        raise  # re-raise any other RuntimeError
            else:
                # Loop not running: drive it to completion ourselves.
                task = asyncio.ensure_future(self.execute_task_chain(task_name), loop=loop)
                result = loop.run_until_complete(task)
            return result
        finally:
            # Only close the loop we created ourselves.
            if should_close_loop and not loop.is_running():
                loop.close()
    except Exception as e:
        print(f"执行任务时发生错误: {str(e)}")
        return False
async def _execute_single_task(self, task_id: str, is_sub_task: bool = False) -> bool:
    """Execute one task (main or sub) by calling its configured HTTP endpoint.

    Builds the request from the task's config (endpoint/method/params),
    forces the internal-API call type, performs the request with httpx,
    and records success/failure in the scheduler database.

    Args:
        task_id: Task identifier (main task key or subtask id).
        is_sub_task: True when task_id refers to a subtask row.

    Returns:
        bool: True when the endpoint returned HTTP 200 with status == "success".
    """
    print(f"\n=== 执行{'子' if is_sub_task else ''}任务: {task_id} ===")
    start_time = datetime.now()
    start_time_str = start_time.strftime('%Y-%m-%d %H:%M:%S')
    # A missing current_chain attribute means a direct run, not a chain step.
    triggered_by = "manual" if not hasattr(self, 'current_chain') else f"chain:{self.current_chain}"
    try:
        task = None
        if is_sub_task:
            task = self.db.get_subtask_by_id(task_id)
        else:
            task = self.tasks.get(task_id)
        if not task:
            print(f"错误: {'子' if is_sub_task else ''}任务 {task_id} 不存在")
            self._record_task_failure(task_id, start_time_str, "任务不存在", triggered_by)
            return False
        # Ensure base_url carries a protocol prefix.
        base_url = self.base_url
        if not base_url.startswith(('http://', 'https://')):
            base_url = f"http://{base_url}"
            print(f"警告: base_url未包含协议前缀,已自动添加http://前缀")
        # Build the full request URL.
        endpoint = task['endpoint']
        if endpoint.startswith('/'):
            url = f"{base_url}{endpoint}"
        else:
            url = f"{base_url}/{endpoint}"
        print(f"请求URL: {url}")
        method = task.get('method', 'GET').upper()
        params = task.get('params', {})
        timeout = task.get('timeout', 300)
        # Force every scheduled task to be treated as an internal API call.
        task['task_type'] = 'internal_api'
        # Resolve request headers from the API-security config.
        try:
            config = load_config()
            api_security = config.get('server', {}).get('api_security', {})
            api_enabled = api_security.get('enabled', False)
            api_key = api_security.get('api_key', '')
            logger.info(f"API安全状态: enabled={api_enabled}, API密钥长度: {len(api_key) if api_key else 0}")
            print(f"API安全状态: enabled={api_enabled}, API密钥长度: {len(api_key) if api_key else 0}")
            # Check whether the task type is an internal API call.
            task_type = task.get('task_type', '')
            logger.info(f"任务 {task_id} 类型: {task_type}")
            print(f"任务 {task_id} 类型: {task_type}")
            if task_type == 'internal_api':
                # Internal API calls skip API-key validation.
                logger.info(f"任务 {task_id} 是内部API调用,跳过API密钥验证")
                print(f"任务 {task_id} 是内部API调用,跳过API密钥验证")
                headers = {'X-Internal-Call': 'true'}
            elif api_security.get('enabled', False):
                # Regular task: attach the API key.
                # NOTE(review): unreachable in practice — task_type is forced to
                # 'internal_api' a few lines above; confirm before relying on it.
                api_key = api_security.get('api_key', '')
                headers = {'X-API-Key': api_key}
                logger.info(f"任务 {task_id} 已添加API密钥到请求头,密钥长度: {len(api_key)}")
                print(f"任务 {task_id} 已添加API密钥到请求头,密钥长度: {len(api_key)}")
            else:
                # API security disabled: send no key header.
                headers = {}
                logger.info(f"API安全验证未启用,不添加API密钥")
                print(f"API安全验证未启用,不添加API密钥")
        except Exception as e:
            logger.error(f"获取API密钥失败: {str(e)}")
            print(f"获取API密钥失败: {str(e)}")
            headers = {}
        async with httpx.AsyncClient(timeout=timeout) as client:
            if method == 'GET':
                response = await client.get(url, params=params, headers=headers)
            else:
                response = await client.post(url, json=params, headers=headers)
            end_time = datetime.now()
            end_time_str = end_time.strftime('%Y-%m-%d %H:%M:%S')
            duration = (end_time - start_time).total_seconds()
            if response.status_code == 200:
                result = response.json()
                # The API wraps outcomes in a JSON envelope with a "status" field.
                if result.get("status") == "success":
                    print(f"任务 {task_id} 执行成功")
                    self.task_status[task_id] = True
                    self.db.record_task_execution_enhanced(
                        task_id=task_id,
                        start_time=start_time_str,
                        end_time=end_time_str,
                        duration=duration,
                        status="success",
                        triggered_by=triggered_by,
                        output=str(result)
                    )
                    return True
                else:
                    error_msg = result.get('message', '未知错误')
                    print(f"任务 {task_id} 执行失败: {error_msg}")
                    self._record_task_failure(task_id, start_time_str, error_msg, triggered_by)
                    return False
            else:
                error_msg = f"请求失败: {response.status_code}"
                print(f"任务 {task_id} 请求失败: {response.status_code}")
                self._record_task_failure(task_id, start_time_str, error_msg, triggered_by)
                return False
    except Exception as e:
        error_msg = str(e)
        print(f"执行任务时发生错误: {error_msg}")
        self._record_task_failure(task_id, start_time_str, error_msg, triggered_by)
        return False
def _record_task_failure(self, task_id, start_time_str, error_msg, triggered_by):
"""记录任务失败信息"""
end_time = datetime.now()
end_time_str = end_time.strftime('%Y-%m-%d %H:%M:%S')
duration = (datetime.strptime(end_time_str, '%Y-%m-%d %H:%M:%S') -
datetime.strptime(start_time_str, '%Y-%m-%d %H:%M:%S')).total_seconds()
self.task_status[task_id] = False
self.db.record_task_execution_enhanced(
task_id=task_id,
start_time=start_time_str,
end_time=end_time_str,
duration=duration,
status="fail",
error_message=error_msg,
triggered_by=triggered_by
)
def _build_chain_from_task(self, task_id: str) -> List[str]:
"""根据任务ID构建执行链
Args:
task_id: 任务ID
Returns:
List[str]: 任务执行顺序列表
"""
print(f"构建任务 {task_id} 的执行链")
try:
# 如果任务不存在,返回空列表
if task_id not in self.tasks:
print(f"任务 {task_id} 不存在")
return []
# 获取任务的依赖项
dependencies = self.db.get_task_dependencies(task_id)
print(f"任务 {task_id} 的依赖项: {dependencies}")
# 如果没有依赖,只返回当前任务
if not dependencies:
print(f"任务 {task_id} 没有依赖项,返回单任务链")
return [task_id]
# 构建完整的执行链
execution_chain = []
# 首先添加所有依赖任务
for dep in dependencies:
# 递归构建依赖任务的执行链
dep_chain = self._build_chain_from_task(dep)
# 添加未包含的任务
for task in dep_chain:
if task not in execution_chain:
execution_chain.append(task)
# 最后添加当前任务
if task_id not in execution_chain:
execution_chain.append(task_id)
print(f"任务 {task_id} 的执行链: {' -> '.join(execution_chain)}")
return execution_chain
except Exception as e:
print(f"构建任务链时发生错误: {str(e)}")
return [task_id] # 出错时至少返回当前任务
def delete_main_task(self, task_id: str) -> bool:
    """Delete a main task from the DB and drop every in-memory reference.

    Args:
        task_id: Identifier of the main task to remove.

    Returns:
        bool: True when the DB row was removed and caches were updated.
    """
    # Unknown tasks are reported but not treated as an error condition.
    if task_id not in self.tasks:
        logger.warning(f"任务 {task_id} 不存在")
        return False
    if not self.db.delete_main_task(task_id):
        return False
    # Purge every cache that may still reference the removed task.
    self.tasks.pop(task_id, None)
    self.daily_tasks.pop(task_id, None)
    # Dependencies may have pointed at the removed task; rebuild the chains.
    self._build_task_chains()
    logger.info(f"成功删除主任务: {task_id}")
    return True
def _check_scheduled_tasks(self):
    """Poll the main_tasks table and launch every task whose time has come.

    Covers the three schedule types: 'daily' (HH:MM, at most once per day),
    'once' (creation time + schedule_delay seconds), and 'interval'
    (last execution + interval, with first-run rules based on creation time
    or an optional initial delay).  Due tasks are launched on daemon threads
    via _execute_task_wrapper so a slow task cannot block polling.

    Returns:
        bool: True when the check completed, False on error.
    """
    try:
        cursor = self.db.conn.cursor()
        # All enabled main tasks; subtasks run through their parent's chain.
        cursor.execute("""
            SELECT task_id, name, endpoint, method, params, schedule_type,
                   schedule_time, schedule_delay, interval_value, interval_unit,
                   last_executed, last_status
            FROM main_tasks
            WHERE enabled = 1 AND task_type = 'main'
        """)
        tasks = cursor.fetchall()
        current_time = datetime.now()
        for task in tasks:
            task_id, name, endpoint, method, params, schedule_type, schedule_time, schedule_delay, interval_value, interval_unit, last_executed, last_status = task
            should_execute = False
            # Parse the last execution timestamp (ISO format expected).
            if last_executed:
                try:
                    last_exec_time = datetime.fromisoformat(last_executed)
                except ValueError:
                    last_exec_time = None
            else:
                last_exec_time = None
            # Decide whether the task is due.
            if schedule_type == 'daily':
                # Daily task: fire once per day at/after the configured HH:MM.
                if schedule_time and not self._is_executed_today(task_id):
                    scheduled_time = self._parse_schedule_time(schedule_time)
                    if scheduled_time and current_time.time() >= scheduled_time:
                        should_execute = True
                        logger.info(f"每日任务 '{task_id}' 达到执行时间 {schedule_time}")
            elif schedule_type == 'once':
                # One-shot task: creation time + schedule_delay seconds.
                if not last_executed and schedule_delay is not None:
                    creation_time = self._get_task_creation_time(task_id)
                    if creation_time:
                        # NOTE(review): assumes timedelta is imported at module
                        # level — confirm against the file's import block.
                        scheduled_time = creation_time + timedelta(seconds=schedule_delay)
                        if current_time >= scheduled_time:
                            should_execute = True
                            logger.info(f"一次性任务 '{task_id}' 达到执行时间")
            elif schedule_type == 'interval':
                # Repeating task: due when last execution + interval has passed.
                if interval_value is not None and interval_unit and last_exec_time:
                    next_exec_time = self._calculate_next_interval_execution(
                        last_exec_time, interval_value, interval_unit
                    )
                    if current_time >= next_exec_time:
                        should_execute = True
                        logger.info(f"间隔任务 '{task_id}' 达到执行时间 (每 {interval_value} {interval_unit})")
                elif interval_value is not None and interval_unit and not last_exec_time:
                    # Never ran yet: honour an explicit initial delay if set...
                    if schedule_delay is not None:
                        creation_time = self._get_task_creation_time(task_id)
                        if creation_time:
                            scheduled_time = creation_time + timedelta(seconds=schedule_delay)
                            if current_time >= scheduled_time:
                                should_execute = True
                                logger.info(f"间隔任务 '{task_id}' 首次执行 (每 {interval_value} {interval_unit})")
                    else:
                        # ...otherwise count the first interval from creation time.
                        creation_time = self._get_task_creation_time(task_id)
                        if creation_time:
                            next_exec_time = self._calculate_next_interval_execution(
                                creation_time, interval_value, interval_unit
                            )
                            if current_time >= next_exec_time:
                                should_execute = True
                                logger.info(f"间隔任务 '{task_id}' 首次执行时间到达 (每 {interval_value} {interval_unit})")
                            else:
                                logger.info(f"间隔任务 '{task_id}' 等待首次执行时间 {next_exec_time}")
            # Launch the due task on a daemon thread.
            if should_execute:
                thread = threading.Thread(
                    target=self._execute_task_wrapper,
                    args=(task_id, name, endpoint, method, params),
                    daemon=True
                )
                thread.start()
        return True
    except Exception as e:
        logger.error(f"检查计划任务出错: {str(e)}")
        return False
def _calculate_next_interval_execution(self, last_exec_time, interval_value, interval_unit):
"""计算间隔任务的下次执行时间"""
if not isinstance(interval_value, int) or interval_value <= 0:
logger.warning(f"无效的间隔值: {interval_value}")
return datetime.max # 返回一个极远的未来时间,防止任务被执行
try:
if interval_unit == 'minutes':
return last_exec_time + timedelta(minutes=interval_value)
elif interval_unit == 'hours':
return last_exec_time + timedelta(hours=interval_value)
elif interval_unit == 'days':
return last_exec_time + timedelta(days=interval_value)
elif interval_unit == 'weeks':
return last_exec_time + timedelta(weeks=interval_value)
elif interval_unit == 'months':
# Python的timedelta没有months,手动计算
year = last_exec_time.year
month = last_exec_time.month + interval_value
# 处理月份溢出
while month > 12:
month -= 12
year += 1
# 处理月份天数问题(例如,1月31日 + 1个月)
day = min(last_exec_time.day, calendar.monthrange(year, month)[1])
return last_exec_time.replace(year=year, month=month, day=day)
elif interval_unit == 'years':
# 处理闰年问题
year = last_exec_time.year + interval_value
month = last_exec_time.month
day = min(last_exec_time.day, calendar.monthrange(year, month)[1])
return last_exec_time.replace(year=year, day=day)
else:
logger.warning(f"不支持的间隔单位: {interval_unit}")
return datetime.max
except Exception as e:
logger.error(f"计算下次执行时间出错: {str(e)}")
return datetime.max
def _get_task_creation_time(self, task_id: str) -> Optional[datetime]:
"""获取任务的创建时间"""
try:
cursor = self.db.conn.cursor()
cursor.execute("SELECT created_at FROM main_tasks WHERE task_id = ?", (task_id,))
row = cursor.fetchone()
if row and row[0]:
try:
# 尝试解析ISO格式的时间字符串
return datetime.fromisoformat(row[0].replace('Z', '+00:00'))
except ValueError:
# 如果不是ISO格式,尝试其他格式
formats = [
'%Y-%m-%d %H:%M:%S',
'%Y-%m-%dT%H:%M:%S',
'%Y-%m-%d %H:%M:%S.%f'
]
for fmt in formats:
try:
return datetime.strptime(row[0], fmt)
except ValueError:
continue
# 如果所有格式都不匹配,记录错误并返回当前时间
logger.error(f"无法解析任务 {task_id} 的创建时间: {row[0]}")
return datetime.now()
else:
logger.warning(f"未找到任务 {task_id} 的创建时间,使用当前时间")
return datetime.now()
except Exception as e:
logger.error(f"获取任务创建时间出错: {str(e)}")
return datetime.now()
def _execute_task_wrapper(self, task_id, name, endpoint, method, params):
    """Thread entry point: run a scheduled task chain and record the result.

    Creates a private event loop for the async chain, records success or
    failure in the history table, and — for 'interval' tasks — computes and
    persists the next run time based on the completion timestamp.

    Args:
        task_id: Main task identifier; the chain re-reads its own config.
        name, endpoint, method, params: columns from main_tasks, used here
            only for logging.
    """
    print(f"\n=== 执行计划任务: {task_id} ===")
    print(f"当前时间: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
    print(f"任务名称: {name}")
    print(f"接口: {method} {endpoint}")
    try:
        # Record when the run started.
        start_time = datetime.now()
        start_time_str = start_time.strftime('%Y-%m-%d %H:%M:%S')
        # Fetch the task config — schedule_type drives the interval math below.
        task_config = self.db.get_main_task_by_id(task_id)
        if not task_config:
            print(f"错误: 任务 {task_id} 不存在")
            return
        schedule_type = task_config.get('schedule_type')
        # Execute the task chain.
        try:
            # Runs on a worker thread, so a fresh private event loop is safe.
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
            task = loop.create_task(self.execute_task_chain(task_id))
            result = loop.run_until_complete(asyncio.gather(task))[0]
            loop.close()
            # Record the outcome.
            end_time = datetime.now()
            end_time_str = end_time.strftime('%Y-%m-%d %H:%M:%S')
            duration = (end_time - start_time).total_seconds()
            # Interval tasks: derive the next run from the completion time.
            next_run_time = None
            if schedule_type == 'interval':
                interval_value = task_config.get('interval_value')
                interval_unit = task_config.get('interval_unit')
                if interval_value and interval_unit:
                    try:
                        # Use end_time as the baseline for the next run.
                        next_exec_time = self._calculate_next_interval_execution(
                            end_time, interval_value, interval_unit
                        )
                        next_run_time = next_exec_time.strftime('%Y-%m-%d %H:%M:%S')
                        logger.info(f"计算得到间隔任务下次执行时间: {next_run_time}")
                        # Persist the next run time for the status view.
                        self.db.update_task_status(task_id, {'next_run_time': next_run_time})
                        print(f"已更新任务 {task_id} 的下次执行时间: {next_run_time}")
                    except Exception as e:
                        logger.error(f"计算间隔任务下次执行时间失败: {str(e)}")
            print(f"任务执行{'成功' if result else '失败'}")
            # Persist the execution record (success or failure).
            if result:
                self.db.record_task_execution_enhanced(
                    task_id=task_id,
                    start_time=start_time_str,
                    end_time=end_time_str,
                    duration=duration,
                    status="success",
                    triggered_by="scheduler",
                    next_run_time=next_run_time
                )
            else:
                self.db.record_task_execution_enhanced(
                    task_id=task_id,
                    start_time=start_time_str,
                    end_time=end_time_str,
                    duration=duration,
                    status="fail",
                    error_message="任务执行失败",
                    triggered_by="scheduler",
                    next_run_time=next_run_time
                )
        except Exception as e:
            error_msg = str(e)
            print(f"执行任务时出错: {error_msg}")
            # NOTE(review): assumes traceback is imported at module level — confirm.
            traceback.print_exc()
            # Record the failed execution.
            end_time = datetime.now()
            end_time_str = end_time.strftime('%Y-%m-%d %H:%M:%S')
            duration = (end_time - start_time).total_seconds()
            self.db.record_task_execution_enhanced(
                task_id=task_id,
                start_time=start_time_str,
                end_time=end_time_str,
                duration=duration,
                status="fail",
                error_message=error_msg,
                triggered_by="scheduler"
            )
    except Exception as e:
        print(f"执行任务包装器出错: {str(e)}")
        traceback.print_exc()
async def send_error_notification(error_message):
    """Email the configured recipient about a failed scheduled run.

    Delegates to scripts.send_log_email.send_email, which loads the SMTP
    settings from the application config on its own; failures here are
    logged, never raised.

    Args:
        error_message: Description of the failure, embedded in the mail body.
    """
    from scripts.send_log_email import send_email

    subject = "Bilibili历史记录分析任务执行出错"
    body = f"""
    执行时间: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}
    错误信息: {error_message}
    """
    try:
        await send_email(subject=subject, content=body)
    except Exception as e:
        print(f"发送错误通知邮件失败: {e}")
        logger.error(f"发送错误通知邮件失败: {e}", exception=e)
|
2929004360/ruoyi-sign
| 2,412
|
ruoyi-common/src/main/java/com/ruoyi/common/core/domain/entity/SysDictType.java
|
package com.ruoyi.common.core.domain.entity;
import javax.validation.constraints.NotBlank;
import javax.validation.constraints.Pattern;
import javax.validation.constraints.Size;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import com.ruoyi.common.annotation.Excel;
import com.ruoyi.common.annotation.Excel.ColumnType;
import com.ruoyi.common.core.domain.BaseEntity;
/**
 * 字典类型表 sys_dict_type
 *
 * @author ruoyi
 */
public class SysDictType extends BaseEntity {
    private static final long serialVersionUID = 1L;

    /** 字典主键 */
    @Excel(name = "字典主键", cellType = ColumnType.NUMERIC)
    private Long dictId;

    /** 字典名称 */
    @Excel(name = "字典名称")
    private String dictName;

    /** 字典类型 */
    @Excel(name = "字典类型")
    private String dictType;

    /** 状态(0正常 1停用) */
    @Excel(name = "状态", readConverterExp = "0=正常,1=停用")
    private String status;

    /** Returns the dictionary primary key. */
    public Long getDictId() {
        return dictId;
    }

    public void setDictId(Long dictId) {
        this.dictId = dictId;
    }

    /** Returns the dictionary display name; validated on read per RuoYi convention. */
    @NotBlank(message = "字典名称不能为空")
    @Size(min = 0, max = 100, message = "字典类型名称长度不能超过100个字符")
    public String getDictName() {
        return dictName;
    }

    public void setDictName(String dictName) {
        this.dictName = dictName;
    }

    /**
     * Returns the dictionary type key (lowercase letters, digits, underscores;
     * must start with a letter).
     */
    // NOTE(review): the "字典类型类型" wording below is kept verbatim from upstream.
    @NotBlank(message = "字典类型不能为空")
    @Size(min = 0, max = 100, message = "字典类型类型长度不能超过100个字符")
    @Pattern(regexp = "^[a-z][a-z0-9_]*$", message = "字典类型必须以字母开头,且只能为(小写字母,数字,下滑线)")
    public String getDictType() {
        return dictType;
    }

    public void setDictType(String dictType) {
        this.dictType = dictType;
    }

    /** Returns the status flag: "0" = normal, "1" = disabled. */
    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this, ToStringStyle.MULTI_LINE_STYLE)
                .append("dictId", getDictId())
                .append("dictName", getDictName())
                .append("dictType", getDictType())
                .append("status", getStatus())
                .append("createBy", getCreateBy())
                .append("createTime", getCreateTime())
                .append("updateBy", getUpdateBy())
                .append("updateTime", getUpdateTime())
                .append("remark", getRemark())
                .toString();
    }
}
|
2929004360/ruoyi-sign
| 5,597
|
ruoyi-common/src/main/java/com/ruoyi/common/core/domain/entity/SysMenu.java
|
package com.ruoyi.common.core.domain.entity;
import java.util.ArrayList;
import java.util.List;
import javax.validation.constraints.NotBlank;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import com.ruoyi.common.core.domain.BaseEntity;
/**
 * 菜单权限表 sys_menu
 *
 * @author ruoyi
 */
public class SysMenu extends BaseEntity
{
    private static final long serialVersionUID = 1L;

    /** 菜单ID */
    private Long menuId;

    /** 菜单名称 */
    private String menuName;

    /** 父菜单名称 */
    private String parentName;

    /** 父菜单ID */
    private Long parentId;

    /** 显示顺序 */
    private Integer orderNum;

    /** 路由地址 */
    private String path;

    /** 组件路径 */
    private String component;

    /** 路由参数 */
    private String query;

    /** 路由名称,默认和路由地址相同的驼峰格式(注意:因为vue3版本的router会删除名称相同路由,为避免名字的冲突,特殊情况可以自定义) */
    private String routeName;

    /** 是否为外链(0是 1否) */
    private String isFrame;

    /** 是否缓存(0缓存 1不缓存) */
    private String isCache;

    /** 类型(M目录 C菜单 F按钮) */
    private String menuType;

    /** 显示状态(0显示 1隐藏) */
    private String visible;

    /** 菜单状态(0正常 1停用) */
    private String status;

    /**
     * 查询流程(0=否,1=是)
     */
    private String isFlow;

    /** 权限字符串 */
    private String perms;

    /** 菜单图标 */
    private String icon;

    /** 子菜单 */
    private List<SysMenu> children = new ArrayList<SysMenu>();

    public Long getMenuId()
    {
        return menuId;
    }

    public void setMenuId(Long menuId)
    {
        this.menuId = menuId;
    }

    @NotBlank(message = "菜单名称不能为空")
    @Size(min = 0, max = 50, message = "菜单名称长度不能超过50个字符")
    public String getMenuName()
    {
        return menuName;
    }

    public void setMenuName(String menuName)
    {
        this.menuName = menuName;
    }

    public String getParentName()
    {
        return parentName;
    }

    public void setParentName(String parentName)
    {
        this.parentName = parentName;
    }

    public Long getParentId()
    {
        return parentId;
    }

    public void setParentId(Long parentId)
    {
        this.parentId = parentId;
    }

    @NotNull(message = "显示顺序不能为空")
    public Integer getOrderNum()
    {
        return orderNum;
    }

    public void setOrderNum(Integer orderNum)
    {
        this.orderNum = orderNum;
    }

    @Size(min = 0, max = 200, message = "路由地址不能超过200个字符")
    public String getPath()
    {
        return path;
    }

    public void setPath(String path)
    {
        this.path = path;
    }

    // Fix: the limit was 200 while the message (and upstream schema) say 255;
    // aligned the constraint to 255 — backward compatible (only loosens).
    @Size(min = 0, max = 255, message = "组件路径不能超过255个字符")
    public String getComponent()
    {
        return component;
    }

    public void setComponent(String component)
    {
        this.component = component;
    }

    public String getQuery()
    {
        return query;
    }

    public void setQuery(String query)
    {
        this.query = query;
    }

    public String getRouteName()
    {
        return routeName;
    }

    public void setRouteName(String routeName)
    {
        this.routeName = routeName;
    }

    public String getIsFrame()
    {
        return isFrame;
    }

    public void setIsFrame(String isFrame)
    {
        this.isFrame = isFrame;
    }

    public String getIsCache()
    {
        return isCache;
    }

    public void setIsCache(String isCache)
    {
        this.isCache = isCache;
    }

    @NotBlank(message = "菜单类型不能为空")
    public String getMenuType()
    {
        return menuType;
    }

    public void setMenuType(String menuType)
    {
        this.menuType = menuType;
    }

    public String getVisible()
    {
        return visible;
    }

    public void setVisible(String visible)
    {
        this.visible = visible;
    }

    public String getStatus()
    {
        return status;
    }

    public void setStatus(String status)
    {
        this.status = status;
    }

    @Size(min = 0, max = 100, message = "权限标识长度不能超过100个字符")
    public String getPerms()
    {
        return perms;
    }

    public void setPerms(String perms)
    {
        this.perms = perms;
    }

    public String getIcon()
    {
        return icon;
    }

    public void setIcon(String icon)
    {
        this.icon = icon;
    }

    public List<SysMenu> getChildren()
    {
        return children;
    }

    public void setChildren(List<SysMenu> children)
    {
        this.children = children;
    }

    public String getIsFlow() {
        return isFlow;
    }

    public void setIsFlow(String isFlow) {
        this.isFlow = isFlow;
    }

    @Override
    public String toString() {
        // Consistency fix: use ToStringBuilder like SysDictType; the two
        // commons-lang3 imports were previously unused in this class.
        return new ToStringBuilder(this, ToStringStyle.MULTI_LINE_STYLE)
            .append("menuId", getMenuId())
            .append("menuName", getMenuName())
            .append("parentName", getParentName())
            .append("parentId", getParentId())
            .append("orderNum", getOrderNum())
            .append("path", getPath())
            .append("component", getComponent())
            .append("query", getQuery())
            .append("routeName", getRouteName())
            .append("isFrame", getIsFrame())
            .append("isCache", getIsCache())
            .append("menuType", getMenuType())
            .append("visible", getVisible())
            .append("status", getStatus())
            .append("isFlow", getIsFlow())
            .append("perms", getPerms())
            .append("icon", getIcon())
            .append("children", getChildren())
            .toString();
    }
}
|
2977094657/BilibiliHistoryFetcher
| 2,362
|
scripts/clean_data.py
|
import json
import os
from scripts.utils import load_config, get_base_path, get_output_path
# Module-level config, loaded once at import time; clean_history_data
# reads the 'fields_to_remove' list from it.
config = load_config()
def clean_history_data():
    """Strip configured fields from every daily history JSON file.

    Walks output/history_by_date/<year>/<month>/<day>.json, removes the
    fields listed in config['fields_to_remove'] (top level and nested
    'history'), and mirrors the cleaned files under
    output/cleaned_history_by_date/.

    Returns:
        dict: {"status": "success"|"error", "message": summary text}.
    """
    full_input_folder = get_output_path('history_by_date')
    full_output_folder = get_output_path('cleaned_history_by_date')

    if not os.path.exists(full_input_folder):
        print(f"输入文件夹 '{full_input_folder}' 不存在。")
        return {"status": "error", "message": f"输入文件夹 '{full_input_folder}' 不存在。"}

    cleaned_files = 0
    # Directory layout is <year>/<month>/<day>.json; only numeric dir names
    # are treated as valid year/month levels.
    for year in os.listdir(full_input_folder):
        year_path = os.path.join(full_input_folder, year)
        if not (os.path.isdir(year_path) and year.isdigit()):
            continue
        for month in os.listdir(year_path):
            month_path = os.path.join(year_path, month)
            if not (os.path.isdir(month_path) and month.isdigit()):
                continue
            for day_file in os.listdir(month_path):
                if not day_file.endswith('.json'):
                    continue
                input_file = os.path.join(month_path, day_file)
                output_file = os.path.join(full_output_folder, year, month, day_file)
                # Mirror the year/month structure on the output side.
                os.makedirs(os.path.dirname(output_file), exist_ok=True)
                with open(input_file, 'r', encoding='utf-8') as src:
                    records = json.load(src)
                cleaned = clean_data(records, config['fields_to_remove'])
                with open(output_file, 'w', encoding='utf-8') as dst:
                    json.dump(cleaned, dst, ensure_ascii=False, indent=4)
                cleaned_files += 1

    message = f"数据清理完成。共处理 {cleaned_files} 个文件。"
    return {"status": "success", "message": message}
def clean_data(data, fields_to_remove):
    """Return a copy of *data* with the given keys removed from each record.

    Keys are stripped from each record and, when present, from its nested
    'history' mapping as well.  The input list is left untouched.

    Args:
        data: List of record dicts.
        fields_to_remove: Iterable of key names to drop.

    Returns:
        New list of filtered record dicts.
    """
    drop = set(fields_to_remove)
    cleaned = []
    for record in data:
        kept = {k: v for k, v in record.items() if k not in drop}
        if 'history' in record:
            # The nested history dict gets the same filtering and is
            # (re)attached even if 'history' itself was dropped above.
            kept['history'] = {k: v for k, v in record['history'].items() if k not in drop}
        cleaned.append(kept)
    return cleaned
if __name__ == "__main__":
    # CLI entry point: run the cleaner once and report the summary message.
    result = clean_history_data()
    print(result["message"])
|
2929004360/ruoyi-sign
| 1,160
|
ruoyi-common/src/main/java/com/ruoyi/common/core/domain/model/LoginBody.java
|
package com.ruoyi.common.core.domain.model;
/**
 * 用户登录对象
 *
 * @author ruoyi
 */
public class LoginBody {
    /** 用户名 */
    private String username;

    /** 用户密码 */
    private String password;

    /** 验证码 */
    private String code;

    /** 唯一标识 */
    private String uuid;

    /** 公众号code */
    private String publicCode;

    /** Returns the login account name. */
    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    /** Returns the plaintext password submitted at login. */
    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    /** Returns the captcha text entered by the user. */
    public String getCode() {
        return code;
    }

    public void setCode(String code) {
        this.code = code;
    }

    /** Returns the captcha session identifier. */
    public String getUuid() {
        return uuid;
    }

    public void setUuid(String uuid) {
        this.uuid = uuid;
    }

    /** Returns the WeChat official-account authorization code, if any. */
    public String getPublicCode() {
        return publicCode;
    }

    public void setPublicCode(String publicCode) {
        this.publicCode = publicCode;
    }
}
|
2977094657/BilibiliHistoryFetcher
| 18,955
|
scripts/init_categories.py
|
import sqlite3
from scripts.utils import get_output_path, load_config
def init_categories():
    """Create and (re)populate the video_categories lookup table.

    Clears all existing rows and re-inserts the full category mapping, so
    the function is safe to re-run whenever the seed data below changes.
    Each row is (main_category, sub_category, alias, tid, image URL).

    NOTE(review): the tid values and icons appear to mirror Bilibili's
    public category IDs as documented by bilibili-API-collect — confirm
    against upstream when they change.
    """
    config = load_config()
    db_path = get_output_path(config['db_file'])
    conn = sqlite3.connect(db_path)
    cursor = conn.cursor()
    try:
        # Create the lookup table on first run.
        cursor.execute('''
            CREATE TABLE IF NOT EXISTS video_categories (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                main_category TEXT NOT NULL,
                sub_category TEXT NOT NULL,
                alias TEXT NOT NULL,
                tid INTEGER NOT NULL,
                image TEXT
            )
        ''')
        # Seed data: (main_category, sub_category, alias, tid, image URL).
        categories_data = [
            ('动画', '动画', 'douga', 1, 'https://socialsisteryi.github.io/bilibili-API-collect/assets/douga-551968c9.svg'),
            ('动画', 'MAD·AMV', 'mad', 24, 'https://socialsisteryi.github.io/bilibili-API-collect/assets/douga-551968c9.svg' ),
            ('动画', 'MMD·3D', 'mmd', 25, 'https://socialsisteryi.github.io/bilibili-API-collect/assets/douga-551968c9.svg'),
            ('动画', '短片·手书', 'handdrawn', 47, 'https://socialsisteryi.github.io/bilibili-API-collect/assets/douga-551968c9.svg'),
            ('动画', '配音', 'voice', 257, 'https://socialsisteryi.github.io/bilibili-API-collect/assets/douga-551968c9.svg'),
            ('动画', '手办·模玩', 'garage_kit', 210, 'https://socialsisteryi.github.io/bilibili-API-collect/assets/douga-551968c9.svg'),
            ('动画', '特摄', 'tokusatsu', 86, 'https://socialsisteryi.github.io/bilibili-API-collect/assets/douga-551968c9.svg'),
            ('动画', '动漫杂谈', 'acgntalks', 253, 'https://socialsisteryi.github.io/bilibili-API-collect/assets/douga-551968c9.svg'),
            ('动画', '综合', 'other', 27, 'https://socialsisteryi.github.io/bilibili-API-collect/assets/douga-551968c9.svg'),
            ('番剧', '番剧', 'anime', 13,'https://socialsisteryi.github.io/bilibili-API-collect/assets/anime-b33a4df8.svg'),
            ('番剧', '资讯', 'information', 51,'https://socialsisteryi.github.io/bilibili-API-collect/assets/anime-b33a4df8.svg'),
            ('番剧', '官方延伸', 'offical', 152,'https://socialsisteryi.github.io/bilibili-API-collect/assets/anime-b33a4df8.svg'),
            ('番剧', '完结动画', 'finish', 32,'https://socialsisteryi.github.io/bilibili-API-collect/assets/anime-b33a4df8.svg'),
            ('番剧', '连载动画', 'serial', 33,'https://socialsisteryi.github.io/bilibili-API-collect/assets/anime-b33a4df8.svg'),
            ('国创', '国创', 'guochuang', 167,'https://socialsisteryi.github.io/bilibili-API-collect/assets/guochuang-2887858d.svg'),
            ('国创', '国产动画', 'chinese', 153,'https://socialsisteryi.github.io/bilibili-API-collect/assets/guochuang-2887858d.svg'),
            ('国创', '国产原创相关', 'original', 168,'https://socialsisteryi.github.io/bilibili-API-collect/assets/guochuang-2887858d.svg'),
            ('国创', '布袋戏', 'puppetry', 169,'https://socialsisteryi.github.io/bilibili-API-collect/assets/guochuang-2887858d.svg'),
            ('国创', '资讯', 'information', 170,'https://socialsisteryi.github.io/bilibili-API-collect/assets/guochuang-2887858d.svg'),
            ('国创', '动态漫·广播剧', 'motioncomic', 195,'https://socialsisteryi.github.io/bilibili-API-collect/assets/guochuang-2887858d.svg'),
            ('音乐', '音乐', 'music', 3,'https://socialsisteryi.github.io/bilibili-API-collect/assets/music-1d6aa097.svg'),
            ('音乐', '原创音乐', 'original', 28,'https://socialsisteryi.github.io/bilibili-API-collect/assets/music-1d6aa097.svg'),
            ('音乐', '翻唱', 'cover', 31,'https://socialsisteryi.github.io/bilibili-API-collect/assets/music-1d6aa097.svg'),
            ('音乐', 'VOCALOID·UTAU', 'vocaloid', 30,'https://socialsisteryi.github.io/bilibili-API-collect/assets/music-1d6aa097.svg'),
            ('音乐', '演奏', 'perform', 59,'https://socialsisteryi.github.io/bilibili-API-collect/assets/music-1d6aa097.svg'),
            ('音乐', 'MV', 'mv', 193,'https://socialsisteryi.github.io/bilibili-API-collect/assets/music-1d6aa097.svg'),
            ('音乐', '音乐现场', 'live', 29,'https://socialsisteryi.github.io/bilibili-API-collect/assets/music-1d6aa097.svg'),
            ('音乐', '音乐综合', 'other', 130,'https://socialsisteryi.github.io/bilibili-API-collect/assets/music-1d6aa097.svg'),
            ('音乐', '乐评盘点', 'commentary', 243,'https://socialsisteryi.github.io/bilibili-API-collect/assets/music-1d6aa097.svg'),
            ('音乐', '音乐教学', 'tutorial', 244,'https://socialsisteryi.github.io/bilibili-API-collect/assets/music-1d6aa097.svg'),
            ('舞蹈', '舞蹈', 'dance', 129,'https://socialsisteryi.github.io/bilibili-API-collect/assets/dance-26e4156b.svg'),
            ('舞蹈', '宅舞', 'otaku', 20,'https://socialsisteryi.github.io/bilibili-API-collect/assets/dance-26e4156b.svg'),
            ('舞蹈', '舞蹈综合', 'three_d', 154,'https://socialsisteryi.github.io/bilibili-API-collect/assets/dance-26e4156b.svg'),
            ('舞蹈', '舞蹈教程', 'demo', 156,'https://socialsisteryi.github.io/bilibili-API-collect/assets/dance-26e4156b.svg'),
            ('舞蹈', '街舞', 'hiphop', 198,'https://socialsisteryi.github.io/bilibili-API-collect/assets/dance-26e4156b.svg'),
            ('舞蹈', '明星舞蹈', 'star', 199,'https://socialsisteryi.github.io/bilibili-API-collect/assets/dance-26e4156b.svg'),
            ('舞蹈', '国风舞蹈', 'china', 200,'https://socialsisteryi.github.io/bilibili-API-collect/assets/dance-26e4156b.svg'),
            ('舞蹈', '手势·网红舞', 'gestures', 255,'https://socialsisteryi.github.io/bilibili-API-collect/assets/dance-26e4156b.svg'),
            ('游戏', '游戏', 'game', 4,'https://socialsisteryi.github.io/bilibili-API-collect/assets/game-158a0730.svg'),
            ('游戏', '单机游戏', 'stand_alone', 17,'https://socialsisteryi.github.io/bilibili-API-collect/assets/game-158a0730.svg'),
            ('游戏', '电子竞技', 'esports', 171,'https://socialsisteryi.github.io/bilibili-API-collect/assets/game-158a0730.svg'),
            ('游戏', '手机游戏', 'mobile', 172,'https://socialsisteryi.github.io/bilibili-API-collect/assets/game-158a0730.svg'),
            ('游戏', '网络游戏', 'online', 65,'https://socialsisteryi.github.io/bilibili-API-collect/assets/game-158a0730.svg'),
            ('游戏', '桌游棋牌', 'board', 173,'https://socialsisteryi.github.io/bilibili-API-collect/assets/game-158a0730.svg'),
            ('游戏', 'GMV', 'gmv', 121,'https://socialsisteryi.github.io/bilibili-API-collect/assets/game-158a0730.svg'),
            ('游戏', '音游', 'music', 136,'https://socialsisteryi.github.io/bilibili-API-collect/assets/game-158a0730.svg'),
            ('游戏', 'Mugen', 'mugen', 19,'https://socialsisteryi.github.io/bilibili-API-collect/assets/game-158a0730.svg'),
            ('知识', '知识', 'knowledge', 36,'https://socialsisteryi.github.io/bilibili-API-collect/assets/knowledge-65fd8dce.svg'),
            ('知识', '科学科普', 'science', 201,'https://socialsisteryi.github.io/bilibili-API-collect/assets/knowledge-65fd8dce.svg'),
            ('知识', '社科·法律·心理', 'social_science', 124,'https://socialsisteryi.github.io/bilibili-API-collect/assets/knowledge-65fd8dce.svg'),
            ('知识', '人文历史', 'humanity_history', 228,'https://socialsisteryi.github.io/bilibili-API-collect/assets/knowledge-65fd8dce.svg'),
            ('知识', '财经商业', 'business', 207,'https://socialsisteryi.github.io/bilibili-API-collect/assets/knowledge-65fd8dce.svg'),
            ('知识', '校园学习', 'campus', 208,'https://socialsisteryi.github.io/bilibili-API-collect/assets/knowledge-65fd8dce.svg'),
            ('知识', '职业职场', 'career', 209,'https://socialsisteryi.github.io/bilibili-API-collect/assets/knowledge-65fd8dce.svg'),
            ('知识', '设计·创意', 'design', 229,'https://socialsisteryi.github.io/bilibili-API-collect/assets/knowledge-65fd8dce.svg'),
            ('知识', '野生技术协会', 'skill', 122,'https://socialsisteryi.github.io/bilibili-API-collect/assets/knowledge-65fd8dce.svg'),
            ('科技', '科技', 'tech', 188,'https://socialsisteryi.github.io/bilibili-API-collect/assets/tech-8f2eb72e.svg'),
            ('科技', '数码', 'digital', 95,'https://socialsisteryi.github.io/bilibili-API-collect/assets/tech-8f2eb72e.svg'),
            ('科技', '软件应用', 'application', 230,'https://socialsisteryi.github.io/bilibili-API-collect/assets/tech-8f2eb72e.svg'),
            ('科技', '计算机技术', 'computer_tech', 231,'https://socialsisteryi.github.io/bilibili-API-collect/assets/tech-8f2eb72e.svg'),
            ('科技', '科工机械', 'industry', 232,'https://socialsisteryi.github.io/bilibili-API-collect/assets/tech-8f2eb72e.svg'),
            ('科技', '极客DIY', 'diy', 233,'https://socialsisteryi.github.io/bilibili-API-collect/assets/tech-8f2eb72e.svg'),
            ('运动', '运动', 'sports', 234,'https://socialsisteryi.github.io/bilibili-API-collect/assets/sports-bfc825f3.svg'),
            ('运动', '篮球', 'basketball', 235,'https://socialsisteryi.github.io/bilibili-API-collect/assets/sports-bfc825f3.svg'),
            ('运动', '足球', 'football', 249,'https://socialsisteryi.github.io/bilibili-API-collect/assets/sports-bfc825f3.svg'),
            ('运动', '健身', 'aerobics', 164,'https://socialsisteryi.github.io/bilibili-API-collect/assets/sports-bfc825f3.svg'),
            ('运动', '竞技体育', 'athletic', 236,'https://socialsisteryi.github.io/bilibili-API-collect/assets/sports-bfc825f3.svg'),
            ('运动', '运动文化', 'culture', 237,'https://socialsisteryi.github.io/bilibili-API-collect/assets/sports-bfc825f3.svg'),
            ('运动', '运动综合', 'comprehensive', 238,'https://socialsisteryi.github.io/bilibili-API-collect/assets/sports-bfc825f3.svg'),
            ('汽车', '汽车', 'car', 223,'https://socialsisteryi.github.io/bilibili-API-collect/assets/car-c766485c.svg'),
            ('汽车', '汽车知识科普', 'knowledge', 258,'https://socialsisteryi.github.io/bilibili-API-collect/assets/car-c766485c.svg'),
            ('汽车', '赛车', 'racing', 245,'https://socialsisteryi.github.io/bilibili-API-collect/assets/car-c766485c.svg'),
            ('汽车', '改装玩车', 'modifiedvehicle', 246,'https://socialsisteryi.github.io/bilibili-API-collect/assets/car-c766485c.svg'),
            ('汽车', '新能源汽车', 'newenergyvehicle', 247,'https://socialsisteryi.github.io/bilibili-API-collect/assets/car-c766485c.svg'),
            ('汽车', '房车', 'touringcar', 248,'https://socialsisteryi.github.io/bilibili-API-collect/assets/car-c766485c.svg'),
            ('汽车', '摩托车', 'motorcycle', 240,'https://socialsisteryi.github.io/bilibili-API-collect/assets/car-c766485c.svg'),
            ('汽车', '购车攻略', 'strategy', 227,'https://socialsisteryi.github.io/bilibili-API-collect/assets/car-c766485c.svg'),
            ('汽车', '汽车生活', 'life', 176,'https://socialsisteryi.github.io/bilibili-API-collect/assets/car-c766485c.svg'),
            ('生活', '生活', 'life', 160,'https://socialsisteryi.github.io/bilibili-API-collect/assets/life-1f4a6ef5.svg'),
            ('生活', '搞笑', 'funny', 138,'https://socialsisteryi.github.io/bilibili-API-collect/assets/life-1f4a6ef5.svg'),
            ('生活', '出行', 'travel', 250,'https://socialsisteryi.github.io/bilibili-API-collect/assets/life-1f4a6ef5.svg'),
            ('生活', '三农', 'rurallife', 251,'https://socialsisteryi.github.io/bilibili-API-collect/assets/life-1f4a6ef5.svg'),
            ('生活', '家居房产', 'home', 239,'https://socialsisteryi.github.io/bilibili-API-collect/assets/life-1f4a6ef5.svg'),
            ('生活', '手工', 'handmake', 161,'https://socialsisteryi.github.io/bilibili-API-collect/assets/life-1f4a6ef5.svg'),
            ('生活', '绘画', 'painting', 162,'https://socialsisteryi.github.io/bilibili-API-collect/assets/life-1f4a6ef5.svg'),
            ('生活', '日常', 'daily', 21,'https://socialsisteryi.github.io/bilibili-API-collect/assets/life-1f4a6ef5.svg'),
            ('生活', '亲子', 'parenting', 254,'https://socialsisteryi.github.io/bilibili-API-collect/assets/life-1f4a6ef5.svg'),
            ('美食', '美食', 'food', 211,'https://socialsisteryi.github.io/bilibili-API-collect/assets/food-5883d8d8.svg'),
            ('美食', '美食制作', 'make', 76,'https://socialsisteryi.github.io/bilibili-API-collect/assets/food-5883d8d8.svg'),
            ('美食', '美食侦探', 'detective', 212,'https://socialsisteryi.github.io/bilibili-API-collect/assets/food-5883d8d8.svg'),
            ('美食', '美食测评', 'measurement', 213,'https://socialsisteryi.github.io/bilibili-API-collect/assets/food-5883d8d8.svg'),
            ('美食', '田园美食', 'rural', 214,'https://socialsisteryi.github.io/bilibili-API-collect/assets/food-5883d8d8.svg'),
            ('美食', '美食记录', 'record', 215,'https://socialsisteryi.github.io/bilibili-API-collect/assets/food-5883d8d8.svg'),
            ('动物圈', '动物圈', 'animal', 217,'https://socialsisteryi.github.io/bilibili-API-collect/assets/animal-95ff87f2.svg'),
            ('动物圈', '喵星人', 'cat', 218,'https://socialsisteryi.github.io/bilibili-API-collect/assets/animal-95ff87f2.svg'),
            ('动物圈', '汪星人', 'dog', 219,'https://socialsisteryi.github.io/bilibili-API-collect/assets/animal-95ff87f2.svg'),
            ('动物圈', '动物二创', 'second_edition', 220,'https://socialsisteryi.github.io/bilibili-API-collect/assets/animal-95ff87f2.svg'),
            ('动物圈', '野生动物', 'wild_animal', 221,'https://socialsisteryi.github.io/bilibili-API-collect/assets/animal-95ff87f2.svg'),
            ('动物圈', '小宠异宠', 'reptiles', 222,'https://socialsisteryi.github.io/bilibili-API-collect/assets/animal-95ff87f2.svg'),
            ('动物圈', '动物综合', 'animal_composite', 75,'https://socialsisteryi.github.io/bilibili-API-collect/assets/animal-95ff87f2.svg'),
            ('鬼畜', '鬼畜', 'kichiku', 119,'https://socialsisteryi.github.io/bilibili-API-collect/assets/kichiku-8f960ae2.svg'),
            ('鬼畜', '鬼畜调教', 'guide', 22,'https://socialsisteryi.github.io/bilibili-API-collect/assets/kichiku-8f960ae2.svg'),
            ('鬼畜', '音MAD', 'mad', 26,'https://socialsisteryi.github.io/bilibili-API-collect/assets/kichiku-8f960ae2.svg'),
            ('鬼畜', '人力VOCALOID', 'manual_vocaloid', 126,'https://socialsisteryi.github.io/bilibili-API-collect/assets/kichiku-8f960ae2.svg'),
            ('鬼畜', '鬼畜剧场', 'theatre', 216,'https://socialsisteryi.github.io/bilibili-API-collect/assets/kichiku-8f960ae2.svg'),
            ('鬼畜', '教程演示', 'course', 127,'https://socialsisteryi.github.io/bilibili-API-collect/assets/kichiku-8f960ae2.svg'),
            ('时尚', '时尚', 'fashion', 155,'https://socialsisteryi.github.io/bilibili-API-collect/assets/fashion-773241bb.svg'),
            ('时尚', '美妆护肤', 'makeup', 157,'https://socialsisteryi.github.io/bilibili-API-collect/assets/fashion-773241bb.svg'),
            ('时尚', '仿妆cos', 'cos', 252,'https://socialsisteryi.github.io/bilibili-API-collect/assets/fashion-773241bb.svg'),
            ('时尚', '穿搭', 'clothing', 158,'https://socialsisteryi.github.io/bilibili-API-collect/assets/fashion-773241bb.svg'),
            ('时尚', '时尚潮流', 'catwalk', 159,'https://socialsisteryi.github.io/bilibili-API-collect/assets/fashion-773241bb.svg'),
            ('资讯', '资讯', 'information', 202,'https://socialsisteryi.github.io/bilibili-API-collect/assets/information-d98c5ed0.svg'),
            ('资讯', '热点', 'hotspot', 203,'https://socialsisteryi.github.io/bilibili-API-collect/assets/information-d98c5ed0.svg'),
            ('资讯', '环球', 'global', 204,'https://socialsisteryi.github.io/bilibili-API-collect/assets/information-d98c5ed0.svg'),
            ('资讯', '社会', 'social', 205,'https://socialsisteryi.github.io/bilibili-API-collect/assets/information-d98c5ed0.svg'),
            ('资讯', '综合', 'multiple', 206,'https://socialsisteryi.github.io/bilibili-API-collect/assets/information-d98c5ed0.svg'),
            ('娱乐', '娱乐', 'ent', 5,'https://socialsisteryi.github.io/bilibili-API-collect/assets/ent-ed6247e0.svg'),
            ('娱乐', '综艺', 'variety', 71,'https://socialsisteryi.github.io/bilibili-API-collect/assets/ent-ed6247e0.svg'),
            ('娱乐', '娱乐杂谈', 'talker', 241,'https://socialsisteryi.github.io/bilibili-API-collect/assets/ent-ed6247e0.svg'),
            ('娱乐', '粉丝创作', 'fans', 242,'https://socialsisteryi.github.io/bilibili-API-collect/assets/ent-ed6247e0.svg'),
            ('娱乐', '明星综合', 'celebrity', 137,'https://socialsisteryi.github.io/bilibili-API-collect/assets/ent-ed6247e0.svg'),
            ('影视', '影视', 'cinephile', 181,'https://socialsisteryi.github.io/bilibili-API-collect/assets/cinephile-c8d74b94.svg'),
            ('影视', '影视杂谈', 'cinecism', 182,'https://socialsisteryi.github.io/bilibili-API-collect/assets/cinephile-c8d74b94.svg'),
            ('影视', '影视剪辑', 'montage', 183,'https://socialsisteryi.github.io/bilibili-API-collect/assets/cinephile-c8d74b94.svg'),
            ('影视', '小剧场', 'shortfilm', 85,'https://socialsisteryi.github.io/bilibili-API-collect/assets/cinephile-c8d74b94.svg'),
            ('影视', '预告·资讯', 'trailer_info', 184,'https://socialsisteryi.github.io/bilibili-API-collect/assets/cinephile-c8d74b94.svg'),
            ('影视', '短片', 'shortfilm', 256,'https://socialsisteryi.github.io/bilibili-API-collect/assets/cinephile-c8d74b94.svg'),
            ('纪录片', '纪录片', 'documentary', 177,'https://socialsisteryi.github.io/bilibili-API-collect/assets/documentary-2c550e67.svg'),
            ('纪录片', '人文·历史', 'history', 37,'https://socialsisteryi.github.io/bilibili-API-collect/assets/documentary-2c550e67.svg'),
            ('纪录片', '科学·探索·自然', 'science', 178,'https://socialsisteryi.github.io/bilibili-API-collect/assets/documentary-2c550e67.svg'),
            ('纪录片', '军事', 'military', 179,'https://socialsisteryi.github.io/bilibili-API-collect/assets/documentary-2c550e67.svg'),
            ('纪录片', '社会·美食·旅行', 'travel', 180,'https://socialsisteryi.github.io/bilibili-API-collect/assets/documentary-2c550e67.svg'),
            ('电影', '电影', 'movie', 23,'https://socialsisteryi.github.io/bilibili-API-collect/assets/movie-693cc994.svg'),
            ('电影', '华语电影', 'chinese', 147,'https://socialsisteryi.github.io/bilibili-API-collect/assets/movie-693cc994.svg'),
            ('电影', '欧美电影', 'west', 145,'https://socialsisteryi.github.io/bilibili-API-collect/assets/movie-693cc994.svg'),
            ('电影', '日本电影', 'japan', 146,'https://socialsisteryi.github.io/bilibili-API-collect/assets/movie-693cc994.svg'),
            ('电影', '其他国家', 'movie', 83,'https://socialsisteryi.github.io/bilibili-API-collect/assets/movie-693cc994.svg'),
            ('电视剧', '电视剧', 'tv', 11,'https://socialsisteryi.github.io/bilibili-API-collect/assets/teleplay-1f3272a8.svg'),
            ('电视剧', '国产剧', 'mainland', 185,'https://socialsisteryi.github.io/bilibili-API-collect/assets/teleplay-1f3272a8.svg'),
            ('电视剧', '海外剧', 'overseas', 187,'https://socialsisteryi.github.io/bilibili-API-collect/assets/teleplay-1f3272a8.svg'),
        ]
        # Clear existing rows so re-running refreshes the seed data.
        cursor.execute('DELETE FROM video_categories')
        # Bulk-insert every category row in one statement.
        cursor.executemany('''
            INSERT INTO video_categories (main_category, sub_category, alias, tid, image)
            VALUES (?, ?, ?, ?, ?)
        ''', categories_data)
        conn.commit()
        print("视频分类表初始化成功!")
    except sqlite3.Error as e:
        print(f"数据库错误: {e}")
        conn.rollback()
    finally:
        conn.close()
# Allow running this module directly to (re)seed the table.
if __name__ == '__main__':
    init_categories()
|
2929004360/ruoyi-sign
| 4,292
|
ruoyi-common/src/main/java/com/ruoyi/common/core/domain/model/LoginUser.java
|
package com.ruoyi.common.core.domain.model;
import com.alibaba.fastjson2.annotation.JSONField;
import com.ruoyi.common.core.domain.entity.SysUser;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.userdetails.UserDetails;
import java.util.Collection;
import java.util.Set;
/**
* 登录用户身份权限
*
* @author ruoyi
*/
public class LoginUser implements UserDetails
{
    // Serialization version id (UserDetails extends Serializable).
    private static final long serialVersionUID = 1L;
    /**
     * User ID
     */
    private Long userId;
    /**
     * Department ID
     */
    private Long deptId;
    /**
     * Unique session token for this login
     */
    private String token;
    /**
     * Login time
     */
    private Long loginTime;
    /**
     * Expiration time
     */
    private Long expireTime;
    /**
     * Login IP address
     */
    private String ipaddr;
    /**
     * Login location
     */
    private String loginLocation;
    /**
     * Browser type
     */
    private String browser;
    /**
     * Operating system
     */
    private String os;
    /**
     * Permission set
     */
    private Set<String> permissions;
    /**
     * Underlying user entity
     */
    private SysUser user;
    public LoginUser()
    {
    }
    public LoginUser(SysUser user, Set<String> permissions)
    {
        this.user = user;
        this.permissions = permissions;
    }
    public LoginUser(Long userId, Long deptId, SysUser user, Set<String> permissions)
    {
        this.userId = userId;
        this.deptId = deptId;
        this.user = user;
        this.permissions = permissions;
    }
    public Long getUserId()
    {
        return userId;
    }
    public void setUserId(Long userId)
    {
        this.userId = userId;
    }
    public Long getDeptId()
    {
        return deptId;
    }
    public void setDeptId(Long deptId)
    {
        this.deptId = deptId;
    }
    public String getToken()
    {
        return token;
    }
    public void setToken(String token)
    {
        this.token = token;
    }
    // Excluded from JSON serialization so the password hash never leaks.
    @JSONField(serialize = false)
    @Override
    public String getPassword()
    {
        return user.getPassword();
    }
    @Override
    public String getUsername()
    {
        return user.getUserName();
    }
    /**
     * Whether the account is non-expired; expired accounts cannot authenticate
     */
    @JSONField(serialize = false)
    @Override
    public boolean isAccountNonExpired()
    {
        return true;
    }
    /**
     * Whether the account is non-locked; locked users cannot authenticate
     *
     * @return
     */
    @JSONField(serialize = false)
    @Override
    public boolean isAccountNonLocked()
    {
        return true;
    }
    /**
     * Whether the credentials (password) are non-expired; expired credentials block authentication
     *
     * @return
     */
    @JSONField(serialize = false)
    @Override
    public boolean isCredentialsNonExpired()
    {
        return true;
    }
    /**
     * Whether the user is enabled; disabled users cannot authenticate
     *
     * @return
     */
    @JSONField(serialize = false)
    @Override
    public boolean isEnabled()
    {
        return true;
    }
    public Long getLoginTime()
    {
        return loginTime;
    }
    public void setLoginTime(Long loginTime)
    {
        this.loginTime = loginTime;
    }
    public String getIpaddr()
    {
        return ipaddr;
    }
    public void setIpaddr(String ipaddr)
    {
        this.ipaddr = ipaddr;
    }
    public String getLoginLocation()
    {
        return loginLocation;
    }
    public void setLoginLocation(String loginLocation)
    {
        this.loginLocation = loginLocation;
    }
    public String getBrowser()
    {
        return browser;
    }
    public void setBrowser(String browser)
    {
        this.browser = browser;
    }
    public String getOs()
    {
        return os;
    }
    public void setOs(String os)
    {
        this.os = os;
    }
    public Long getExpireTime()
    {
        return expireTime;
    }
    public void setExpireTime(Long expireTime)
    {
        this.expireTime = expireTime;
    }
    public Set<String> getPermissions()
    {
        return permissions;
    }
    public void setPermissions(Set<String> permissions)
    {
        this.permissions = permissions;
    }
    public SysUser getUser()
    {
        return user;
    }
    public void setUser(SysUser user)
    {
        this.user = user;
    }
    // NOTE(review): returns null rather than an empty collection; confirm
    // all callers tolerate a null authority list before relying on this.
    @Override
    public Collection<? extends GrantedAuthority> getAuthorities()
    {
        return null;
    }
}
|
2977094657/BilibiliHistoryFetcher
| 15,410
|
scripts/analyze_bilibili_history.py
|
import json
import os
from collections import defaultdict
from datetime import datetime
import sqlite3
from scripts.utils import load_config, get_output_path
# Module-level configuration loaded once at import time (provides 'db_file').
config = load_config()
def get_db():
    """Open and return a SQLite connection to the configured history database."""
    return sqlite3.connect(get_output_path(config['db_file']))
def get_current_year():
    """Return the year component of the current local date."""
    today = datetime.now()
    return today.year
def load_history_from_db():
    """Fetch every view_at timestamp from the current year's history table.

    Returns:
        A list of 1-tuples as produced by sqlite; an empty list when the
        table does not exist or a database error occurs.
    """
    conn = get_db()
    try:
        cur = conn.cursor()
        table_name = f"bilibili_history_{get_current_year()}"
        # Bail out early when this year's table has not been created yet.
        cur.execute("""
            SELECT name FROM sqlite_master
            WHERE type='table' AND name=?
        """, (table_name,))
        if cur.fetchone() is None:
            print(f"表 {table_name} 不存在")
            return []
        cur.execute(f"SELECT view_at FROM {table_name}")
        return cur.fetchall()
    except sqlite3.Error as e:
        print(f"数据库错误: {e}")
        return []
    finally:
        conn.close()
def calculate_video_counts(history_data):
    """Tally per-day and per-month view counts for the current year.

    Args:
        history_data: iterable of 1-tuples holding unix view timestamps.

    Returns:
        (daily_count, monthly_count) defaultdicts keyed by 'YYYY-MM-DD'
        and 'YYYY-MM'; timestamps outside the current year are ignored.
    """
    year_now = datetime.now().year
    daily = defaultdict(int)
    monthly = defaultdict(int)
    for (view_at,) in history_data:
        moment = datetime.fromtimestamp(view_at)
        # Only this year's records contribute to the tallies.
        if moment.year == year_now:
            daily[moment.strftime('%Y-%m-%d')] += 1
            monthly[moment.strftime('%Y-%m')] += 1
    return daily, monthly
def save_daily_count_to_json(daily_count, year):
    """Persist a year's per-day view counts as pretty-printed JSON.

    Args:
        daily_count: mapping of 'YYYY-MM-DD' -> count.
        year: year label used in the output filename.

    Returns:
        The path of the file that was written.
    """
    output_file = get_output_path(f'daily_count_{year}.json')
    # Make sure the destination directory exists before writing.
    os.makedirs(os.path.dirname(output_file), exist_ok=True)
    with open(output_file, 'w', encoding='utf-8') as fp:
        json.dump(daily_count, fp, ensure_ascii=False, indent=4)
    print(f"每天观看数量已保存到 {output_file}")
    return output_file
def analyze_history_by_params(date_str=None, start_date=None, end_date=None):
    """Analyze viewing history filtered by date parameters.

    Args:
        date_str: a single day to count, formatted YYYY-MM-DD.
        start_date: inclusive range start, formatted YYYY-MM-DD.
        end_date: inclusive range end, formatted YYYY-MM-DD.

    Returns:
        dict with a "date_count" and/or "date_range" section, or
        {"error": ...} on database failure.

    NOTE(review): only the current year's table is queried, so ranges
    reaching into other years return partial data — confirm intended.
    """
    conn = get_db()
    try:
        cursor = conn.cursor()
        current_year = get_current_year()
        table_name = f"bilibili_history_{current_year}"
        # Base query; WHERE 1=1 lets conditions be appended uniformly.
        query = f"SELECT view_at FROM {table_name} WHERE 1=1"
        params = []
        # Turn the date arguments into half-open [start, end) unix windows.
        if date_str:
            start_timestamp = int(datetime.strptime(date_str, '%Y-%m-%d').timestamp())
            end_timestamp = start_timestamp + 86400  # one day in seconds
            query += " AND view_at >= ? AND view_at < ?"
            params.extend([start_timestamp, end_timestamp])
        elif start_date or end_date:
            if start_date:
                start_timestamp = int(datetime.strptime(start_date, '%Y-%m-%d').timestamp())
                query += " AND view_at >= ?"
                params.append(start_timestamp)
            if end_date:
                # +86400 makes end_date inclusive under the < comparison.
                end_timestamp = int(datetime.strptime(end_date, '%Y-%m-%d').timestamp()) + 86400
                query += " AND view_at < ?"
                params.append(end_timestamp)
        # Run the assembled query.
        cursor.execute(query, params)
        history_data = cursor.fetchall()
        # Bucket rows into per-day / per-month counts (local time).
        daily_count = defaultdict(int)
        monthly_count = defaultdict(int)
        for (view_at,) in history_data:
            view_time = datetime.fromtimestamp(view_at)
            current_date = view_time.strftime('%Y-%m-%d')
            month_str = view_time.strftime('%Y-%m')
            daily_count[current_date] += 1
            monthly_count[month_str] += 1
        result = {}
        # Single-day request: report that day's count.
        if date_str:
            count = daily_count.get(date_str, 0)
            result["date_count"] = {
                "date": date_str,
                "count": count
            }
        # Range request: report the total and the per-day breakdown.
        if start_date or end_date:
            total_count = sum(daily_count.values())
            result["date_range"] = {
                "start_date": start_date or "无限制",
                "end_date": end_date or "无限制",
                "total_count": total_count,
                "daily_counts": dict(daily_count)
            }
        return result
    except sqlite3.Error as e:
        print(f"数据库错误: {e}")
        return {"error": f"数据库错误: {e}"}
    finally:
        conn.close()
def get_daily_counts(year=None):
    """Aggregate watched-video counts per day.

    Args:
        year: restrict to bilibili_history_<year>; when falsy, every
            yearly table is scanned and the totals merged.

    Returns:
        {"YYYY-MM-DD": count} on success, {"error": ...} on failure.

    Side effect: writes the counts to per-year JSON files via
    save_daily_count_to_json.

    NOTE(review): dates here are derived without the 'localtime'
    modifier (i.e. UTC), while get_daily_and_monthly_counts uses
    'localtime' — confirm which convention is intended.
    """
    conn = get_db()
    try:
        cursor = conn.cursor()
        table_name = f"bilibili_history_{year}" if year else None
        if not table_name:
            # Discover every yearly history table.
            cursor.execute("""
                SELECT name FROM sqlite_master
                WHERE type='table' AND name LIKE 'bilibili_history_%'
            """)
            tables = cursor.fetchall()
            if not tables:
                return {"error": "未找到任何历史记录数据"}
            # Merge per-day counts across all years.
            daily_count = {}
            for (table,) in tables:
                # Per-day counts for this table.
                cursor.execute(f"""
                    SELECT
                        strftime('%Y-%m-%d', datetime(view_at, 'unixepoch')) as date,
                        COUNT(*) as count
                    FROM {table}
                    GROUP BY date
                    ORDER BY date
                """)
                for row in cursor.fetchall():
                    date, count = row
                    if date in daily_count:
                        daily_count[date] += count
                    else:
                        daily_count[date] = count
        else:
            # Single requested year only.
            cursor.execute(f"""
                SELECT
                    strftime('%Y-%m-%d', datetime(view_at, 'unixepoch')) as date,
                    COUNT(*) as count
                FROM {table_name}
                GROUP BY date
                ORDER BY date
            """)
            daily_count = {row[0]: row[1] for row in cursor.fetchall()}
        # Persist the counts to JSON as a side effect.
        try:
            if year:
                output_file = save_daily_count_to_json(daily_count, year)
            else:
                # Split merged data by year and save each separately.
                # NOTE: rebinds the `year` parameter to a string; safe only
                # because this branch runs when `year` was falsy.
                daily_count_by_year = {}
                for date, count in daily_count.items():
                    year = date.split('-')[0]
                    if year not in daily_count_by_year:
                        daily_count_by_year[year] = {}
                    daily_count_by_year[year][date] = count
                # One output file per year.
                for year, data in daily_count_by_year.items():
                    output_file = save_daily_count_to_json(data, year)
                    print(f"数据已保存到: {output_file}")
        except Exception as e:
            print(f"保存JSON文件时出错: {e}")
            return {"error": f"保存JSON文件时出错: {e}"}
        return daily_count
    except sqlite3.Error as e:
        print(f"数据库错误: {e}")
        return {"error": f"数据库错误: {e}"}
    finally:
        conn.close()
def get_monthly_counts(year=None):
    """Aggregate watched-video counts per month.

    Args:
        year: restrict to bilibili_history_<year>; when falsy, every
            yearly table is scanned and the monthly totals merged.

    Returns:
        {"YYYY-MM": count} on success, {"error": ...} on failure.
    """
    conn = get_db()
    try:
        cur = conn.cursor()
        target = f"bilibili_history_{year}" if year else None
        if not target:
            # No year requested: enumerate every yearly history table.
            cur.execute("""
                SELECT name FROM sqlite_master
                WHERE type='table' AND name LIKE 'bilibili_history_%'
            """)
            tables = cur.fetchall()
            if not tables:
                return {"error": "未找到任何历史记录数据"}
            # Fold each table's month buckets into one merged mapping.
            totals = {}
            for (table,) in tables:
                cur.execute(f"""
                    SELECT
                        strftime('%Y-%m', datetime(view_at, 'unixepoch')) as month,
                        COUNT(*) as count
                    FROM {table}
                    GROUP BY month
                    ORDER BY month
                """)
                for month, count in cur.fetchall():
                    totals[month] = totals.get(month, 0) + count
        else:
            cur.execute(f"""
                SELECT
                    strftime('%Y-%m', datetime(view_at, 'unixepoch')) as month,
                    COUNT(*) as count
                FROM {target}
                GROUP BY month
                ORDER BY month
            """)
            totals = {month: count for month, count in cur.fetchall()}
        return totals
    except sqlite3.Error as e:
        print(f"数据库错误: {e}")
        return {"error": f"数据库错误: {e}"}
    finally:
        conn.close()
def get_available_years():
    """Return the years that have a bilibili_history_<year> table, newest first."""
    conn = get_db()
    try:
        cur = conn.cursor()
        cur.execute("""
            SELECT name FROM sqlite_master
            WHERE type='table' AND name LIKE 'bilibili_history_%'
        """)
        found = []
        for (table_name,) in cur.fetchall():
            # Table names end in the year; skip anything malformed.
            try:
                found.append(int(table_name.rsplit('_', 1)[-1]))
            except (ValueError, IndexError):
                continue
        return sorted(found, reverse=True)
    except sqlite3.Error as e:
        print(f"数据库错误: {e}")
        return []
    finally:
        conn.close()
def get_daily_and_monthly_counts(target_year=None):
    """Compute per-day and per-month view counts and watch durations.

    Args:
        target_year: year to analyze; when None, all available years are
            analyzed and merged.

    Returns:
        dict with daily_count, monthly_count, total_count,
        daily_watch_seconds, monthly_watch_seconds, total_watch_seconds;
        or {"error": ...} on failure.

    Side effect: saves per-year daily counts to JSON files.
    """
    current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    print(f"========== 运行时间: {current_time} ==========")
    conn = get_db()
    try:
        cursor = conn.cursor()
        # Collect all years that actually have a history table.
        available_years = get_available_years()
        if not available_years:
            return {"error": "未找到任何历史记录数据"}
        # If a year was requested, verify it has data.
        if target_year is not None:
            if target_year not in available_years:
                return {"error": f"未找到 {target_year} 年的历史记录数据"}
            years_to_analyze = [target_year]
        else:
            years_to_analyze = available_years
        daily_count = {}
        monthly_count = {}
        total_count = 0
        # Watch-duration accumulators (seconds).
        daily_watch_seconds = {}
        monthly_watch_seconds = {}
        total_watch_seconds = 0
        # Analyze each selected year's table in turn.
        for year in years_to_analyze:
            table_name = f"bilibili_history_{year}"
            # Per-day view counts; 'localtime' converts from UTC epoch.
            cursor.execute(f"""
                SELECT
                    strftime('%Y-%m-%d', datetime(view_at, 'unixepoch', 'localtime')) as date,
                    COUNT(*) as count
                FROM {table_name}
                GROUP BY date
                ORDER BY date
            """)
            year_daily_count = {row[0]: row[1] for row in cursor.fetchall()}
            daily_count.update(year_daily_count)
            # Per-day watch time in seconds ('localtime' conversion).
            # NOTE(review): progress = -1 appears to mean "finished", so the
            # full duration is counted — confirm the upstream convention.
            cursor.execute(f"""
                SELECT
                    strftime('%Y-%m-%d', datetime(view_at, 'unixepoch', 'localtime')) as date,
                    SUM(
                        CASE
                            WHEN progress = -1 THEN duration
                            WHEN progress IS NULL THEN 0
                            WHEN progress >= 0 THEN CASE WHEN progress > duration THEN duration ELSE progress END
                            ELSE 0
                        END
                    ) as seconds
                FROM {table_name}
                GROUP BY date
                ORDER BY date
            """)
            year_daily_watch = {row[0]: int(row[1] or 0) for row in cursor.fetchall()}
            daily_watch_seconds.update(year_daily_watch)
            total_watch_seconds += sum(year_daily_watch.values())
            # Per-month view counts ('localtime' conversion).
            cursor.execute(f"""
                SELECT
                    strftime('%Y-%m', datetime(view_at, 'unixepoch', 'localtime')) as month,
                    COUNT(*) as count
                FROM {table_name}
                GROUP BY month
                ORDER BY month
            """)
            year_monthly_count = {row[0]: row[1] for row in cursor.fetchall()}
            monthly_count.update(year_monthly_count)
            # Per-month watch time in seconds ('localtime' conversion).
            cursor.execute(f"""
                SELECT
                    strftime('%Y-%m', datetime(view_at, 'unixepoch', 'localtime')) as month,
                    SUM(
                        CASE
                            WHEN progress = -1 THEN duration
                            WHEN progress IS NULL THEN 0
                            WHEN progress >= 0 THEN CASE WHEN progress > duration THEN duration ELSE progress END
                            ELSE 0
                        END
                    ) as seconds
                FROM {table_name}
                GROUP BY month
                ORDER BY month
            """)
            year_monthly_watch = {row[0]: int(row[1] or 0) for row in cursor.fetchall()}
            monthly_watch_seconds.update(year_monthly_watch)
            # Add this year's total views to the running sum.
            total_count += sum(year_daily_count.values())
            # Print this year's per-month view summary.
            print(f"\n{year}年每月观看视频数量:")
            for month, count in sorted(year_monthly_count.items()):
                print(f"{month}: {count} 个视频")
        # Persist the daily counts to JSON files.
        try:
            if target_year:
                output_file = save_daily_count_to_json(daily_count, target_year)
                print(f"每日观看数量已保存到: {output_file}")
            else:
                # Split merged data by year and save each separately.
                daily_count_by_year = {}
                for date, count in daily_count.items():
                    year = date.split('-')[0]
                    if year not in daily_count_by_year:
                        daily_count_by_year[year] = {}
                    daily_count_by_year[year][date] = count
                # One output file per year.
                for year, data in daily_count_by_year.items():
                    output_file = save_daily_count_to_json(data, year)
                    print(f"{year}年每日观看数量已保存到: {output_file}")
        except Exception as e:
            print(f"保存JSON文件时出错: {e}")
        return {
            "daily_count": daily_count,
            "monthly_count": monthly_count,
            "total_count": total_count,
            "daily_watch_seconds": daily_watch_seconds,
            "monthly_watch_seconds": monthly_watch_seconds,
            "total_watch_seconds": total_watch_seconds
        }
    except sqlite3.Error as e:
        print(f"数据库错误: {e}")
        return {"error": f"数据库错误: {e}"}
    finally:
        if conn:
            conn.close()
# When executed directly, run the all-years statistics report.
if __name__ == '__main__':
    result = get_daily_and_monthly_counts()
    if "error" in result:
        print(f"错误: {result['error']}")
    else:
        print("\n分析完成!")
|
2929004360/ruoyi-sign
| 4,085
|
ruoyi-common/src/main/java/com/ruoyi/common/utils/spring/SpringUtils.java
|
package com.ruoyi.common.utils.spring;
import org.springframework.aop.framework.AopContext;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.NoSuchBeanDefinitionException;
import org.springframework.beans.factory.config.BeanFactoryPostProcessor;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.stereotype.Component;
import com.ruoyi.common.utils.StringUtils;
/**
* spring工具类 方便在非spring管理环境中获取bean
*
* @author ruoyi
*/
@Component
public final class SpringUtils implements BeanFactoryPostProcessor, ApplicationContextAware
{
/** Spring应用上下文环境 */
private static ConfigurableListableBeanFactory beanFactory;
private static ApplicationContext applicationContext;
@Override
public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException
{
SpringUtils.beanFactory = beanFactory;
}
@Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException
{
SpringUtils.applicationContext = applicationContext;
}
/**
* 获取对象
*
* @param name
* @return Object 一个以所给名字注册的bean的实例
* @throws org.springframework.beans.BeansException
*
*/
@SuppressWarnings("unchecked")
public static <T> T getBean(String name) throws BeansException
{
return (T) beanFactory.getBean(name);
}
/**
* 获取类型为requiredType的对象
*
* @param clz
* @return
* @throws org.springframework.beans.BeansException
*
*/
public static <T> T getBean(Class<T> clz) throws BeansException
{
T result = (T) beanFactory.getBean(clz);
return result;
}
/**
* 如果BeanFactory包含一个与所给名称匹配的bean定义,则返回true
*
* @param name
* @return boolean
*/
public static boolean containsBean(String name)
{
return beanFactory.containsBean(name);
}
/**
* 判断以给定名字注册的bean定义是一个singleton还是一个prototype。 如果与给定名字相应的bean定义没有被找到,将会抛出一个异常(NoSuchBeanDefinitionException)
*
* @param name
* @return boolean
* @throws org.springframework.beans.factory.NoSuchBeanDefinitionException
*
*/
public static boolean isSingleton(String name) throws NoSuchBeanDefinitionException
{
return beanFactory.isSingleton(name);
}
/**
* @param name
* @return Class 注册对象的类型
* @throws org.springframework.beans.factory.NoSuchBeanDefinitionException
*
*/
public static Class<?> getType(String name) throws NoSuchBeanDefinitionException
{
return beanFactory.getType(name);
}
/**
* 如果给定的bean名字在bean定义中有别名,则返回这些别名
*
* @param name
* @return
* @throws org.springframework.beans.factory.NoSuchBeanDefinitionException
*
*/
public static String[] getAliases(String name) throws NoSuchBeanDefinitionException
{
return beanFactory.getAliases(name);
}
/**
* 获取aop代理对象
*
* @param invoker
* @return
*/
@SuppressWarnings("unchecked")
public static <T> T getAopProxy(T invoker)
{
return (T) AopContext.currentProxy();
}
/**
* 获取当前的环境配置,无配置返回null
*
* @return 当前的环境配置
*/
public static String[] getActiveProfiles()
{
return applicationContext.getEnvironment().getActiveProfiles();
}
/**
* 获取当前的环境配置,当有多个环境配置时,只获取第一个
*
* @return 当前的环境配置
*/
public static String getActiveProfile()
{
final String[] activeProfiles = getActiveProfiles();
return StringUtils.isNotEmpty(activeProfiles) ? activeProfiles[0] : null;
}
/**
* 获取配置文件中的值
*
* @param key 配置文件的key
* @return 当前的配置文件的值
*
*/
public static String getRequiredProperty(String key)
{
return applicationContext.getEnvironment().getRequiredProperty(key);
}
}
|
281677160/openwrt-package
| 30,442
|
luci-app-homeproxy/htdocs/luci-static/resources/view/homeproxy/server.js
|
/*
* SPDX-License-Identifier: GPL-2.0-only
*
* Copyright (C) 2022-2025 ImmortalWrt.org
*/
'use strict';
'require form';
'require poll';
'require rpc';
'require uci';
'require ui';
'require view';
'require homeproxy as hp';
'require tools.widgets as widgets';
const callServiceList = rpc.declare({
object: 'service',
method: 'list',
params: ['name'],
expect: { '': {} }
});
const CBIGenValue = form.Value.extend({
__name__: 'CBI.GenValue',
renderWidget(/* ... */) {
let node = form.Value.prototype.renderWidget.apply(this, arguments);
if (!this.password)
node.classList.add('control-group');
(node.querySelector('.control-group') || node).appendChild(E('button', {
class: 'cbi-button cbi-button-add',
title: _('Generate'),
click: ui.createHandlerFn(this, handleGenKey, this.hp_options || this.option)
}, [ _('Generate') ]));
return node;
}
});
function getServiceStatus() {
return L.resolveDefault(callServiceList('homeproxy'), {}).then((res) => {
let isRunning = false;
try {
isRunning = res['homeproxy']['instances']['sing-box-s']['running'];
} catch (e) { }
return isRunning;
});
}
function renderStatus(isRunning, version) {
let spanTemp = '<em><span style="color:%s"><strong>%s (sing-box v%s) %s</strong></span></em>';
let renderHTML;
if (isRunning)
renderHTML = spanTemp.format('green', _('HomeProxy Server'), version, _('RUNNING'));
else
renderHTML = spanTemp.format('red', _('HomeProxy Server'), version, _('NOT RUNNING'));
return renderHTML;
}
function handleGenKey(option) {
let section_id = this.section.section;
let type = this.section.getOption('type')?.formvalue(section_id);
let widget = L.bind((option) => {
return this.map.findElement('id', 'widget.' + this.cbid(section_id).replace(/\.[^\.]+$/, '.') + option);
}, this);
const callSingBoxGenerator = rpc.declare({
object: 'luci.homeproxy',
method: 'singbox_generator',
params: ['type', 'params'],
expect: { '': {} }
});
if (typeof option === 'object') {
return callSingBoxGenerator(option.type, option.params).then((res) => {
if (res.result)
option.callback.call(this, res.result).forEach(([k, v]) => {
widget(k).value = v ?? '';
});
else
ui.addNotification(null, E('p', _('Failed to generate %s, error: %s.').format(type, res.error)));
});
} else {
let password, required_method;
if (option === 'uuid')
required_method = 'uuid';
else if (type === 'shadowsocks')
required_method = this.section.getOption('shadowsocks_encrypt_method')?.formvalue(section_id);
switch (required_method) {
case 'none':
password = '';
break;
case 'uuid':
password = hp.generateRand('uuid');
break;
default:
password = hp.generateRand('hex', 16);
break;
}
/* AEAD */
((length) => {
if (length && length > 0)
password = hp.generateRand('base64', length);
})(hp.shadowsocks_encrypt_length[required_method]);
return widget(option).value = password;
}
}
return view.extend({
load() {
return Promise.all([
uci.load('homeproxy'),
hp.getBuiltinFeatures()
]);
},
render(data) {
let m, s, o;
let features = data[1];
m = new form.Map('homeproxy', _('HomeProxy Server'),
_('The modern ImmortalWrt proxy platform for ARM64/AMD64.'));
s = m.section(form.TypedSection);
s.render = function() {
poll.add(() => {
return L.resolveDefault(getServiceStatus()).then((res) => {
let view = document.getElementById('service_status');
view.innerHTML = renderStatus(res, features.version);
});
});
return E('div', { class: 'cbi-section', id: 'status_bar' }, [
E('p', { id: 'service_status' }, _('Collecting data...'))
]);
}
s = m.section(form.NamedSection, 'server', 'homeproxy', _('Global settings'));
o = s.option(form.Flag, 'enabled', _('Enable'));
o.rmempty = false;
s = m.section(form.GridSection, 'server', _('Server settings'));
s.addremove = true;
s.rowcolors = true;
s.sortable = true;
s.nodescriptions = true;
s.modaltitle = L.bind(hp.loadModalTitle, this, _('Server'), _('Add a server'), data[0]);
s.sectiontitle = L.bind(hp.loadDefaultLabel, this, data[0]);
s.renderSectionAdd = L.bind(hp.renderSectionAdd, this, s);
o = s.option(form.Value, 'label', _('Label'));
o.load = L.bind(hp.loadDefaultLabel, this, data[0]);
o.validate = L.bind(hp.validateUniqueValue, this, data[0], 'server', 'label');
o.rmempty = false;
o.modalonly = true;
o = s.option(form.Flag, 'enabled', _('Enable'));
o.default = o.enabled;
o.rmempty = false;
o.editable = true;
o = s.option(form.Flag, 'firewall', _('Firewall'),
_('Allow access from the Internet.'));
o.editable = true;
o = s.option(form.ListValue, 'type', _('Type'));
o.value('anytls', _('AnyTLS'));
o.value('http', _('HTTP'));
if (features.with_quic) {
o.value('hysteria', _('Hysteria'));
o.value('hysteria2', _('Hysteria2'));
o.value('naive', _('NaïveProxy'));
}
o.value('mixed', _('Mixed'));
o.value('shadowsocks', _('Shadowsocks'));
o.value('socks', _('Socks'));
o.value('trojan', _('Trojan'));
if (features.with_quic)
o.value('tuic', _('Tuic'));
o.value('vless', _('VLESS'));
o.value('vmess', _('VMess'));
o.rmempty = false;
o = s.option(form.Value, 'address', _('Listen address'));
o.placeholder = '::';
o.datatype = 'ipaddr';
o.modalonly = true;
o = s.option(form.Value, 'port', _('Listen port'),
_('The port must be unique.'));
o.datatype = 'port';
o.validate = L.bind(hp.validateUniqueValue, this, data[0], 'server', 'port');
o = s.option(form.Value, 'username', _('Username'));
o.depends('type', 'http');
o.depends('type', 'mixed');
o.depends('type', 'naive');
o.depends('type', 'socks');
o.modalonly = true;
o = s.option(CBIGenValue, 'password', _('Password'));
o.password = true;
o.depends('type', 'anytls');
o.depends({'type': /^(http|mixed|naive|socks)$/, 'username': /[\s\S]/});
o.depends('type', 'hysteria2');
o.depends('type', 'shadowsocks');
o.depends('type', 'trojan');
o.depends('type', 'tuic');
o.validate = function(section_id, value) {
if (section_id) {
let type = this.section.formvalue(section_id, 'type');
let required_type = [ 'anytls', 'http', 'mixed', 'naive', 'socks', 'shadowsocks', 'trojan' ];
if (required_type.includes(type)) {
if (type === 'shadowsocks') {
let encmode = this.section.formvalue(section_id, 'shadowsocks_encrypt_method');
if (encmode === 'none')
return true;
else if (encmode === '2022-blake3-aes-128-gcm')
return hp.validateBase64Key(24, section_id, value);
else if (['2022-blake3-aes-256-gcm', '2022-blake3-chacha20-poly1305'].includes(encmode))
return hp.validateBase64Key(44, section_id, value);
}
if (!value)
return _('Expecting: %s').format(_('non-empty value'));
}
}
return true;
}
o.modalonly = true;
/* AnyTLS config */
o = s.option(form.DynamicList, 'anytls_padding_scheme', _('Padding scheme'),
_('AnyTLS padding scheme in array.'));
o.depends('type', 'anytls');
o.modalonly = true;
/* Hysteria (2) config start */
o = s.option(form.ListValue, 'hysteria_protocol', _('Protocol'));
o.value('udp');
/* WeChat-Video / FakeTCP are unsupported by sing-box currently
o.value('wechat-video');
o.value('faketcp');
*/
o.default = 'udp';
o.depends('type', 'hysteria');
o.rmempty = false;
o.modalonly = true;
o = s.option(form.Value, 'hysteria_down_mbps', _('Max download speed'),
_('Max download speed in Mbps.'));
o.datatype = 'uinteger';
o.depends('type', 'hysteria');
o.depends('type', 'hysteria2');
o.modalonly = true;
o = s.option(form.Value, 'hysteria_up_mbps', _('Max upload speed'),
_('Max upload speed in Mbps.'));
o.datatype = 'uinteger';
o.depends('type', 'hysteria');
o.depends('type', 'hysteria2');
o.modalonly = true;
o = s.option(form.ListValue, 'hysteria_auth_type', _('Authentication type'));
o.value('', _('Disable'));
o.value('base64', _('Base64'));
o.value('string', _('String'));
o.depends('type', 'hysteria');
o.modalonly = true;
o = s.option(form.Value, 'hysteria_auth_payload', _('Authentication payload'));
o.password = true;
o.depends({'type': 'hysteria', 'hysteria_auth_type': /[\s\S]/});
o.rmempty = false;
o.modalonly = true;
o = s.option(form.ListValue, 'hysteria_obfs_type', _('Obfuscate type'));
o.value('', _('Disable'));
o.value('salamander', _('Salamander'));
o.depends('type', 'hysteria2');
o.modalonly = true;
o = s.option(CBIGenValue, 'hysteria_obfs_password', _('Obfuscate password'));
o.password = true;
o.depends('type', 'hysteria');
o.depends({'type': 'hysteria2', 'hysteria_obfs_type': /[\s\S]/});
o.modalonly = true;
o = s.option(form.Value, 'hysteria_recv_window_conn', _('QUIC stream receive window'),
_('The QUIC stream-level flow control window for receiving data.'));
o.datatype = 'uinteger';
o.default = '67108864';
o.depends('type', 'hysteria');
o.modalonly = true;
o = s.option(form.Value, 'hysteria_recv_window_client', _('QUIC connection receive window'),
_('The QUIC connection-level flow control window for receiving data.'));
o.datatype = 'uinteger';
o.default = '15728640';
o.depends('type', 'hysteria');
o.modalonly = true;
o = s.option(form.Value, 'hysteria_max_conn_client', _('QUIC maximum concurrent bidirectional streams'),
_('The maximum number of QUIC concurrent bidirectional streams that a peer is allowed to open.'));
o.datatype = 'uinteger';
o.default = '1024';
o.depends('type', 'hysteria');
o.modalonly = true;
o = s.option(form.Flag, 'hysteria_disable_mtu_discovery', _('Disable Path MTU discovery'),
_('Disables Path MTU Discovery (RFC 8899). Packets will then be at most 1252 (IPv4) / 1232 (IPv6) bytes in size.'));
o.depends('type', 'hysteria');
o.modalonly = true;
o = s.option(form.Flag, 'hysteria_ignore_client_bandwidth', _('Ignore client bandwidth'),
_('Tell the client to use the BBR flow control algorithm instead of Hysteria CC.'));
o.depends({'type': 'hysteria2', 'hysteria_down_mbps': '', 'hysteria_up_mbps': ''});
o.modalonly = true;
o = s.option(form.Value, 'hysteria_masquerade', _('Masquerade'),
_('HTTP3 server behavior when authentication fails.<br/>A 404 page will be returned if empty.'));
o.depends('type', 'hysteria2');
o.modalonly = true;
/* Hysteria (2) config end */
/* Shadowsocks config */
o = s.option(form.ListValue, 'shadowsocks_encrypt_method', _('Encrypt method'));
for (let i of hp.shadowsocks_encrypt_methods)
o.value(i);
o.default = 'aes-128-gcm';
o.depends('type', 'shadowsocks');
o.modalonly = true;
/* Tuic config start */
o = s.option(CBIGenValue, 'uuid', _('UUID'));
o.password = true;
o.depends('type', 'tuic');
o.depends('type', 'vless');
o.depends('type', 'vmess');
o.validate = hp.validateUUID;
o.modalonly = true;
o = s.option(form.ListValue, 'tuic_congestion_control', _('Congestion control algorithm'),
_('QUIC congestion control algorithm.'));
o.value('cubic');
o.value('new_reno');
o.value('bbr');
o.default = 'cubic';
o.depends('type', 'tuic');
o.modalonly = true;
o = s.option(form.Value, 'tuic_auth_timeout', _('Auth timeout'),
_('How long the server should wait for the client to send the authentication command (in seconds).'));
o.datatype = 'uinteger';
o.default = '3';
o.depends('type', 'tuic');
o.modalonly = true;
o = s.option(form.Flag, 'tuic_enable_zero_rtt', _('Enable 0-RTT handshake'),
_('Enable 0-RTT QUIC connection handshake on the client side. This is not impacting much on the performance, as the protocol is fully multiplexed.<br/>' +
'Disabling this is highly recommended, as it is vulnerable to replay attacks.'));
o.depends('type', 'tuic');
o.modalonly = true;
o = s.option(form.Value, 'tuic_heartbeat', _('Heartbeat interval'),
_('Interval for sending heartbeat packets for keeping the connection alive (in seconds).'));
o.datatype = 'uinteger';
o.default = '10';
o.depends('type', 'tuic');
o.modalonly = true;
/* Tuic config end */
/* VLESS / VMess config start */
o = s.option(form.ListValue, 'vless_flow', _('Flow'));
o.value('', _('None'));
o.value('xtls-rprx-vision');
o.depends('type', 'vless');
o.modalonly = true;
o = s.option(form.Value, 'vmess_alterid', _('Alter ID'),
_('Legacy protocol support (VMess MD5 Authentication) is provided for compatibility purposes only, use of alterId > 1 is not recommended.'));
o.datatype = 'uinteger';
o.depends('type', 'vmess');
o.modalonly = true;
/* VMess config end */
/* Transport config start */
o = s.option(form.ListValue, 'transport', _('Transport'),
_('No TCP transport, plain HTTP is merged into the HTTP transport.'));
o.value('', _('None'));
o.value('grpc', _('gRPC'));
o.value('http', _('HTTP'));
o.value('httpupgrade', _('HTTPUpgrade'));
o.value('quic', _('QUIC'));
o.value('ws', _('WebSocket'));
o.depends('type', 'trojan');
o.depends('type', 'vless');
o.depends('type', 'vmess');
o.onchange = function(ev, section_id, value) {
let desc = this.map.findElement('id', 'cbid.homeproxy.%s.transport'.format(section_id)).nextElementSibling;
if (value === 'http')
desc.innerHTML = _('TLS is not enforced. If TLS is not configured, plain HTTP 1.1 is used.');
else if (value === 'quic')
desc.innerHTML = _('No additional encryption support: It\'s basically duplicate encryption.');
else
desc.innerHTML = _('No TCP transport, plain HTTP is merged into the HTTP transport.');
let tls_element = this.map.findElement('id', 'cbid.homeproxy.%s.tls'.format(section_id)).firstElementChild;
if ((value === 'http' && tls_element.checked) || (value === 'grpc' && !features.with_grpc))
this.map.findElement('id', 'cbid.homeproxy.%s.http_idle_timeout'.format(section_id)).nextElementSibling.innerHTML =
_('Specifies the time (in seconds) until idle clients should be closed with a GOAWAY frame. PING frames are not considered as activity.');
else if (value === 'grpc' && features.with_grpc)
this.map.findElement('id', 'cbid.homeproxy.%s.http_idle_timeout'.format(section_id)).nextElementSibling.innerHTML =
_('If the transport doesn\'t see any activity after a duration of this time (in seconds), it pings the client to check if the connection is still active.');
}
o.modalonly = true;
/* gRPC config start */
o = s.option(form.Value, 'grpc_servicename', _('gRPC service name'));
o.depends('transport', 'grpc');
o.modalonly = true;
/* gRPC config end */
/* HTTP(Upgrade) config start */
o = s.option(form.DynamicList, 'http_host', _('Host'));
o.datatype = 'hostname';
o.depends('transport', 'http');
o.modalonly = true;
o = s.option(form.Value, 'httpupgrade_host', _('Host'));
o.datatype = 'hostname';
o.depends('transport', 'httpupgrade');
o.modalonly = true;
o = s.option(form.Value, 'http_path', _('Path'));
o.depends('transport', 'http');
o.depends('transport', 'httpupgrade');
o.modalonly = true;
o = s.option(form.Value, 'http_method', _('Method'));
o.depends('transport', 'http');
o.modalonly = true;
o = s.option(form.Value, 'http_idle_timeout', _('Idle timeout'),
_('Specifies the time (in seconds) until idle clients should be closed with a GOAWAY frame. PING frames are not considered as activity.'));
o.datatype = 'uinteger';
o.depends('transport', 'grpc');
o.depends({'transport': 'http', 'tls': '1'});
o.modalonly = true;
if (features.with_grpc) {
o = s.option(form.Value, 'http_ping_timeout', _('Ping timeout'),
_('The timeout (in seconds) that after performing a keepalive check, the client will wait for activity. If no activity is detected, the connection will be closed.'));
o.datatype = 'uinteger';
o.depends('transport', 'grpc');
o.modalonly = true;
}
/* HTTP config end */
/* WebSocket config start */
o = s.option(form.Value, 'ws_host', _('Host'));
o.depends('transport', 'ws');
o.modalonly = true;
o = s.option(form.Value, 'ws_path', _('Path'));
o.depends('transport', 'ws');
o.modalonly = true;
o = s.option(form.Value, 'websocket_early_data', _('Early data'),
_('Allowed payload size is in the request.'));
o.datatype = 'uinteger';
o.value('2048');
o.depends('transport', 'ws');
o.modalonly = true;
o = s.option(form.Value, 'websocket_early_data_header', _('Early data header name'),
_('Early data is sent in path instead of header by default.') +
'<br/>' +
_('To be compatible with Xray-core, set this to <code>Sec-WebSocket-Protocol</code>.'));
o.value('Sec-WebSocket-Protocol');
o.depends('transport', 'ws');
o.modalonly = true;
/* WebSocket config end */
/* Transport config end */
/* Mux config start */
o = s.option(form.Flag, 'multiplex', _('Multiplex'));
o.depends('type', 'shadowsocks');
o.depends('type', 'trojan');
o.depends('type', 'vless');
o.depends('type', 'vmess');
o.modalonly = true;
o = s.option(form.Flag, 'multiplex_padding', _('Enable padding'));
o.depends('multiplex', '1');
o.modalonly = true;
if (features.hp_has_tcp_brutal) {
o = s.option(form.Flag, 'multiplex_brutal', _('Enable TCP Brutal'),
_('Enable TCP Brutal congestion control algorithm'));
o.depends('multiplex', '1');
o.modalonly = true;
o = s.option(form.Value, 'multiplex_brutal_down', _('Download bandwidth'),
_('Download bandwidth in Mbps.'));
o.datatype = 'uinteger';
o.depends('multiplex_brutal', '1');
o.modalonly = true;
o = s.option(form.Value, 'multiplex_brutal_up', _('Upload bandwidth'),
_('Upload bandwidth in Mbps.'));
o.datatype = 'uinteger';
o.depends('multiplex_brutal', '1');
o.modalonly = true;
}
/* Mux config end */
/* TLS config start */
o = s.option(form.Flag, 'tls', _('TLS'));
o.depends('type', 'anytls');
o.depends('type', 'http');
o.depends('type', 'hysteria');
o.depends('type', 'hysteria2');
o.depends('type', 'naive');
o.depends('type', 'trojan');
o.depends('type', 'tuic');
o.depends('type', 'vless');
o.depends('type', 'vmess');
o.rmempty = false;
o.validate = function(section_id, value) {
if (section_id) {
let type = this.map.lookupOption('type', section_id)[0].formvalue(section_id);
let tls = this.map.findElement('id', 'cbid.homeproxy.%s.tls'.format(section_id)).firstElementChild;
if (['hysteria', 'hysteria2', 'tuic'].includes(type)) {
tls.checked = true;
tls.disabled = true;
} else {
tls.disabled = null;
}
}
return true;
}
o.modalonly = true;
o = s.option(form.Value, 'tls_sni', _('TLS SNI'),
_('Used to verify the hostname on the returned certificates unless insecure is given.'));
o.depends('tls', '1');
o.modalonly = true;
o = s.option(form.DynamicList, 'tls_alpn', _('TLS ALPN'),
_('List of supported application level protocols, in order of preference.'));
o.depends('tls', '1');
o.modalonly = true;
o = s.option(form.ListValue, 'tls_min_version', _('Minimum TLS version'),
_('The minimum TLS version that is acceptable.'));
o.value('', _('default'));
for (let i of hp.tls_versions)
o.value(i);
o.depends('tls', '1');
o.modalonly = true;
o = s.option(form.ListValue, 'tls_max_version', _('Maximum TLS version'),
_('The maximum TLS version that is acceptable.'));
o.value('', _('default'));
for (let i of hp.tls_versions)
o.value(i);
o.depends('tls', '1');
o.modalonly = true;
o = s.option(hp.CBIStaticList, 'tls_cipher_suites', _('Cipher suites'),
_('The elliptic curves that will be used in an ECDHE handshake, in preference order. If empty, the default will be used.'));
for (let i of hp.tls_cipher_suites)
o.value(i);
o.depends('tls', '1');
o.optional = true;
o.modalonly = true;
if (features.with_acme) {
o = s.option(form.Flag, 'tls_acme', _('Enable ACME'),
_('Use ACME TLS certificate issuer.'));
o.depends('tls', '1');
o.modalonly = true;
o = s.option(form.DynamicList, 'tls_acme_domain', _('Domains'));
o.datatype = 'hostname';
o.depends('tls_acme', '1');
o.rmempty = false;
o.modalonly = true;
o = s.option(form.Value, 'tls_acme_dsn', _('Default server name'),
_('Server name to use when choosing a certificate if the ClientHello\'s ServerName field is empty.'));
o.depends('tls_acme', '1');
o.rmempty = false;
o.modalonly = true;
o = s.option(form.Value, 'tls_acme_email', _('Email'),
_('The email address to use when creating or selecting an existing ACME server account.'));
o.depends('tls_acme', '1');
o.validate = function(section_id, value) {
if (section_id) {
if (!value)
return _('Expecting: %s').format('non-empty value');
else if (!value.match(/^[^\s@]+@[^\s@]+\.[^\s@]+$/))
return _('Expecting: %s').format('valid email address');
}
return true;
}
o.modalonly = true;
o = s.option(form.Value, 'tls_acme_provider', _('CA provider'),
_('The ACME CA provider to use.'));
o.value('letsencrypt', _('Let\'s Encrypt'));
o.value('zerossl', _('ZeroSSL'));
o.depends('tls_acme', '1');
o.rmempty = false;
o.modalonly = true;
o = s.option(form.Flag, 'tls_dns01_challenge', _('DNS01 challenge'))
o.depends('tls_acme', '1');
o.modalonly = true;
o = s.option(form.ListValue, 'tls_dns01_provider', _('DNS provider'));
o.value('alidns', _('Alibaba Cloud DNS'));
o.value('cloudflare', _('Cloudflare'));
o.depends('tls_dns01_challenge', '1');
o.default = 'cloudflare';
o.rmempty = false;
o.modalonly = true;
o = s.option(form.Value, 'tls_dns01_ali_akid', _('Access key ID'));
o.password = true;
o.depends('tls_dns01_provider', 'alidns');
o.rmempty = false;
o.modalonly = true;
o = s.option(form.Value, 'tls_dns01_ali_aksec', _('Access key secret'));
o.password = true;
o.depends('tls_dns01_provider', 'alidns');
o.rmempty = false;
o.modalonly = true;
o = s.option(form.Value, 'tls_dns01_ali_rid', _('Region ID'));
o.depends('tls_dns01_provider', 'alidns');
o.rmempty = false;
o.modalonly = true;
o = s.option(form.Value, 'tls_dns01_cf_api_token', _('API token'));
o.password = true;
o.depends('tls_dns01_provider', 'cloudflare');
o.rmempty = false;
o.modalonly = true;
o = s.option(form.Flag, 'tls_acme_dhc', _('Disable HTTP challenge'));
o.depends('tls_dns01_challenge', '0');
o.modalonly = true;
o = s.option(form.Flag, 'tls_acme_dtac', _('Disable TLS ALPN challenge'));
o.depends('tls_dns01_challenge', '0');
o.modalonly = true;
o = s.option(form.Value, 'tls_acme_ahp', _('Alternative HTTP port'),
_('The alternate port to use for the ACME HTTP challenge; if non-empty, this port will be used instead of 80 to spin up a listener for the HTTP challenge.'));
o.datatype = 'port';
o.depends('tls_dns01_challenge', '0');
o.modalonly = true;
o = s.option(form.Value, 'tls_acme_atp', _('Alternative TLS port'),
_('The alternate port to use for the ACME TLS-ALPN challenge; the system must forward 443 to this port for challenge to succeed.'));
o.datatype = 'port';
o.depends('tls_dns01_challenge', '0');
o.modalonly = true;
o = s.option(form.Flag, 'tls_acme_external_account', _('External Account Binding'),
_('EAB (External Account Binding) contains information necessary to bind or map an ACME account to some other account known by the CA.' +
'<br/>External account bindings are "used to associate an ACME account with an existing account in a non-ACME system, such as a CA customer database.'));
o.depends('tls_acme', '1');
o.modalonly = true;
o = s.option(form.Value, 'tls_acme_ea_keyid', _('External account key ID'));
o.password = true;
o.depends('tls_acme_external_account', '1');
o.rmempty = false;
o.modalonly = true;
o = s.option(form.Value, 'tls_acme_ea_mackey', _('External account MAC key'));
o.password = true;
o.depends('tls_acme_external_account', '1');
o.rmempty = false;
o.modalonly = true;
}
o = s.option(form.Flag, 'tls_reality', _('REALITY'));
o.depends({'tls': '1', 'tls_acme': '0', 'type': /^(anytls|vless)$/});
o.depends({'tls': '1', 'tls_acme': null, 'type': /^(anytls|vless)$/});
o.modalonly = true;
o = s.option(CBIGenValue, 'tls_reality_private_key', _('REALITY private key'));
o.password = true;
o.hp_options = {
type: 'reality-keypair',
params: '',
callback: function(result) {
return [
[this.option, result.private_key],
['tls_reality_public_key', result.public_key]
]
}
}
o.depends('tls_reality', '1');
o.rmempty = false;
o.modalonly = true;
o = s.option(form.Value, 'tls_reality_public_key', _('REALITY public key'));
o.depends('tls_reality', '1');
o.modalonly = true;
o = s.option(form.DynamicList, 'tls_reality_short_id', _('REALITY short ID'));
o.depends('tls_reality', '1');
o.rmempty = false;
o.modalonly = true;
o = s.option(form.Value, 'tls_reality_max_time_difference', _('Max time difference'),
_('The maximum time difference between the server and the client.'));
o.depends('tls_reality', '1');
o.modalonly = true;
o = s.option(form.Value, 'tls_reality_server_addr', _('Handshake server address'));
o.datatype = 'hostname';
o.depends('tls_reality', '1');
o.rmempty = false;
o.modalonly = true;
o = s.option(form.Value, 'tls_reality_server_port', _('Handshake server port'));
o.datatype = 'port';
o.depends('tls_reality', '1');
o.rmempty = false;
o.modalonly = true;
o = s.option(form.Value, 'tls_cert_path', _('Certificate path'),
_('The server public key, in PEM format.'));
o.value('/etc/homeproxy/certs/server_publickey.pem');
o.depends({'tls': '1', 'tls_acme': '0', 'tls_reality': null});
o.depends({'tls': '1', 'tls_acme': '0', 'tls_reality': '0'});
o.depends({'tls': '1', 'tls_acme': null, 'tls_reality': '0'});
o.depends({'tls': '1', 'tls_acme': null, 'tls_reality': null});
o.validate = hp.validateCertificatePath;
o.rmempty = false;
o.modalonly = true;
o = s.option(form.Button, '_upload_cert', _('Upload certificate'),
_('<strong>Save your configuration before uploading files!</strong>'));
o.inputstyle = 'action';
o.inputtitle = _('Upload...');
o.depends({'tls': '1', 'tls_cert_path': '/etc/homeproxy/certs/server_publickey.pem'});
o.onclick = L.bind(hp.uploadCertificate, this, _('certificate'), 'server_publickey');
o.modalonly = true;
o = s.option(form.Value, 'tls_key_path', _('Key path'),
_('The server private key, in PEM format.'));
o.value('/etc/homeproxy/certs/server_privatekey.pem');
o.depends({'tls': '1', 'tls_acme': '0', 'tls_reality': '0'});
o.depends({'tls': '1', 'tls_acme': '0', 'tls_reality': null});
o.depends({'tls': '1', 'tls_acme': null, 'tls_reality': '0'});
o.depends({'tls': '1', 'tls_acme': null, 'tls_reality': null});
o.validate = hp.validateCertificatePath;
o.rmempty = false;
o.modalonly = true;
o = s.option(form.Button, '_upload_key', _('Upload key'),
_('<strong>Save your configuration before uploading files!</strong>'));
o.inputstyle = 'action';
o.inputtitle = _('Upload...');
o.depends({'tls': '1', 'tls_key_path': '/etc/homeproxy/certs/server_privatekey.pem'});
o.onclick = L.bind(hp.uploadCertificate, this, _('private key'), 'server_privatekey');
o.modalonly = true;
o = s.option(form.TextValue, 'tls_ech_key', _('ECH key'));
o.placeholder = '-----BEGIN ECH KEYS-----\nACBE2+piYBLrOywCbRYU+ZpEkk8keeBlUXbKqLRmQ/68FwBL/g0ARwAAIAAgn8HI\n93RfdV/LaDk+LC9H4h+4WhVBFmWKdhiT3vvpGi8ACAABAAEAAQADABRvdXRlci1z\nbmkuYW55LmRvbWFpbgAA\n-----END ECH KEYS-----';
o.monospace = true;
o.cols = 30
o.rows = 3;
o.hp_options = {
type: 'ech-keypair',
params: '',
callback: function(result) {
return [
[this.option, result.ech_key],
['tls_ech_config', result.ech_cfg]
]
}
}
o.renderWidget = function(section_id, option_index, cfgvalue) {
let node = form.TextValue.prototype.renderWidget.apply(this, arguments);
const cbid = this.cbid(section_id) + '._outer_sni';
node.appendChild(E('div', { 'class': 'control-group' }, [
E('input', {
id: cbid,
class: 'cbi-input-text',
style: 'width: 10em',
placeholder: 'outer-sni.any.domain'
}),
E('button', {
class: 'cbi-button cbi-button-add',
click: ui.createHandlerFn(this, () => {
this.hp_options.params = document.getElementById(cbid).value;
return handleGenKey.call(this, this.hp_options);
})
}, [ _('Generate') ])
]));
return node;
}
o.depends('tls', '1');
o.modalonly = true;
o = s.option(form.TextValue, 'tls_ech_config', _('ECH config'));
o.placeholder = '-----BEGIN ECH CONFIGS-----\nAEv+DQBHAAAgACCfwcj3dF91X8toOT4sL0fiH7haFUEWZYp2GJPe++kaLwAIAAEA\nAQABAAMAFG91dGVyLXNuaS5hbnkuZG9tYWluAAA=\n-----END ECH CONFIGS-----';
o.monospace = true;
o.cols = 30
o.rows = 3;
o.depends('tls', '1');
o.modalonly = true;
/* TLS config end */
/* Extra settings start */
o = s.option(form.Flag, 'tcp_fast_open', _('TCP fast open'),
_('Enable tcp fast open for listener.'));
o.depends({'network': 'udp', '!reverse': true});
o.modalonly = true;
o = s.option(form.Flag, 'tcp_multi_path', _('MultiPath TCP'));
o.depends({'network': 'udp', '!reverse': true});
o.modalonly = true;
o = s.option(form.Flag, 'udp_fragment', _('UDP Fragment'),
_('Enable UDP fragmentation.'));
o.depends({'network': 'tcp', '!reverse': true});
o.modalonly = true;
o = s.option(form.Value, 'udp_timeout', _('UDP NAT expiration time'),
_('In seconds.'));
o.datatype = 'uinteger';
o.placeholder = '300';
o.depends({'network': 'tcp', '!reverse': true});
o.modalonly = true;
o = s.option(form.ListValue, 'network', _('Network'));
o.value('tcp', _('TCP'));
o.value('udp', _('UDP'));
o.value('', _('Both'));
o.depends('type', 'naive');
o.depends('type', 'shadowsocks');
o.modalonly = true;
o = s.option(widgets.DeviceSelect, 'bind_interface', _('Bind interface'),
_('The network interface to bind to.'));
o.multiple = false;
o.noaliases = true;
o.modalonly = true;
o = s.option(form.Flag, 'reuse_addr', _('Reuse address'),
_('Reuse listener address.'));
o.modalonly = true;
/* Extra settings end */
return m.render();
}
});
|
2929004360/ruoyi-sign
| 2,667
|
ruoyi-common/src/main/java/com/ruoyi/common/utils/bean/BeanUtils.java
|
package com.ruoyi.common.utils.bean;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Bean 工具类
*
* @author ruoyi
*/
public class BeanUtils extends org.springframework.beans.BeanUtils
{
    /** Index at which the property name begins inside an accessor name ("get"/"set" are 3 chars). */
    private static final int BEAN_METHOD_PROP_INDEX = 3;

    /** Pattern matching conventional getter method names. */
    private static final Pattern GET_PATTERN = Pattern.compile("get(\\p{javaUpperCase}\\w*)");

    /** Pattern matching conventional setter method names. */
    private static final Pattern SET_PATTERN = Pattern.compile("set(\\p{javaUpperCase}\\w*)");

    /**
     * Copy all matching bean properties from the source object onto the destination object.
     * Any exception is printed and swallowed, mirroring the lenient contract callers expect.
     *
     * @param dest destination object
     * @param src  source object
     */
    public static void copyBeanProp(Object dest, Object src)
    {
        try
        {
            copyProperties(src, dest);
        }
        catch (Exception e)
        {
            e.printStackTrace();
        }
    }

    /**
     * Collect the setter methods of the given object.
     *
     * @param obj object to inspect
     * @return every public one-argument method whose name matches set&lt;Prop&gt;
     */
    public static List<Method> getSetterMethods(Object obj)
    {
        List<Method> result = new ArrayList<Method>();
        for (Method candidate : obj.getClass().getMethods())
        {
            boolean nameMatches = SET_PATTERN.matcher(candidate.getName()).matches();
            if (nameMatches && candidate.getParameterTypes().length == 1)
            {
                result.add(candidate);
            }
        }
        return result;
    }

    /**
     * Collect the getter methods of the given object.
     *
     * @param obj object to inspect
     * @return every public zero-argument method whose name matches get&lt;Prop&gt;
     */
    public static List<Method> getGetterMethods(Object obj)
    {
        List<Method> result = new ArrayList<Method>();
        for (Method candidate : obj.getClass().getMethods())
        {
            boolean nameMatches = GET_PATTERN.matcher(candidate.getName()).matches();
            if (nameMatches && candidate.getParameterTypes().length == 0)
            {
                result.add(candidate);
            }
        }
        return result;
    }

    /**
     * Compare the property parts of two accessor names, e.g. getName()/setName()
     * refer to the same property while getName()/setAge() do not.
     *
     * @param m1 first method name
     * @param m2 second method name
     * @return true when both names refer to the same property
     */
    public static boolean isMethodPropEquals(String m1, String m2)
    {
        return m1.substring(BEAN_METHOD_PROP_INDEX).equals(m2.substring(BEAN_METHOD_PROP_INDEX));
    }
}
|
2977094657/BilibiliHistoryFetcher
| 12,026
|
scripts/check_data_integrity.py
|
import json
import logging
import os
import sqlite3
from datetime import datetime
# 配置日志
# 确保输出目录存在
os.makedirs("output/check", exist_ok=True)
# 设置自定义的日志格式化器,以正确处理中文字符
class EncodingFormatter(logging.Formatter):
    """Formatter that guarantees a plain ``str`` result so CJK text is not escaped."""

    def format(self, record):
        rendered = super().format(record)
        # logging.Formatter.format already returns str; coerce defensively anyway.
        return rendered if isinstance(rendered, str) else str(rendered)
# Wire up the logging handlers: an append-mode UTF-8 file handler plus console
# output, both using the encoding-safe formatter defined above.
formatter = EncodingFormatter('%(asctime)s - %(levelname)s - %(message)s')
file_handler = logging.FileHandler("output/check/data_integrity_check.log", mode='a', encoding='utf-8')
file_handler.setFormatter(formatter)
console_handler = logging.StreamHandler()
console_handler.setFormatter(formatter)
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
logger.handlers = []  # drop any handlers left over from a previous configuration
logger.addHandler(file_handler)
logger.addHandler(console_handler)
logger.propagate = False  # keep messages from bubbling up to the root logger
def get_json_files(json_root_path):
    """Collect every day-level JSON file under ``<root>/<year>/<month>/<day>.json``.

    Only purely numeric year/month directory names are traversed.

    Returns:
        list[dict]: one dict per file with keys ``path``, ``year``, ``month``, ``day``.
    """
    collected = []
    for year_name in os.listdir(json_root_path):
        year_dir = os.path.join(json_root_path, year_name)
        if not (year_name.isdigit() and os.path.isdir(year_dir)):
            continue
        for month_name in os.listdir(year_dir):
            month_dir = os.path.join(year_dir, month_name)
            if not (month_name.isdigit() and os.path.isdir(month_dir)):
                continue
            for entry in os.listdir(month_dir):
                if entry.endswith('.json'):
                    collected.append({
                        'path': os.path.join(month_dir, entry),
                        'year': int(year_name),
                        'month': int(month_name),
                        'day': int(entry.split('.')[0]),
                    })
    return collected
def count_records_in_json_file(file_path):
    """Return ``(record_count, titles)`` for one history JSON file.

    On any read/parse failure the error is logged and ``(0, [])`` is returned.
    """
    try:
        with open(file_path, 'r', encoding='utf-8') as fh:
            records = json.load(fh)
            names = [rec.get('title', '未知标题') for rec in records]
            return len(records), names
    except Exception as e:
        logger.error(f"读取JSON文件 {file_path} 时出错: {e}")
        return 0, []
def get_db_tables(db_path):
    """Return the names of all tables in the SQLite database, or ``[]`` on error."""
    try:
        connection = sqlite3.connect(db_path)
        cur = connection.cursor()
        cur.execute("SELECT name FROM sqlite_master WHERE type='table';")
        names = [row[0] for row in cur.fetchall()]
        connection.close()
        return names
    except Exception as e:
        logger.error(f"获取数据库表时出错: {e}")
        return []
def count_records_in_db_table(db_path, table_name):
    """Return the row count of ``table_name``, or 0 on any error.

    Note: ``table_name`` is interpolated into the SQL text — callers must pass
    trusted, internally-generated names only.
    """
    try:
        connection = sqlite3.connect(db_path)
        cur = connection.cursor()
        cur.execute(f"SELECT COUNT(*) FROM {table_name}")
        total = cur.fetchone()[0]
        connection.close()
        return total
    except Exception as e:
        logger.error(f"统计表 {table_name} 记录数时出错: {e}")
        return 0
def get_records_by_date(db_path, table_name, year, month, day):
    """Return ``(count, titles)`` of rows whose ``view_at`` falls on the given local date.

    The day boundaries are computed with local-time ``datetime.timestamp()``.
    Returns ``(0, [])`` on any error.
    """
    try:
        # Timestamp range covering 00:00:00 .. 23:59:59 of the target day.
        day_start = datetime(year, month, day).timestamp()
        day_end = datetime(year, month, day, 23, 59, 59).timestamp()
        connection = sqlite3.connect(db_path)
        cur = connection.cursor()
        cur.execute(f"SELECT title, view_at FROM {table_name} WHERE view_at >= ? AND view_at <= ?",
                    (day_start, day_end))
        rows = cur.fetchall()
        names = [row[0] for row in rows]
        connection.close()
        return len(rows), names
    except Exception as e:
        logger.error(f"获取{year}年{month}月{day}日的记录时出错: {e}")
        return 0, []
def check_data_integrity(db_path=None, json_root_path=None):
    """Cross-check day-level JSON history files against the SQLite history tables.

    For every <year>/<month>/<day>.json file, compares the record count (and
    titles) with rows of table ``bilibili_history_<year>`` whose ``view_at``
    falls on that local date. Differences are accumulated in a results dict,
    dumped to output/check/data_integrity_results.json and rendered into a
    Markdown report via ``generate_report``.

    Args:
        db_path: history database path; defaults to output/bilibili_history.db.
        json_root_path: root of the per-day JSON tree; defaults to
            output/history_by_date.

    Returns:
        dict: summary with totals, output file paths and the signed
        record-count difference (json - db).
    """
    # Resolve default locations relative to the working directory.
    if db_path is None:
        db_path = os.path.join('output', 'bilibili_history.db')
    if json_root_path is None:
        json_root_path = os.path.join('output', 'history_by_date')
    logger.info("开始数据完整性检查...")
    logger.info(f"数据库路径: {db_path}")
    logger.info(f"JSON文件路径: {json_root_path}")
    # Make sure the report directory exists before anything is written.
    output_dir = os.path.join("output", "check")
    os.makedirs(output_dir, exist_ok=True)
    results = {
        "total_json_files": 0,
        "total_json_records": 0,
        "total_db_records": 0,
        "db_tables": [],
        "missing_records": [],
        "extra_records": []
    }
    # Discover database tables; history tables are named bilibili_history_<year>.
    tables = get_db_tables(db_path)
    results["db_tables"] = tables
    logger.info(f"数据库包含以下表: {', '.join(tables)}")
    history_tables = [table for table in tables if table.startswith('bilibili_history_')]
    # Walk every day-level JSON file.
    json_files = get_json_files(json_root_path)
    results["total_json_files"] = len(json_files)
    logger.info(f"找到 {len(json_files)} 个JSON文件")
    all_json_records = 0
    all_db_records = 0
    for file_info in json_files:
        file_path = file_info['path']
        year = file_info['year']
        month = file_info['month']
        day = file_info['day']
        # Count records on the JSON side.
        json_count, json_titles = count_records_in_json_file(file_path)
        all_json_records += json_count
        if json_count == 0:
            logger.warning(f"JSON文件为空: {file_path}")
            continue
        # Locate the matching year table on the DB side.
        table_name = f"bilibili_history_{year}"
        if table_name not in history_tables:
            logger.error(f"数据库中缺少表 {table_name}")
            results["missing_records"].append({
                "year": year,
                "month": month,
                "day": day,
                "missing_count": json_count,
                "missing_titles": json_titles,
                "reason": f"数据库中缺少表 {table_name}"
            })
            continue
        # Count DB records for the same calendar day.
        db_count, db_titles = get_records_by_date(db_path, table_name, year, month, day)
        all_db_records += db_count
        # Record any per-day difference, keeping at most 10 sample titles.
        if json_count > db_count:
            missing_count = json_count - db_count
            # Titles present in JSON but absent from the DB for that day.
            missing_titles = [title for title in json_titles if title not in db_titles]
            results["missing_records"].append({
                "year": year,
                "month": month,
                "day": day,
                "missing_count": missing_count,
                "missing_titles": missing_titles[:10] if len(missing_titles) > 10 else missing_titles,  # cap at 10 samples
                "reason": "数据库记录少于JSON文件"
            })
            logger.warning(f"{year}年{month}月{day}日 - 数据库中缺少 {missing_count} 条记录")
        elif json_count < db_count:
            extra_count = db_count - json_count
            # Titles present in the DB but absent from JSON for that day.
            extra_titles = [title for title in db_titles if title not in json_titles]
            results["extra_records"].append({
                "year": year,
                "month": month,
                "day": day,
                "extra_count": extra_count,
                "extra_titles": extra_titles[:10] if len(extra_titles) > 10 else extra_titles,  # cap at 10 samples
                "reason": "数据库记录多于JSON文件"
            })
            logger.warning(f"{year}年{month}月{day}日 - 数据库中多出 {extra_count} 条记录")
    # Grand totals across all files/tables.
    results["total_json_records"] = all_json_records
    results["total_db_records"] = all_db_records
    for table in history_tables:
        table_count = count_records_in_db_table(db_path, table)
        logger.info(f"表 {table} 中有 {table_count} 条记录")
    if all_json_records > all_db_records:
        logger.error(f"总缺少记录数: {all_json_records - all_db_records}")
    elif all_json_records < all_db_records:
        logger.error(f"总多余记录数: {all_db_records - all_json_records}")
    else:
        logger.info("JSON文件和数据库中的记录数量完全匹配")
    # Persist the machine-readable results.
    result_file = os.path.join(output_dir, "data_integrity_results.json")
    with open(result_file, 'w', encoding='utf-8') as f:
        json.dump(results, f, ensure_ascii=False, indent=4)
    logger.info(f"检查完成,结果已保存到 {result_file}")
    # Render the human-readable Markdown report.
    report_file = generate_report(results)
    return {
        "success": True,
        "result_file": result_file,
        "report_file": report_file,
        "total_json_files": results["total_json_files"],
        "total_json_records": results["total_json_records"],
        "total_db_records": results["total_db_records"],
        "missing_records_count": len(results["missing_records"]),
        "extra_records_count": len(results["extra_records"]),
        "difference": all_json_records - all_db_records
    }
def generate_report(results):
    """Render the integrity-check results dict as a Markdown report.

    The report is written to output/check/data_integrity_report.md and the
    file path is returned.
    """
    report_dir = os.path.join("output", "check")
    os.makedirs(report_dir, exist_ok=True)

    lines = ["# 数据完整性检查报告\n"]

    # Header section with overall numbers.
    lines.append(f"## 基本信息\n")
    lines.append(f"* 检查时间: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
    lines.append(f"* JSON文件总数: {results['total_json_files']}")
    lines.append(f"* JSON记录总数: {results['total_json_records']}")
    lines.append(f"* 数据库记录总数: {results['total_db_records']}")
    lines.append(f"* 数据库表: {', '.join(results['db_tables'])}\n")

    # Overall delta between JSON and DB record counts.
    delta = results['total_json_records'] - results['total_db_records']
    if delta > 0:
        lines.append(f"## 总体情况: 数据库缺少 {delta} 条记录\n")
    elif delta < 0:
        lines.append(f"## 总体情况: 数据库多出 {-delta} 条记录\n")
    else:
        lines.append(f"## 总体情况: 数据库和JSON文件记录数一致\n")

    # Per-day detail: records present in JSON but missing from the DB.
    if results["missing_records"]:
        lines.append(f"## 缺少的记录 (共 {len(results['missing_records'])} 天)\n")
        for entry in results["missing_records"]:
            lines.append(f"### {entry['year']}年{entry['month']}月{entry['day']}日 - 缺少 {entry['missing_count']} 条记录")
            lines.append(f"原因: {entry['reason']}")
            if entry['missing_titles']:
                lines.append("缺少的标题示例:")
                lines.extend(f"* {title}" for title in entry['missing_titles'])
            lines.append("")

    # Per-day detail: records present in the DB but absent from JSON.
    if results["extra_records"]:
        lines.append(f"## 多余的记录 (共 {len(results['extra_records'])} 天)\n")
        for entry in results["extra_records"]:
            lines.append(f"### {entry['year']}年{entry['month']}月{entry['day']}日 - 多出 {entry['extra_count']} 条记录")
            lines.append(f"原因: {entry['reason']}")
            if entry['extra_titles']:
                lines.append("多余的标题示例:")
                lines.extend(f"* {title}" for title in entry['extra_titles'])
            lines.append("")

    report_file = os.path.join(report_dir, "data_integrity_report.md")
    with open(report_file, 'w', encoding='utf-8') as fh:
        fh.write('\n'.join(lines))
    logger.info(f"报告已保存到 {report_file}")
    return report_file
def main():
    """Command-line entry point: run the integrity check with default paths."""
    db_path = os.path.join('output', 'bilibili_history.db')
    json_root_path = os.path.join('output', 'history_by_date')

    banner = "=" * 50
    print(banner)
    print("开始检查数据完整性...")
    print(f"数据库路径: {db_path}")
    print(f"JSON文件路径: {json_root_path}")
    print(banner)

    # Bail out early if either input location is missing.
    if not os.path.exists(db_path):
        print(f"错误: 数据库文件不存在: {db_path}")
        return
    if not os.path.exists(json_root_path):
        print(f"错误: JSON文件目录不存在: {json_root_path}")
        return

    summary = check_data_integrity(db_path, json_root_path)

    print("\n" + banner)
    print(f"检查完成! 详细结果已保存到 {summary['report_file']}")
    print(banner)

    # Short textual verdict on stdout.
    delta = summary['difference']
    if delta > 0:
        print(f"数据库总共缺少 {delta} 条记录")
        if summary["missing_records_count"] > 0:
            print(f"\n缺少记录的日期数: {summary['missing_records_count']}")
    elif delta < 0:
        print(f"数据库总共多出 {-delta} 条记录")
    else:
        print("数据库和JSON文件记录数一致")


if __name__ == "__main__":
    main()
|
2929004360/ruoyi-sign
| 1,679
|
ruoyi-common/src/main/java/com/ruoyi/common/utils/sql/SqlUtil.java
|
package com.ruoyi.common.utils.sql;
import com.ruoyi.common.exception.UtilException;
import com.ruoyi.common.utils.StringUtils;
/**
* sql操作工具类
*
* @author ruoyi
*/
public class SqlUtil
{
    /**
     * Blacklisted SQL keywords, separated by '|'.
     */
    public static String SQL_REGEX = "and |extractvalue|updatexml|sleep|exec |insert |select |delete |update |drop |count |chr |mid |master |truncate |char |declare |or |union |like |+|/*|user()";

    /**
     * Only letters, digits, underscore, space, comma and dot are allowed
     * (supports ordering by several columns).
     */
    public static String SQL_PATTERN = "[a-zA-Z0-9_\\ \\,\\.]+";

    /**
     * Maximum accepted length of an order-by expression.
     */
    private static final int ORDER_BY_MAX_LENGTH = 500;

    /**
     * Validate an order-by fragment before it is concatenated into SQL,
     * rejecting characters outside the whitelist and over-long values.
     */
    public static String escapeOrderBySql(String value)
    {
        boolean hasContent = StringUtils.isNotEmpty(value);
        if (hasContent && !isValidOrderBySql(value))
        {
            throw new UtilException("参数不符合规范,不能进行查询");
        }
        if (StringUtils.length(value) > ORDER_BY_MAX_LENGTH)
        {
            throw new UtilException("参数已超过最大限制,不能进行查询");
        }
        return value;
    }

    /**
     * Check whether the order-by fragment contains only whitelisted characters.
     */
    public static boolean isValidOrderBySql(String value)
    {
        return value.matches(SQL_PATTERN);
    }

    /**
     * Reject values containing any blacklisted SQL keyword (case-insensitive).
     * Note: "\\|" is a separator-character set for StringUtils.split (the
     * chars '\' and '|'), so the blacklist is effectively split on '|'.
     */
    public static void filterKeyword(String value)
    {
        if (StringUtils.isEmpty(value))
        {
            return;
        }
        for (String keyword : StringUtils.split(SQL_REGEX, "\\|"))
        {
            if (StringUtils.indexOfIgnoreCase(value, keyword) > -1)
            {
                throw new UtilException("参数存在SQL注入风险");
            }
        }
    }
}
|
2977094657/BilibiliHistoryFetcher
| 20,269
|
scripts/dynamic_db.py
|
import os
import json
import sqlite3
from datetime import datetime
from typing import Iterable, Optional, Tuple, Dict, Any, List
from loguru import logger
from scripts.utils import get_database_path
from scripts.dynamic_media import collect_image_urls
def _get_db_path() -> str:
    """Resolve the absolute path of the dynamics SQLite database file."""
    return get_database_path('bilibili_dynamic.db')
def _ensure_schema(conn: sqlite3.Connection) -> None:
"""确保动态相关表结构存在"""
cursor = conn.cursor()
# 规范化主表:核心信息
cursor.execute(
"""
CREATE TABLE IF NOT EXISTS dynamic_core (
host_mid TEXT NOT NULL,
id_str TEXT NOT NULL,
type TEXT,
visible INTEGER,
publish_ts INTEGER,
comment_id_str TEXT,
comment_type INTEGER,
rid_str TEXT,
txt TEXT,
author_name TEXT,
bvid TEXT,
title TEXT,
cover TEXT,
desc TEXT,
article_title TEXT,
article_covers TEXT,
opus_title TEXT,
opus_summary_text TEXT,
media_locals TEXT,
media_count INTEGER,
live_media_locals TEXT,
live_media_count INTEGER,
fetch_time INTEGER NOT NULL,
PRIMARY KEY (host_mid, id_str)
)
"""
)
# media_urls 字段移除:不需要旧数据迁移
cursor.execute(
"""
CREATE INDEX IF NOT EXISTS idx_dynamic_core_publish_ts
ON dynamic_core(publish_ts)
"""
)
# 作者信息
cursor.execute(
"""
CREATE TABLE IF NOT EXISTS dynamic_author (
host_mid TEXT NOT NULL,
id_str TEXT NOT NULL,
author_mid TEXT,
author_name TEXT,
face TEXT,
PRIMARY KEY (host_mid, id_str)
)
"""
)
# 统计信息
cursor.execute(
"""
CREATE TABLE IF NOT EXISTS dynamic_stat (
host_mid TEXT NOT NULL,
id_str TEXT NOT NULL,
like_count INTEGER,
comment_count INTEGER,
repost_count INTEGER,
view_count INTEGER,
PRIMARY KEY (host_mid, id_str)
)
"""
)
# 话题
cursor.execute(
"""
CREATE TABLE IF NOT EXISTS dynamic_topic (
host_mid TEXT NOT NULL,
id_str TEXT NOT NULL,
topic_name TEXT,
jump_url TEXT,
PRIMARY KEY (host_mid, id_str)
)
"""
)
# major: 图文(opus)
cursor.execute(
"""
CREATE TABLE IF NOT EXISTS major_opus_pics (
host_mid TEXT NOT NULL,
id_str TEXT NOT NULL,
idx INTEGER NOT NULL,
url TEXT NOT NULL,
PRIMARY KEY (host_mid, id_str, idx)
)
"""
)
# major: archive 跳转URL列表(逐条展开)
cursor.execute(
"""
CREATE TABLE IF NOT EXISTS major_archive_jump_urls (
host_mid TEXT NOT NULL,
id_str TEXT NOT NULL,
idx INTEGER NOT NULL,
url TEXT NOT NULL,
PRIMARY KEY (host_mid, id_str, idx)
)
"""
)
conn.commit()
def get_connection() -> sqlite3.Connection:
    """Open a connection to the dynamics database, creating dirs and schema as needed."""
    path = _get_db_path()
    os.makedirs(os.path.dirname(path), exist_ok=True)
    connection = sqlite3.connect(path)
    _ensure_schema(connection)
    return connection
def dynamic_core_exists(conn: sqlite3.Connection, host_mid: int, id_str: str) -> bool:
    """Return True when the (host_mid, id_str) pair is already stored in dynamic_core."""
    row = conn.cursor().execute(
        "SELECT 1 FROM dynamic_core WHERE host_mid = ? AND id_str = ? LIMIT 1",
        (str(host_mid), str(id_str)),
    ).fetchone()
    return row is not None
def _to_int(value: Any) -> Optional[int]:
try:
if value is None:
return None
return int(value)
except Exception:
return None
def save_normalized_dynamic_item(conn: sqlite3.Connection, host_mid: int, item: Dict[str, Any]) -> None:
    """Persist one raw dynamic entry into the normalized tables.

    Writes three tables in one transaction (committed at the end):
      - dynamic_core   — core fields plus flattened archive/article/opus info
      - dynamic_author — author mid / name / face
      - dynamic_stat   — like / comment / repost / view counters

    Args:
        conn: open SQLite connection with the dynamics schema present.
        host_mid: UID of the followed uploader this item belongs to.
        item: one raw dynamic entry as returned by the Bilibili API.
    """
    cursor = conn.cursor()
    fetch_time = int(datetime.now().timestamp())
    logger.debug(f"normalize.begin host_mid={host_mid}")

    basic = item.get("basic", {}) if isinstance(item, dict) else {}
    modules_raw = item.get("modules")

    # "modules" may be a dict (old API shape) or a list of tagged module dicts.
    module_author: Dict[str, Any] = {}
    module_stat: Dict[str, Any] = {}
    module_dynamic: Dict[str, Any] = {}
    if isinstance(modules_raw, dict):
        module_author = modules_raw.get("module_author", {})
        module_stat = modules_raw.get("module_stat", {})
        module_dynamic = modules_raw.get("module_dynamic", {})
    elif isinstance(modules_raw, list):
        for mod in modules_raw:
            if not isinstance(mod, dict):
                continue
            mtype = mod.get("module_type")
            # The new layout stores the payload next to the tag, e.g.
            # {"module_author": {...}, "module_type": "MODULE_TYPE_AUTHOR"}
            if mtype == "MODULE_TYPE_AUTHOR" and not module_author:
                module_author = mod.get("module_author", {})
            elif mtype == "MODULE_TYPE_STAT" and not module_stat:
                module_stat = mod.get("module_stat", {})
            elif mtype == "MODULE_TYPE_DYNAMIC" and not module_dynamic:
                module_dynamic = mod.get("module_dynamic", {})

    # Resolve the dynamic id. BUGFIX: the previous fallback str(item.get("id"))
    # produced the literal string "None" when "id" was absent, which slipped
    # past the guard below and stored a bogus primary key.
    id_str = item.get("id_str") or basic.get("id_str")
    if not id_str:
        raw_id = item.get("id")
        id_str = str(raw_id) if raw_id is not None else None
    if not id_str:
        logger.warning("normalize.skip: missing id_str")
        return
    logger.debug(f"normalize.id id_str={id_str}")

    # Core fields.
    publish_ts = _to_int(module_author.get("pub_ts"))
    comment_id_str = basic.get("comment_id_str")
    comment_type = _to_int(basic.get("comment_type"))
    rid_str = basic.get("rid_str")
    visible = item.get("visible")

    # Author display name. (A previous revision also walked the avatar
    # fallback-layer tree for a face URL but never used the result; the face
    # is stored from module_author["face"] in dynamic_author below.)
    author_name = module_author.get("name") or module_author.get("uname")

    # Plain-text body of the dynamic.
    txt = None
    desc_obj = module_dynamic.get("desc") if isinstance(module_dynamic, dict) else None
    if isinstance(desc_obj, dict):
        txt = desc_obj.get("text")
    elif isinstance(desc_obj, str):
        txt = desc_obj
    if not txt and isinstance(modules_raw, list):
        for mod in modules_raw:
            if isinstance(mod, dict) and isinstance(mod.get("module_desc"), dict):
                txt = mod.get("module_desc", {}).get("text")
                if txt:
                    break

    # Media columns are no longer populated from a separate table.
    media_locals_joined = None
    media_count = 0

    # Flatten archive / article / opus payloads into dynamic_core columns.
    archive_bvid = archive_title = archive_cover = archive_desc = None
    article_title = article_covers = None
    opus_title = opus_summary_text = None
    major = module_dynamic.get("major") if isinstance(module_dynamic, dict) else None
    if isinstance(major, dict):
        arc = major.get("archive")
        if isinstance(arc, dict):
            archive_bvid = arc.get("bvid")
            archive_title = arc.get("title")
            archive_cover = arc.get("cover")
            archive_desc = arc.get("desc")
        art = major.get("article")
        if isinstance(art, dict):
            article_title = art.get("title")
            covers = art.get("covers")
            if isinstance(covers, list) and covers:
                # json is imported at module level; store covers as JSON text.
                article_covers = json.dumps(covers)
        opus = major.get("opus")
        if isinstance(opus, dict):
            opus_title = opus.get("title")
            summary = opus.get("summary")
            if isinstance(summary, dict):
                opus_summary_text = summary.get("text")

    logger.info(f"normalize.core.upsert begin host_mid={host_mid} id_str={id_str} media_count={media_count}")
    cursor.execute(
        """
        INSERT INTO dynamic_core (host_mid, id_str, type, visible, publish_ts, comment_id_str, comment_type, rid_str,
                                  txt, author_name, bvid, title, cover, desc, article_title, article_covers,
                                  opus_title, opus_summary_text, media_locals, media_count, live_media_locals, live_media_count,
                                  fetch_time)
        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        ON CONFLICT(host_mid, id_str) DO UPDATE SET
            type = excluded.type,
            visible = excluded.visible,
            publish_ts = excluded.publish_ts,
            comment_id_str = excluded.comment_id_str,
            comment_type = excluded.comment_type,
            rid_str = excluded.rid_str,
            txt = excluded.txt,
            author_name = excluded.author_name,
            bvid = excluded.bvid,
            title = excluded.title,
            cover = excluded.cover,
            desc = excluded.desc,
            article_title = excluded.article_title,
            article_covers = excluded.article_covers,
            opus_title = excluded.opus_title,
            opus_summary_text = excluded.opus_summary_text,
            media_locals = excluded.media_locals,
            media_count = excluded.media_count,
            live_media_locals = excluded.live_media_locals,
            live_media_count = excluded.live_media_count,
            fetch_time = excluded.fetch_time
        """,
        (
            str(host_mid),
            str(id_str),
            item.get("type"),
            1 if visible else 0 if visible is not None else None,
            publish_ts,
            comment_id_str,
            comment_type,
            rid_str,
            txt,
            author_name,
            archive_bvid,
            archive_title,
            archive_cover,
            archive_desc if isinstance(archive_desc, str) else None,
            article_title,
            article_covers,
            opus_title,
            opus_summary_text,
            media_locals_joined,
            media_count,
            None,  # live_media_locals — filled in later by the route layer
            0,     # live_media_count — filled in later by the route layer
            fetch_time,
        ),
    )
    logger.info(f"normalize.core.saved host_mid={host_mid} id_str={id_str}")

    # Author row.
    author_mid = module_author.get("mid") or module_author.get("id")
    face = module_author.get("face")
    cursor.execute(
        """
        INSERT INTO dynamic_author (host_mid, id_str, author_mid, author_name, face)
        VALUES (?, ?, ?, ?, ?)
        ON CONFLICT(host_mid, id_str) DO UPDATE SET
            author_mid = excluded.author_mid,
            author_name = excluded.author_name,
            face = excluded.face
        """,
        (
            str(host_mid),
            str(id_str),
            str(author_mid) if author_mid is not None else None,
            author_name,
            face,
        ),
    )

    # Counters arrive either as a bare int/str or nested as {"count": n}.
    def _stat_count(scalar_key: str, dict_key: str) -> Optional[int]:
        scalar = module_stat.get(scalar_key)
        if isinstance(scalar, (int, str)):
            return _to_int(scalar)
        nested = module_stat.get(dict_key)
        if isinstance(nested, dict):
            return _to_int(nested.get("count"))
        return None

    like_count = _stat_count("like", "like")
    comment_count = _stat_count("comment", "comment")
    repost_count = _stat_count("repost", "forward")  # dict form uses the "forward" key
    view_count = _stat_count("view", "view")
    cursor.execute(
        """
        INSERT INTO dynamic_stat (host_mid, id_str, like_count, comment_count, repost_count, view_count)
        VALUES (?, ?, ?, ?, ?, ?)
        ON CONFLICT(host_mid, id_str) DO UPDATE SET
            like_count = excluded.like_count,
            comment_count = excluded.comment_count,
            repost_count = excluded.repost_count,
            view_count = excluded.view_count
        """,
        (
            str(host_mid),
            str(id_str),
            like_count,
            comment_count,
            repost_count,
            view_count,
        ),
    )
    conn.commit()
def list_hosts_with_stats(
    conn: sqlite3.Connection,
    limit: int = 50,
    offset: int = 0,
) -> List[Dict[str, Any]]:
    """List uploaders (host_mid) that already have stored dynamics, with stats.

    Each entry carries: host_mid, item_count, core_count, last_publish_ts,
    last_fetch_time. item_count and core_count are both counted from
    dynamic_core and are therefore equal.
    """
    logger.debug(f"db.list_hosts_with_stats begin limit={limit} offset={offset}")
    rows = conn.cursor().execute(
        """
        SELECT dc.host_mid AS host_mid,
               COUNT(*) AS item_count,
               COUNT(*) AS core_count,
               MAX(dc.publish_ts) AS last_publish_ts,
               MAX(dc.fetch_time) AS last_fetch_time
        FROM dynamic_core AS dc
        GROUP BY dc.host_mid
        ORDER BY (MAX(dc.publish_ts) IS NULL) ASC, MAX(dc.publish_ts) DESC, COUNT(*) DESC
        LIMIT ? OFFSET ?
        """,
        (limit, offset),
    ).fetchall()
    results: List[Dict[str, Any]] = [
        {
            "host_mid": str(mid) if mid is not None else None,
            "item_count": int(items) if items is not None else 0,
            "core_count": int(cores) if cores is not None else 0,
            "last_publish_ts": int(last_pub) if last_pub is not None else None,
            "last_fetch_time": int(last_fetch) if last_fetch is not None else None,
        }
        for mid, items, cores, last_pub, last_fetch in rows
    ]
    logger.debug(f"db.list_hosts_with_stats done count={len(results)}")
    return results
def list_dynamics_for_host(
    conn: sqlite3.Connection,
    host_mid: int,
    limit: int = 20,
    offset: int = 0,
) -> Dict[str, Any]:
    """List stored dynamics for one uploader from dynamic_core.

    Returns:
        {"source": "core", "total": int, "items": [...]} where each item is a
        dict of the dynamic_core columns with integer columns coerced to int
        (or None). An empty result is returned when the host has no rows.
    """
    mid = str(host_mid)
    cursor = conn.cursor()

    count_row = cursor.execute(
        "SELECT COUNT(*) FROM dynamic_core WHERE host_mid = ?",
        (mid,),
    ).fetchone()
    total = int(count_row[0]) if count_row and count_row[0] is not None else 0
    if total == 0:
        return {"source": "core", "total": 0, "items": []}

    def _opt_int(value):
        # Coerce to int, passing NULLs through unchanged.
        return int(value) if value is not None else None

    rows = cursor.execute(
        """
        SELECT host_mid, id_str, type, visible, publish_ts, comment_id_str, comment_type, rid_str,
               txt, author_name, bvid, title, cover, desc, article_title, article_covers,
               opus_title, opus_summary_text, media_locals, media_count, live_media_locals, live_media_count, fetch_time
        FROM dynamic_core
        WHERE host_mid = ?
        ORDER BY (publish_ts IS NULL) ASC, publish_ts DESC, fetch_time DESC
        LIMIT ? OFFSET ?
        """,
        (mid, limit, offset),
    ).fetchall()

    items: List[Dict[str, Any]] = []
    for (row_mid, id_str, dyn_type, visible, publish_ts, comment_id_str, comment_type,
         rid_str, txt, author_name, bvid, title, cover, desc, article_title,
         article_covers, opus_title, opus_summary_text, media_locals, media_count,
         live_media_locals, live_media_count, fetch_time) in rows:
        items.append(
            {
                "host_mid": str(row_mid) if row_mid is not None else mid,
                "id_str": str(id_str) if id_str is not None else None,
                "type": dyn_type,
                "visible": _opt_int(visible),
                "publish_ts": _opt_int(publish_ts),
                "comment_id_str": comment_id_str,
                "comment_type": _opt_int(comment_type),
                "rid_str": rid_str,
                "txt": txt,
                "author_name": author_name,
                "bvid": bvid,
                "title": title,
                "cover": cover,
                "desc": desc,
                "article_title": article_title,
                "article_covers": article_covers,
                "opus_title": opus_title,
                "opus_summary_text": opus_summary_text,
                "media_locals": media_locals,
                "media_count": _opt_int(media_count),
                "live_media_locals": live_media_locals,
                "live_media_count": _opt_int(live_media_count),
                "fetch_time": _opt_int(fetch_time),
            }
        )
    return {"source": "core", "total": total, "items": items}
def purge_host(conn: sqlite3.Connection, host_mid: int) -> Dict[str, int]:
    """Delete every stored row belonging to one uploader across all dynamic tables.

    Tables are cleared child-first (dynamic_core last) to stay safe should FK
    constraints ever be enabled. Per-table failures are logged and skipped
    rather than aborting the whole purge.

    Returns:
        Dict[str, int]: deleted row count per table plus a "total" entry.
    """
    mid = str(host_mid)
    cursor = conn.cursor()
    table_order = (
        "major_archive_jump_urls",
        "major_opus_pics",
        "dynamic_topic",
        "dynamic_stat",
        "dynamic_author",
        "dynamic_core",
    )
    deleted: Dict[str, int] = {}
    total = 0
    for table in table_order:
        try:
            cursor.execute(f"DELETE FROM {table} WHERE host_mid = ?", (mid,))
            # sqlite3 rowcount is the number of rows affected; guard anyway.
            count = cursor.rowcount if cursor.rowcount is not None else 0
        except Exception as exc:
            logger.warning(f"purge_host: delete from {table} failed: {exc}")
            count = 0
        deleted[table] = count
        total += count
    try:
        conn.commit()
    except Exception as exc:
        logger.warning(f"purge_host: commit failed: {exc}")
    deleted["total"] = total
    return deleted
|
2929004360/ruoyi-sign
| 13,319
|
ruoyi-common/src/main/java/com/ruoyi/common/utils/reflect/ReflectUtils.java
|
package com.ruoyi.common.utils.reflect;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.Date;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.apache.poi.ss.usermodel.DateUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.ruoyi.common.core.text.Convert;
import com.ruoyi.common.utils.DateUtils;
/**
* 反射工具类. 提供调用getter/setter方法, 访问私有变量, 调用私有方法, 获取泛型类型Class, 被AOP过的真实类等工具函数.
*
* @author ruoyi
*/
@SuppressWarnings("rawtypes")
public class ReflectUtils
{
private static final String SETTER_PREFIX = "set";
private static final String GETTER_PREFIX = "get";
private static final String CGLIB_CLASS_SEPARATOR = "$$";
private static Logger logger = LoggerFactory.getLogger(ReflectUtils.class);
/**
* 调用Getter方法.
* 支持多级,如:对象名.对象名.方法
*/
@SuppressWarnings("unchecked")
public static <E> E invokeGetter(Object obj, String propertyName)
{
Object object = obj;
for (String name : StringUtils.split(propertyName, "."))
{
String getterMethodName = GETTER_PREFIX + StringUtils.capitalize(name);
object = invokeMethod(object, getterMethodName, new Class[] {}, new Object[] {});
}
return (E) object;
}
/**
* 调用Setter方法, 仅匹配方法名。
* 支持多级,如:对象名.对象名.方法
*/
public static <E> void invokeSetter(Object obj, String propertyName, E value)
{
Object object = obj;
String[] names = StringUtils.split(propertyName, ".");
for (int i = 0; i < names.length; i++)
{
if (i < names.length - 1)
{
String getterMethodName = GETTER_PREFIX + StringUtils.capitalize(names[i]);
object = invokeMethod(object, getterMethodName, new Class[] {}, new Object[] {});
}
else
{
String setterMethodName = SETTER_PREFIX + StringUtils.capitalize(names[i]);
invokeMethodByName(object, setterMethodName, new Object[] { value });
}
}
}
/**
* 直接读取对象属性值, 无视private/protected修饰符, 不经过getter函数.
*/
@SuppressWarnings("unchecked")
public static <E> E getFieldValue(final Object obj, final String fieldName)
{
Field field = getAccessibleField(obj, fieldName);
if (field == null)
{
logger.debug("在 [" + obj.getClass() + "] 中,没有找到 [" + fieldName + "] 字段 ");
return null;
}
E result = null;
try
{
result = (E) field.get(obj);
}
catch (IllegalAccessException e)
{
logger.error("不可能抛出的异常{}", e.getMessage());
}
return result;
}
/**
* 直接设置对象属性值, 无视private/protected修饰符, 不经过setter函数.
*/
public static <E> void setFieldValue(final Object obj, final String fieldName, final E value)
{
Field field = getAccessibleField(obj, fieldName);
if (field == null)
{
// throw new IllegalArgumentException("在 [" + obj.getClass() + "] 中,没有找到 [" + fieldName + "] 字段 ");
logger.debug("在 [" + obj.getClass() + "] 中,没有找到 [" + fieldName + "] 字段 ");
return;
}
try
{
field.set(obj, value);
}
catch (IllegalAccessException e)
{
logger.error("不可能抛出的异常: {}", e.getMessage());
}
}
/**
* 直接调用对象方法, 无视private/protected修饰符.
* 用于一次性调用的情况,否则应使用getAccessibleMethod()函数获得Method后反复调用.
* 同时匹配方法名+参数类型,
*/
@SuppressWarnings("unchecked")
public static <E> E invokeMethod(final Object obj, final String methodName, final Class<?>[] parameterTypes,
final Object[] args)
{
if (obj == null || methodName == null)
{
return null;
}
Method method = getAccessibleMethod(obj, methodName, parameterTypes);
if (method == null)
{
logger.debug("在 [" + obj.getClass() + "] 中,没有找到 [" + methodName + "] 方法 ");
return null;
}
try
{
return (E) method.invoke(obj, args);
}
catch (Exception e)
{
String msg = "method: " + method + ", obj: " + obj + ", args: " + args + "";
throw convertReflectionExceptionToUnchecked(msg, e);
}
}
/**
* 直接调用对象方法, 无视private/protected修饰符,
* 用于一次性调用的情况,否则应使用getAccessibleMethodByName()函数获得Method后反复调用.
* 只匹配函数名,如果有多个同名函数调用第一个。
*/
@SuppressWarnings("unchecked")
public static <E> E invokeMethodByName(final Object obj, final String methodName, final Object[] args)
{
Method method = getAccessibleMethodByName(obj, methodName, args.length);
if (method == null)
{
// 如果为空不报错,直接返回空。
logger.debug("在 [" + obj.getClass() + "] 中,没有找到 [" + methodName + "] 方法 ");
return null;
}
try
{
// 类型转换(将参数数据类型转换为目标方法参数类型)
Class<?>[] cs = method.getParameterTypes();
for (int i = 0; i < cs.length; i++)
{
if (args[i] != null && !args[i].getClass().equals(cs[i]))
{
if (cs[i] == String.class)
{
args[i] = Convert.toStr(args[i]);
if (StringUtils.endsWith((String) args[i], ".0"))
{
args[i] = StringUtils.substringBefore((String) args[i], ".0");
}
}
else if (cs[i] == Integer.class)
{
args[i] = Convert.toInt(args[i]);
}
else if (cs[i] == Long.class)
{
args[i] = Convert.toLong(args[i]);
}
else if (cs[i] == Double.class)
{
args[i] = Convert.toDouble(args[i]);
}
else if (cs[i] == Float.class)
{
args[i] = Convert.toFloat(args[i]);
}
else if (cs[i] == Date.class)
{
if (args[i] instanceof String)
{
args[i] = DateUtils.parseDate(args[i]);
}
else
{
args[i] = DateUtil.getJavaDate((Double) args[i]);
}
}
else if (cs[i] == boolean.class || cs[i] == Boolean.class)
{
args[i] = Convert.toBool(args[i]);
}
}
}
return (E) method.invoke(obj, args);
}
catch (Exception e)
{
String msg = "method: " + method + ", obj: " + obj + ", args: " + args + "";
throw convertReflectionExceptionToUnchecked(msg, e);
}
}
/**
 * Walk up the class hierarchy looking for a declared field with the given
 * name, forcing it accessible when found.
 * Returns null once the search reaches Object without a match.
 */
public static Field getAccessibleField(final Object obj, final String fieldName)
{
    // A null target is tolerated: simply report "not found".
    if (obj == null)
    {
        return null;
    }
    Validate.notBlank(fieldName, "fieldName can't be blank");
    Class<?> current = obj.getClass();
    while (current != Object.class)
    {
        try
        {
            Field found = current.getDeclaredField(fieldName);
            makeAccessible(found);
            return found;
        }
        catch (NoSuchFieldException e)
        {
            // Not declared on this class; keep climbing toward Object.
            current = current.getSuperclass();
        }
    }
    return null;
}
/**
 * Walk up the class hierarchy looking for a declared method matching both
 * name and parameter types, forcing it accessible when found.
 * Returns null once the search reaches Object without a match.
 * Use this when the method will be invoked repeatedly: fetch the Method once,
 * then call Method.invoke(Object, Object...) yourself.
 */
public static Method getAccessibleMethod(final Object obj, final String methodName,
        final Class<?>... parameterTypes)
{
    // A null target is tolerated: simply report "not found".
    if (obj == null)
    {
        return null;
    }
    Validate.notBlank(methodName, "methodName can't be blank");
    Class<?> current = obj.getClass();
    while (current != Object.class)
    {
        try
        {
            Method found = current.getDeclaredMethod(methodName, parameterTypes);
            makeAccessible(found);
            return found;
        }
        catch (NoSuchMethodException e)
        {
            // Not declared here; keep climbing toward Object.
            current = current.getSuperclass();
        }
    }
    return null;
}
/**
 * Walk up the class hierarchy looking for a declared method matching the
 * name and argument count only (parameter types are ignored), forcing it
 * accessible when found.
 * Returns null once the search reaches Object without a match.
 * Use this when the method will be invoked repeatedly: fetch the Method once,
 * then call Method.invoke(Object, Object...) yourself.
 */
public static Method getAccessibleMethodByName(final Object obj, final String methodName, int argsNum)
{
    // A null target is tolerated: simply report "not found".
    if (obj == null)
    {
        return null;
    }
    Validate.notBlank(methodName, "methodName can't be blank");
    Class<?> current = obj.getClass();
    while (current != Object.class)
    {
        for (Method candidate : current.getDeclaredMethods())
        {
            boolean sameName = candidate.getName().equals(methodName);
            if (sameName && candidate.getParameterTypes().length == argsNum)
            {
                makeAccessible(candidate);
                return candidate;
            }
        }
        current = current.getSuperclass();
    }
    return null;
}
/**
 * Force a private/protected method to be callable, touching
 * setAccessible(true) only when actually required so the JDK
 * SecurityManager is not triggered needlessly.
 */
public static void makeAccessible(Method method)
{
    boolean hidden = !Modifier.isPublic(method.getModifiers())
            || !Modifier.isPublic(method.getDeclaringClass().getModifiers());
    if (hidden && !method.isAccessible())
    {
        method.setAccessible(true);
    }
}
/**
 * Force a private/protected/final field to be writable/readable, touching
 * setAccessible(true) only when actually required so the JDK
 * SecurityManager is not triggered needlessly.
 */
public static void makeAccessible(Field field)
{
    int mods = field.getModifiers();
    boolean hidden = !Modifier.isPublic(mods)
            || !Modifier.isPublic(field.getDeclaringClass().getModifiers())
            || Modifier.isFinal(mods);
    if (hidden && !field.isAccessible())
    {
        field.setAccessible(true);
    }
}
/**
 * Reflectively extract the first actual type argument declared on the
 * generic superclass of {@code clazz}. The generic parameter must be
 * declared on the parent class; returns Object.class when it cannot
 * be resolved.
 */
@SuppressWarnings("unchecked")
public static <T> Class<T> getClassGenricType(final Class clazz)
{
    // Delegate to the indexed variant, asking for type argument 0.
    return getClassGenricType(clazz, 0);
}
/**
 * Reflectively extract the {@code index}-th actual type argument declared
 * on the generic superclass of {@code clazz}.
 * Falls back to Object.class when the superclass is not parameterized, the
 * index is out of range, or the type argument is not a concrete class.
 */
public static Class getClassGenricType(final Class clazz, final int index)
{
    Type superType = clazz.getGenericSuperclass();
    if (!(superType instanceof ParameterizedType))
    {
        logger.debug(clazz.getSimpleName() + "'s superclass not ParameterizedType");
        return Object.class;
    }
    Type[] typeArgs = ((ParameterizedType) superType).getActualTypeArguments();
    if (index < 0 || index >= typeArgs.length)
    {
        logger.debug("Index: " + index + ", Size of " + clazz.getSimpleName() + "'s Parameterized Type: "
                + typeArgs.length);
        return Object.class;
    }
    if (!(typeArgs[index] instanceof Class))
    {
        logger.debug(clazz.getSimpleName() + " not set the actual class on superclass generic parameter");
        return Object.class;
    }
    return (Class) typeArgs[index];
}
/**
 * Return the user-defined class of {@code instance}, unwrapping CGLIB
 * proxy subclasses (names containing the CGLIB class separator) back to
 * their real superclass.
 *
 * @param instance the object to inspect; must not be null
 * @throws RuntimeException if instance is null
 */
public static Class<?> getUserClass(Object instance)
{
    if (instance == null)
    {
        throw new RuntimeException("Instance must not be null");
    }
    // FIX: getClass() never returns null, so the former `clazz != null`
    // check was dead code; also use the parameterized Class<?> type.
    Class<?> clazz = instance.getClass();
    if (clazz.getName().contains(CGLIB_CLASS_SEPARATOR))
    {
        Class<?> superClass = clazz.getSuperclass();
        // A proxy directly over Object has no meaningful user class to unwrap.
        if (superClass != null && !Object.class.equals(superClass))
        {
            return superClass;
        }
    }
    return clazz;
}
/**
 * Convert checked reflection exceptions into unchecked ones.
 * InvocationTargetException is unwrapped to its target cause; access and
 * lookup failures become IllegalArgumentException; everything else is
 * wrapped in a plain RuntimeException.
 */
public static RuntimeException convertReflectionExceptionToUnchecked(String msg, Exception e)
{
    // Unwrap the real cause thrown by the invoked method.
    if (e instanceof InvocationTargetException)
    {
        return new RuntimeException(msg, ((InvocationTargetException) e).getTargetException());
    }
    boolean isArgumentProblem = e instanceof IllegalAccessException
            || e instanceof IllegalArgumentException
            || e instanceof NoSuchMethodException;
    if (isArgumentProblem)
    {
        return new IllegalArgumentException(msg, e);
    }
    return new RuntimeException(msg, e);
}
}
|
2977094657/BilibiliHistoryFetcher
| 12,159
|
scripts/dynamic_media.py
|
import asyncio
import hashlib
import os
from typing import Dict, Iterable, List, Set, Tuple, Any
from urllib.parse import urlparse
import aiohttp
import aiofiles
def _looks_like_image_url(url: str) -> bool:
if not isinstance(url, str):
return False
if not url.startswith("http://") and not url.startswith("https://"):
return False
lower = url.lower()
if any(ext in lower for ext in [".jpg", ".jpeg", ".png", ".gif", ".webp"]):
return True
# 常见B站图片CDN路径包含 /bfs/,即便未带扩展名
if "/bfs/" in lower:
return True
return False
def _walk_collect_urls(obj: Any, path: List[str], collector: Set[str]) -> None:
    """Recursively walk *obj*, adding image URLs it contains to *collector*.

    URLs that sit under label, avatar, avatar-frame (pendant), decorate-card,
    interaction (hot comments) or emoji contexts are excluded, as are avatar
    and pendant URLs recognised by their CDN path.
    """
    lowered_path = [segment.lower() for segment in path]

    def _path_has(*names: str) -> bool:
        return any(segment in names for segment in lowered_path)

    def _is_emoji_node() -> bool:
        # An emoji node is either explicitly typed, or carries an "emoji"
        # payload with both icon_url and text.
        if not isinstance(obj, dict):
            return False
        if obj.get("type") == "RICH_TEXT_NODE_TYPE_EMOJI":
            return True
        emoji = obj.get("emoji")
        return isinstance(emoji, dict) and "icon_url" in emoji and "text" in emoji

    # Once inside an excluded context, nothing beneath it is collected.
    if (
        _path_has("label")
        or _path_has("avatar", "face", "avatar_subscript_url")
        or _path_has("pendant", "pendent")
        or _path_has("decorate", "decorate_card", "decoration_card")
        or _path_has("module_interaction")
        or _is_emoji_node()
    ):
        return

    if isinstance(obj, dict):
        # Known label-image fields are skipped outright.
        skipped_keys = {
            "img_label_uri_hans",
            "img_label_uri_hans_static",
            "img_label_uri_hant",
            "img_label_uri_hant_static",
            "label_theme",
        }
        for key, child in obj.items():
            if str(key).lower() in skipped_keys:
                continue
            _walk_collect_urls(child, path + [str(key)], collector)
    elif isinstance(obj, list):
        for index, child in enumerate(obj):
            _walk_collect_urls(child, path + [str(index)], collector)
    elif isinstance(obj, str) and _looks_like_image_url(obj):
        lowered = obj.lower()
        # Avatars are additionally filtered by their CDN path.
        if "/bfs/face/" in lowered or "/face/" in lowered:
            return
        # Avatar frames (pendants) usually live under /bfs/garb/item/.
        if "/bfs/garb/item/" in lowered:
            return
        collector.add(obj)
def collect_image_urls(dynamic_item: Dict) -> List[str]:
    """Extract image-like URLs (including video covers) from a dynamic item.

    Label images and avatar images are excluded by the underlying walker.
    """
    found: Set[str] = set()
    _walk_collect_urls(dynamic_item, [], found)
    return list(found)
def collect_live_media_urls(dynamic_item: Dict) -> List[Tuple[str, str]]:
    """Extract live-photo media from a dynamic item.

    Returns a list of (image_url, video_url) tuples; entries whose live URL
    is missing, empty, or the literal string "null" are skipped.
    """
    pairs: List[Tuple[str, str]] = []

    def _scan(node: Any, trail: List[str]) -> None:
        if isinstance(node, dict):
            # A node exposing both "url" and "live_url" describes a live photo.
            if "live_url" in node and "url" in node:
                still = node.get("url")
                clip = node.get("live_url")
                if still and clip and clip != "null":
                    pairs.append((still, clip))
            for key, child in node.items():
                _scan(child, trail + [str(key)])
        elif isinstance(node, list):
            for index, child in enumerate(node):
                _scan(child, trail + [str(index)])

    _scan(dynamic_item, [])
    return pairs
def collect_emoji_urls(dynamic_item: Dict) -> List[Tuple[str, str]]:
    """Extract emoji images from a dynamic item.

    Returns a list of (icon_url, text) tuples, with the surrounding square
    brackets stripped from the emoji text. Emojis inside the interaction
    module (hot comments) are ignored.
    """
    found: List[Tuple[str, str]] = []

    def _scan(node: Any, trail: List[str]) -> None:
        # Anything under module_interaction belongs to hot comments; skip it.
        if any(segment.lower() == "module_interaction" for segment in trail):
            return
        if isinstance(node, dict):
            emoji = node.get("emoji")
            if (node.get("type") == "RICH_TEXT_NODE_TYPE_EMOJI"
                    and isinstance(emoji, dict)):
                icon_url = emoji.get("icon_url")
                name = (emoji.get("text") or "").strip("[]")
                if icon_url and name:
                    found.append((icon_url, name))
            for key, child in node.items():
                _scan(child, trail + [str(key)])
        elif isinstance(node, list):
            for index, child in enumerate(node):
                _scan(child, trail + [str(index)])

    _scan(dynamic_item, [])
    return found
def _guess_extension(url: str) -> str:
path = urlparse(url).path
_, ext = os.path.splitext(path)
ext = (ext or "").lower()
if ext in [".jpg", ".jpeg", ".png", ".gif", ".webp"]:
return ext
return ".jpg"
def _hash_name(url: str) -> str:
return hashlib.md5(url.encode("utf-8")).hexdigest()
def predict_image_path(url: str, save_dir: str) -> str:
    """Predict the local save path for *url* without downloading it.

    The original file name from the URL path is used when present; a URL
    with an empty path gets an MD5-derived name instead. A missing
    extension is filled in via ``_guess_extension``. Creates *save_dir*
    as a side effect so the returned path is immediately usable.

    FIX: dropped the unused ``root`` local that ``os.path.splitext``
    returned alongside the extension.
    """
    os.makedirs(save_dir, exist_ok=True)
    base = os.path.basename(urlparse(url).path)
    if not base:
        base = _hash_name(url) + _guess_extension(url)
    # Guarantee the file name carries an extension.
    if not os.path.splitext(base)[1]:
        base = f"{base}{_guess_extension(url)}"
    return os.path.join(save_dir, base)
async def _download_one(session: aiohttp.ClientSession, url: str, save_dir: str) -> Tuple[str, str, bool]:
    """Fetch one image URL into *save_dir*.

    Returns (url, save_path, success). Non-200 responses, non-image
    content types, and any network/IO error are reported via the success
    flag rather than raised.
    """
    os.makedirs(save_dir, exist_ok=True)
    target = predict_image_path(url, save_dir)
    try:
        async with session.get(url, timeout=aiohttp.ClientTimeout(total=20)) as response:
            acceptable = (
                response.status == 200
                and "image" in response.headers.get("content-type", "").lower()
            )
            if not acceptable:
                return url, target, False
            payload = await response.read()
        async with aiofiles.open(target, "wb") as handle:
            await handle.write(payload)
        return url, target, True
    except Exception:
        return url, target, False
async def download_images(urls: Iterable[str], save_dir: str, concurrency: int = 6) -> List[Tuple[str, str, bool]]:
    """Download *urls* concurrently into *save_dir*.

    Duplicates are collapsed while keeping first-seen order; at most
    *concurrency* requests run at once. Returns (url, path, success)
    tuples in input order.
    """
    deduped = list(dict.fromkeys(urls))
    if not deduped:
        return []
    limiter = asyncio.Semaphore(concurrency)
    async with aiohttp.ClientSession(headers={
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/130.0.0.0 Safari/537.36',
        'Referer': 'https://www.bilibili.com/'
    }) as session:
        async def _guarded(target_url: str):
            async with limiter:
                return await _download_one(session, target_url, save_dir)

        return await asyncio.gather(*(_guarded(u) for u in deduped))
async def _download_live_media(session: aiohttp.ClientSession, image_url: str, video_url: str, save_dir: str) -> Tuple[str, str, str, str, bool]:
    """Download one live-photo pair (still image + short video clip).

    Returns (image_url, video_url, image_path, video_path, success) where
    success is True only when both halves end up present locally. Files
    already on disk are treated as successful downloads (cache-first).
    Any exception yields a failure tuple rather than propagating.
    """
    os.makedirs(save_dir, exist_ok=True)
    # Derive deterministic file names from the URL hashes so re-runs reuse them.
    image_name = _hash_name(image_url) + _guess_extension(image_url)
    video_name = _hash_name(video_url) + ".mp4"  # live clips are served as mp4
    image_path = os.path.join(save_dir, image_name)
    video_path = os.path.join(save_dir, video_name)
    try:
        # Still image: skip the request entirely when the file is cached.
        image_success = False
        if not os.path.exists(image_path):
            async with session.get(image_url) as resp:
                if resp.status == 200:
                    data = await resp.read()
                    async with aiofiles.open(image_path, "wb") as f:
                        await f.write(data)
                    image_success = True
        else:
            image_success = True
        # Video clip: same cache-first policy.
        video_success = False
        if not os.path.exists(video_path):
            async with session.get(video_url) as resp:
                if resp.status == 200:
                    data = await resp.read()
                    async with aiofiles.open(video_path, "wb") as f:
                        await f.write(data)
                    video_success = True
        else:
            video_success = True
        return image_url, video_url, image_path, video_path, (image_success and video_success)
    except Exception:
        # Best-effort download: report failure through the flag, never raise.
        return image_url, video_url, image_path, video_path, False
async def download_live_media(live_media_pairs: List[Tuple[str, str]], save_dir: str, concurrency: int = 3) -> List[Tuple[str, str, str, str, bool]]:
    """Concurrently download live-photo (image_url, video_url) pairs.

    At most *concurrency* pairs download at once; returns one
    (image_url, video_url, image_path, video_path, success) tuple per pair.
    """
    if not live_media_pairs:
        return []
    limiter = asyncio.Semaphore(concurrency)
    async with aiohttp.ClientSession(headers={
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/130.0.0.0 Safari/537.36',
        'Referer': 'https://www.bilibili.com/'
    }) as session:
        async def _guarded(pair: Tuple[str, str]):
            async with limiter:
                still, clip = pair
                return await _download_live_media(session, still, clip, save_dir)

        return await asyncio.gather(*(_guarded(p) for p in live_media_pairs))
async def _download_emoji(session: aiohttp.ClientSession, emoji_url: str, emoji_text: str, save_dir: str) -> Tuple[str, str, bool]:
    """Download a single emoji image named after its text.

    Returns (emoji_url, emoji_path, success); an already-cached file counts
    as success and is not re-downloaded. Errors never propagate.
    """
    import re

    os.makedirs(save_dir, exist_ok=True)
    # The emoji text becomes the file name; replace filesystem-illegal characters.
    safe_name = re.sub(r'[<>:"/\\|?*]', '_', emoji_text)
    emoji_path = os.path.join(save_dir, f"{safe_name}.png")
    try:
        if os.path.exists(emoji_path):
            return emoji_url, emoji_path, True
        async with session.get(emoji_url) as resp:
            if resp.status == 200:
                data = await resp.read()
                async with aiofiles.open(emoji_path, "wb") as f:
                    await f.write(data)
                return emoji_url, emoji_path, True
    except Exception:
        pass
    return emoji_url, emoji_path, False
async def download_emojis(emoji_pairs: List[Tuple[str, str]], save_dir: str, concurrency: int = 6) -> List[Tuple[str, str, bool]]:
    """Concurrently download (emoji_url, emoji_text) pairs into *save_dir*.

    At most *concurrency* downloads run at once; returns one
    (emoji_url, emoji_path, success) tuple per pair.
    """
    if not emoji_pairs:
        return []
    limiter = asyncio.Semaphore(concurrency)
    async with aiohttp.ClientSession(headers={
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/130.0.0.0 Safari/537.36',
        'Referer': 'https://www.bilibili.com/'
    }) as session:
        async def _guarded(pair: Tuple[str, str]):
            async with limiter:
                icon_url, text = pair
                return await _download_emoji(session, icon_url, text, save_dir)

        return await asyncio.gather(*(_guarded(p) for p in emoji_pairs))
|
281677160/openwrt-package
| 93,105
|
luci-app-homeproxy/po/templates/homeproxy.pot
|
msgid ""
msgstr "Content-Type: text/plain; charset=UTF-8"
#: htdocs/luci-static/resources/view/homeproxy/status.js:206
msgid "%s log"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1456
msgid "%s nodes removed"
msgstr ""
#: htdocs/luci-static/resources/homeproxy.js:279
msgid "/etc/homeproxy/certs/..., /etc/acme/..., /etc/ssl/..."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:609
msgid "4 or 6. Not limited if empty."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:775
#: htdocs/luci-static/resources/view/homeproxy/client.js:1209
msgid ""
"<code>%s</code> will be temporarily overwritten to <code>%s</code> after 50 "
"triggers in 30s if not enabled."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1086
#: htdocs/luci-static/resources/view/homeproxy/node.js:1105
#: htdocs/luci-static/resources/view/homeproxy/server.js:768
#: htdocs/luci-static/resources/view/homeproxy/server.js:787
msgid "<strong>Save your configuration before uploading files!</strong>"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:664
msgid "API token"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:670
msgid "Accept any if empty."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1137
msgid "Accept empty query response"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1398
msgid "Access Control"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:647
msgid "Access key ID"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:653
msgid "Access key secret"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:669
#: htdocs/luci-static/resources/view/homeproxy/client.js:1145
msgid "Action"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1055
msgid "Add a DNS rule"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:933
msgid "Add a DNS server"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:396
msgid "Add a node"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:405
msgid "Add a routing node"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:576
msgid "Add a routing rule"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1319
msgid "Add a rule set"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:156
msgid "Add a server"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:974
msgid "Additional headers to be sent to the DNS server."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:957
#: htdocs/luci-static/resources/view/homeproxy/node.js:448
msgid "Address"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:987
msgid "Address resolver"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1019
msgid "Address strategy"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:640
msgid "Alibaba Cloud DNS"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:186
#: htdocs/luci-static/resources/view/homeproxy/client.js:220
msgid "Aliyun Public DNS (223.5.5.5)"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:266
msgid "All ports"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:172
msgid "Allow access from the Internet."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1040
#: htdocs/luci-static/resources/view/homeproxy/node.js:1382
msgid "Allow insecure"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1041
msgid "Allow insecure connection at TLS client."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1383
msgid "Allow insecure connection by default when add nodes from subscriptions."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:874
#: htdocs/luci-static/resources/view/homeproxy/server.js:482
msgid "Allowed payload size is in the request."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:95
msgid "Already at the latest version."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:92
msgid "Already in updating."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:742
#: htdocs/luci-static/resources/view/homeproxy/server.js:391
msgid "Alter ID"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:678
msgid "Alternative HTTP port"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:684
msgid "Alternative TLS port"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1419
msgid "An error occurred during updating subscriptions: %s"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1226
msgid "Answer"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:429
#: htdocs/luci-static/resources/view/homeproxy/server.js:176
msgid "AnyTLS"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:245
msgid "AnyTLS padding scheme in array."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:799
#: htdocs/luci-static/resources/view/homeproxy/client.js:909
#: htdocs/luci-static/resources/view/homeproxy/client.js:1194
msgid ""
"Append a <code>edns0-subnet</code> OPT extra record with the specified IP "
"prefix to every query by default.<br/>If value is an IP address instead of "
"prefix, <code>/32</code> or <code>/128</code> will be appended automatically."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1072
msgid "Append self-signed certificate"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:407
msgid "Applied"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:400
#: htdocs/luci-static/resources/view/homeproxy/node.js:410
msgid "Apply"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:18
msgid "Are you sure to allow insecure?"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:363
msgid "Auth timeout"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:766
msgid "Authenticated length"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:561
#: htdocs/luci-static/resources/view/homeproxy/server.js:282
msgid "Authentication payload"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:554
#: htdocs/luci-static/resources/view/homeproxy/server.js:275
msgid "Authentication type"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1335
msgid "Auto update"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1336
msgid "Auto update subscriptions and geodata."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:702
msgid "BBR"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:235
msgid "BaiDu"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:556
#: htdocs/luci-static/resources/view/homeproxy/server.js:277
msgid "Base64"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:323
msgid "Based on google/gvisor."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1340
msgid "Binary file"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:453
#: htdocs/luci-static/resources/view/homeproxy/client.js:1411
#: htdocs/luci-static/resources/view/homeproxy/server.js:874
msgid "Bind interface"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1412
msgid ""
"Bind outbound traffic to specific interface. Leave empty to auto detect."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:617
#: htdocs/luci-static/resources/view/homeproxy/client.js:1105
msgid "BitTorrent"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1369
msgid "Blacklist mode"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:365
msgid "Block"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:612
#: htdocs/luci-static/resources/view/homeproxy/client.js:640
#: htdocs/luci-static/resources/view/homeproxy/client.js:1091
#: htdocs/luci-static/resources/view/homeproxy/client.js:1101
#: htdocs/luci-static/resources/view/homeproxy/server.js:869
msgid "Both"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:343
msgid "Bypass CN traffic"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:253
msgid "Bypass mainland China"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:344
msgid "Bypass mainland China traffic via firewall rules by default."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:627
msgid "CA provider"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:221
msgid "CNNIC Public DNS (210.2.4.8)"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:700
msgid "CUBIC"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1239
msgid "Cancel"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1077
#: htdocs/luci-static/resources/view/homeproxy/server.js:756
msgid "Certificate path"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:57
msgid "Check"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:105
msgid "Check update"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:217
msgid "China DNS server"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:244
msgid "China IPv4 list version"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:248
msgid "China IPv6 list version"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:252
msgid "China list version"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:629
msgid "Chromium / Cronet"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1064
#: htdocs/luci-static/resources/view/homeproxy/server.js:586
msgid "Cipher suites"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:183
msgid "Cisco Public DNS (208.67.222.222)"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:214
msgid "Clean log"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:627
msgid "Client"
msgstr ""
#: root/usr/share/luci/menu.d/luci-app-homeproxy.json:14
msgid "Client Settings"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:664
msgid "Client version"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:182
msgid "CloudFlare Public DNS (1.1.1.1)"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:641
msgid "Cloudflare"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:114
#: htdocs/luci-static/resources/view/homeproxy/server.js:142
#: htdocs/luci-static/resources/view/homeproxy/status.js:176
msgid "Collecting data..."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:267
msgid "Common ports only (bypass P2P traffic)"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:698
#: htdocs/luci-static/resources/view/homeproxy/server.js:354
msgid "Congestion control algorithm"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:232
msgid "Connection check"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:255
msgid "Custom routing"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:618
msgid "DNS"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1046
msgid "DNS Rules"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:924
msgid "DNS Servers"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:872
msgid "DNS Settings"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:639
msgid "DNS provider"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1055
msgid "DNS rule"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:179
#: htdocs/luci-static/resources/view/homeproxy/client.js:749
#: htdocs/luci-static/resources/view/homeproxy/client.js:933
msgid "DNS server"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:635
msgid "DNS01 challenge"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:619
#: htdocs/luci-static/resources/view/homeproxy/client.js:1106
msgid "DTLS"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:136
msgid "Debug"
msgstr ""
#: htdocs/luci-static/resources/homeproxy.js:17
#: htdocs/luci-static/resources/view/homeproxy/client.js:433
#: htdocs/luci-static/resources/view/homeproxy/client.js:603
#: htdocs/luci-static/resources/view/homeproxy/client.js:755
#: htdocs/luci-static/resources/view/homeproxy/client.js:1082
#: htdocs/luci-static/resources/view/homeproxy/client.js:1380
#: htdocs/luci-static/resources/view/homeproxy/node.js:710
msgid "Default"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:382
#: htdocs/luci-static/resources/view/homeproxy/client.js:434
#: htdocs/luci-static/resources/view/homeproxy/client.js:756
#: htdocs/luci-static/resources/view/homeproxy/client.js:887
#: htdocs/luci-static/resources/view/homeproxy/client.js:994
#: htdocs/luci-static/resources/view/homeproxy/client.js:1160
msgid "Default DNS (issued by WAN)"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:882
msgid "Default DNS server"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:377
msgid "Default DNS server for resolving domain name in the server address."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:877
msgid "Default DNS strategy"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:357
msgid "Default outbound"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:376
msgid "Default outbound DNS"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:358
msgid "Default outbound for connections not matched by any routing rules."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1389
msgid "Default packet encoding"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:606
msgid "Default server name"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:364
#: htdocs/luci-static/resources/view/homeproxy/client.js:466
#: htdocs/luci-static/resources/view/homeproxy/client.js:684
#: htdocs/luci-static/resources/view/homeproxy/client.js:1032
#: htdocs/luci-static/resources/view/homeproxy/client.js:1381
#: htdocs/luci-static/resources/view/homeproxy/node.js:428
msgid "Direct"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1513
msgid "Direct Domain List"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1427
#: htdocs/luci-static/resources/view/homeproxy/client.js:1472
msgid "Direct IPv4 IP-s"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1430
#: htdocs/luci-static/resources/view/homeproxy/client.js:1475
msgid "Direct IPv6 IP-s"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1433
msgid "Direct MAC-s"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:123
#: htdocs/luci-static/resources/view/homeproxy/client.js:151
#: htdocs/luci-static/resources/view/homeproxy/client.js:1421
#: htdocs/luci-static/resources/view/homeproxy/node.js:499
#: htdocs/luci-static/resources/view/homeproxy/node.js:555
#: htdocs/luci-static/resources/view/homeproxy/node.js:568
#: htdocs/luci-static/resources/view/homeproxy/node.js:1115
#: htdocs/luci-static/resources/view/homeproxy/node.js:1368
#: htdocs/luci-static/resources/view/homeproxy/server.js:276
#: htdocs/luci-static/resources/view/homeproxy/server.js:289
msgid "Disable"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:363
msgid "Disable (the service)"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:787
#: htdocs/luci-static/resources/view/homeproxy/client.js:899
msgid "Disable DNS cache"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:788
msgid "Disable DNS cache in this query."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:670
msgid "Disable HTTP challenge"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:605
#: htdocs/luci-static/resources/view/homeproxy/server.js:321
msgid "Disable Path MTU discovery"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:674
msgid "Disable TLS ALPN challenge"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:710
msgid "Disable UDP domain unmapping"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1181
msgid "Disable cache and save cache in this query."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:901
msgid "Disable cache expire"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1180
msgid "Disable dns cache"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:606
#: htdocs/luci-static/resources/view/homeproxy/server.js:322
msgid ""
"Disables Path MTU Discovery (RFC 8899). Packets will then be at most 1252 "
"(IPv4) / 1232 (IPv6) bytes in size."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:814
#: htdocs/luci-static/resources/view/homeproxy/client.js:1250
msgid "Domain keyword"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:805
#: htdocs/luci-static/resources/view/homeproxy/client.js:1241
msgid "Domain name"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:818
#: htdocs/luci-static/resources/view/homeproxy/client.js:1254
msgid "Domain regex"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:427
msgid "Domain resolver"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:347
#: htdocs/luci-static/resources/view/homeproxy/client.js:446
#: htdocs/luci-static/resources/view/homeproxy/client.js:1173
msgid "Domain strategy"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:781
msgid "Domain strategy for resolving the domain names."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:810
#: htdocs/luci-static/resources/view/homeproxy/client.js:1246
msgid "Domain suffix"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:600
msgid "Domains"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:774
msgid "Don't drop packets"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1208
msgid "Don't drop requests"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:989
#: htdocs/luci-static/resources/view/homeproxy/server.js:517
msgid "Download bandwidth"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:990
#: htdocs/luci-static/resources/view/homeproxy/server.js:518
msgid "Download bandwidth in Mbps."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:770
#: htdocs/luci-static/resources/view/homeproxy/client.js:776
msgid "Drop packets"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1203
#: htdocs/luci-static/resources/view/homeproxy/client.js:1210
msgid "Drop requests"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1375
msgid ""
"Drop/keep nodes that contain the specific keywords. <a target=\"_blank\" "
"href=\"https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/"
"Regular_Expressions\">Regex</a> is supported."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1367
msgid "Drop/keep specific nodes from subscriptions."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:691
msgid ""
"EAB (External Account Binding) contains information necessary to bind or map "
"an ACME account to some other account known by the CA.<br/>External account "
"bindings are \"used to associate an ACME account with an existing account in "
"a non-ACME system, such as a CA customer database."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1094
msgid ""
"ECH (Encrypted Client Hello) is a TLS extension that allows a client to "
"encrypt the first part of its ClientHello message."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1109
#: htdocs/luci-static/resources/view/homeproxy/server.js:835
msgid "ECH config"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1098
msgid "ECH config path"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:794
msgid "ECH key"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:798
#: htdocs/luci-static/resources/view/homeproxy/client.js:908
#: htdocs/luci-static/resources/view/homeproxy/client.js:1193
msgid "EDNS Client subnet"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:873
#: htdocs/luci-static/resources/view/homeproxy/server.js:481
msgid "Early data"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:880
#: htdocs/luci-static/resources/view/homeproxy/server.js:488
msgid "Early data header name"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:489
msgid "Early data is sent in path instead of header by default."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1211
msgid "Edit nodes"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:612
msgid "Email"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:414
#: htdocs/luci-static/resources/view/homeproxy/client.js:590
#: htdocs/luci-static/resources/view/homeproxy/client.js:942
#: htdocs/luci-static/resources/view/homeproxy/client.js:1069
#: htdocs/luci-static/resources/view/homeproxy/client.js:1328
#: htdocs/luci-static/resources/view/homeproxy/server.js:148
#: htdocs/luci-static/resources/view/homeproxy/server.js:166
msgid "Enable"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:722
#: htdocs/luci-static/resources/view/homeproxy/server.js:371
msgid ""
"Enable 0-RTT QUIC connection handshake on the client side. This is not "
"impacting much on the performance, as the protocol is fully multiplexed.<br/"
">Disabling this is highly recommended, as it is vulnerable to replay attacks."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:721
#: htdocs/luci-static/resources/view/homeproxy/server.js:370
msgid "Enable 0-RTT handshake"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:595
msgid "Enable ACME"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1093
msgid "Enable ECH"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:984
#: htdocs/luci-static/resources/view/homeproxy/server.js:512
msgid "Enable TCP Brutal"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:985
#: htdocs/luci-static/resources/view/homeproxy/server.js:513
msgid "Enable TCP Brutal congestion control algorithm"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1168
#: htdocs/luci-static/resources/view/homeproxy/server.js:855
msgid "Enable UDP fragmentation."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:328
msgid "Enable endpoint-independent NAT"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:980
#: htdocs/luci-static/resources/view/homeproxy/server.js:507
msgid "Enable padding"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:846
msgid "Enable tcp fast open for listener."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1172
msgid ""
"Enable the SUoT protocol, requires server support. Conflict with multiplex."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:612
#: htdocs/luci-static/resources/view/homeproxy/node.js:748
#: htdocs/luci-static/resources/view/homeproxy/server.js:338
msgid "Encrypt method"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:139
msgid "Error"
msgstr ""
#: htdocs/luci-static/resources/homeproxy.js:237
#: htdocs/luci-static/resources/homeproxy.js:271
#: htdocs/luci-static/resources/homeproxy.js:279
#: htdocs/luci-static/resources/homeproxy.js:297
#: htdocs/luci-static/resources/homeproxy.js:306
#: htdocs/luci-static/resources/homeproxy.js:317
#: htdocs/luci-static/resources/homeproxy.js:326
#: htdocs/luci-static/resources/homeproxy.js:328
#: htdocs/luci-static/resources/view/homeproxy/client.js:195
#: htdocs/luci-static/resources/view/homeproxy/client.js:207
#: htdocs/luci-static/resources/view/homeproxy/client.js:211
#: htdocs/luci-static/resources/view/homeproxy/client.js:230
#: htdocs/luci-static/resources/view/homeproxy/client.js:241
#: htdocs/luci-static/resources/view/homeproxy/client.js:245
#: htdocs/luci-static/resources/view/homeproxy/client.js:274
#: htdocs/luci-static/resources/view/homeproxy/client.js:504
#: htdocs/luci-static/resources/view/homeproxy/client.js:518
#: htdocs/luci-static/resources/view/homeproxy/client.js:521
#: htdocs/luci-static/resources/view/homeproxy/client.js:1356
#: htdocs/luci-static/resources/view/homeproxy/client.js:1361
#: htdocs/luci-static/resources/view/homeproxy/client.js:1364
#: htdocs/luci-static/resources/view/homeproxy/client.js:1506
#: htdocs/luci-static/resources/view/homeproxy/client.js:1538
#: htdocs/luci-static/resources/view/homeproxy/node.js:488
#: htdocs/luci-static/resources/view/homeproxy/node.js:1131
#: htdocs/luci-static/resources/view/homeproxy/node.js:1302
#: htdocs/luci-static/resources/view/homeproxy/node.js:1356
#: htdocs/luci-static/resources/view/homeproxy/node.js:1359
#: htdocs/luci-static/resources/view/homeproxy/server.js:235
#: htdocs/luci-static/resources/view/homeproxy/server.js:618
#: htdocs/luci-static/resources/view/homeproxy/server.js:620
msgid "Expecting: %s"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:690
msgid "External Account Binding"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:702
msgid "External account MAC key"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:696
msgid "External account key ID"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1236
msgid "Extra records"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:86
msgid "Failed to generate %s, error: %s."
msgstr ""
#: htdocs/luci-static/resources/homeproxy.js:261
msgid "Failed to upload %s, error: %s."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:140
msgid "Fatal"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1374
msgid "Filter keywords"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1366
msgid "Filter nodes"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:630
msgid "Firefox / uquic firefox"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:171
msgid "Firewall"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:736
#: htdocs/luci-static/resources/view/homeproxy/server.js:385
msgid "Flow"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:428
msgid "For resolving domain name in the server address."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1339
msgid "Format"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:730
msgid "Fragment TLS handshake into multiple TLS records."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:736
msgid ""
"Fragment TLS handshakes. Due to poor performance, try <code>%s</code> first."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:742
msgid "Fragment fallback delay"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:842
msgid "GET"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:256
msgid "GFW list version"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:252
msgid "GFWList"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1445
msgid "Gaming mode IPv4 IP-s"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1447
msgid "Gaming mode IPv6 IP-s"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1450
msgid "Gaming mode MAC-s"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:36
#: htdocs/luci-static/resources/view/homeproxy/server.js:38
#: htdocs/luci-static/resources/view/homeproxy/server.js:827
msgid "Generate"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:260
msgid "GitHub token"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:256
msgid "Global"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:759
msgid "Global padding"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1452
msgid "Global proxy IPv4 IP-s"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1455
msgid "Global proxy IPv6 IP-s"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1458
msgid "Global proxy MAC-s"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:146
msgid "Global settings"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:238
msgid "Google"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:184
msgid "Google Public DNS (8.8.8.8)"
msgstr ""
#: root/usr/share/rpcd/acl.d/luci-app-homeproxy.json:3
msgid "Grant access to homeproxy configuration"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:620
#: htdocs/luci-static/resources/view/homeproxy/client.js:1107
#: htdocs/luci-static/resources/view/homeproxy/node.js:430
#: htdocs/luci-static/resources/view/homeproxy/node.js:777
#: htdocs/luci-static/resources/view/homeproxy/server.js:177
#: htdocs/luci-static/resources/view/homeproxy/server.js:403
msgid "HTTP"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:952
msgid "HTTP3"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:332
msgid ""
"HTTP3 server behavior when authentication fails.<br/>A 404 page will be "
"returned if empty."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:951
msgid "HTTPS"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:778
#: htdocs/luci-static/resources/view/homeproxy/server.js:404
msgid "HTTPUpgrade"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:744
msgid "Handshake server address"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:750
msgid "Handshake server port"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:973
msgid "Headers"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:727
#: htdocs/luci-static/resources/view/homeproxy/server.js:376
msgid "Heartbeat interval"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:55
#: htdocs/luci-static/resources/view/homeproxy/client.js:57
#: htdocs/luci-static/resources/view/homeproxy/client.js:101
#: htdocs/luci-static/resources/view/homeproxy/status.js:282
#: root/usr/share/luci/menu.d/luci-app-homeproxy.json:3
msgid "HomeProxy"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:58
#: htdocs/luci-static/resources/view/homeproxy/server.js:60
#: htdocs/luci-static/resources/view/homeproxy/server.js:129
msgid "HomeProxy Server"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:535
msgid "Hop interval"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:529
msgid "Hopping port"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:826
#: htdocs/luci-static/resources/view/homeproxy/node.js:831
#: htdocs/luci-static/resources/view/homeproxy/node.js:865
#: htdocs/luci-static/resources/view/homeproxy/server.js:437
#: htdocs/luci-static/resources/view/homeproxy/server.js:442
#: htdocs/luci-static/resources/view/homeproxy/server.js:473
msgid "Host"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:669
msgid "Host key"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:674
msgid "Host key algorithms"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:581
#: htdocs/luci-static/resources/view/homeproxy/client.js:1060
msgid "Host/IP fields"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:364
msgid ""
"How long the server should wait for the client to send the authentication "
"command (in seconds)."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:432
#: htdocs/luci-static/resources/view/homeproxy/server.js:179
msgid "Hysteria"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:433
#: htdocs/luci-static/resources/view/homeproxy/server.js:180
msgid "Hysteria2"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:830
#: htdocs/luci-static/resources/view/homeproxy/client.js:1266
msgid "IP CIDR"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:608
#: htdocs/luci-static/resources/view/homeproxy/client.js:1088
msgid "IP version"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:610
#: htdocs/luci-static/resources/view/homeproxy/client.js:1089
msgid "IPv4"
msgstr ""
#: htdocs/luci-static/resources/homeproxy.js:20
msgid "IPv4 only"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:611
#: htdocs/luci-static/resources/view/homeproxy/client.js:1090
msgid "IPv6"
msgstr ""
#: htdocs/luci-static/resources/homeproxy.js:21
msgid "IPv6 only"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:297
msgid "IPv6 support"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:506
msgid "Idle session check interval"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:513
msgid "Idle session check timeout"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:553
#: htdocs/luci-static/resources/view/homeproxy/node.js:847
#: htdocs/luci-static/resources/view/homeproxy/server.js:456
msgid "Idle timeout"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:717
msgid ""
"If enabled, attempts to connect UDP connection to the destination instead of "
"listen."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:711
msgid ""
"If enabled, for UDP proxy requests addressed to a domain, the original "
"packet address will be sent in the response instead of the mapped domain."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:819
msgid ""
"If enabled, the client transport sends keepalive pings even with no active "
"connections."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:348
msgid ""
"If set, the requested domain name will be resolved to IP before routing."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:804
#: htdocs/luci-static/resources/view/homeproxy/server.js:425
msgid ""
"If the transport doesn't see any activity after a duration of this time (in "
"seconds), it pings the client to check if the connection is still active."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1073
msgid ""
"If you have the root certificate, use this option instead of allowing "
"insecure."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:326
msgid "Ignore client bandwidth"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1285
msgid "Import"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1232
#: htdocs/luci-static/resources/view/homeproxy/node.js:1311
#: htdocs/luci-static/resources/view/homeproxy/node.js:1313
msgid "Import share links"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:336
#: htdocs/luci-static/resources/view/homeproxy/server.js:860
msgid "In seconds."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:937
msgid "In seconds. Disabled by default."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:521
msgid ""
"In the check, at least the first <code>n</code> idle sessions are kept open."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:514
msgid ""
"In the check, close sessions that have been idle for longer than this, in "
"seconds."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:904
msgid "Independent cache per server"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:137
msgid "Info"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1404
msgid "Interface Control"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:560
msgid "Interrupt existing connections"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:561
msgid "Interrupt existing connections when the selected outbound has changed."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:507
msgid "Interval checking for idle sessions, in seconds."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:728
#: htdocs/luci-static/resources/view/homeproxy/server.js:377
msgid ""
"Interval for sending heartbeat packets for keeping the connection alive (in "
"seconds)."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:665
#: htdocs/luci-static/resources/view/homeproxy/client.js:1141
msgid "Invert"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:666
#: htdocs/luci-static/resources/view/homeproxy/client.js:1142
msgid "Invert match result."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:775
msgid "Key path"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1418
msgid "LAN IP Policy"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:409
#: htdocs/luci-static/resources/view/homeproxy/client.js:585
#: htdocs/luci-static/resources/view/homeproxy/client.js:937
#: htdocs/luci-static/resources/view/homeproxy/client.js:1064
#: htdocs/luci-static/resources/view/homeproxy/client.js:1323
#: htdocs/luci-static/resources/view/homeproxy/node.js:422
#: htdocs/luci-static/resources/view/homeproxy/server.js:160
msgid "Label"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:743
#: htdocs/luci-static/resources/view/homeproxy/server.js:392
msgid ""
"Legacy protocol support (VMess MD5 Authentication) is provided for "
"compatibility purposes only, use of alterId > 1 is not recommended."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:325
msgid "Less compatibility and sometimes better performance."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:629
msgid "Let's Encrypt"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:897
msgid ""
"List of IP (v4 or v6) addresses prefixes to be assigned to the interface."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:132
#: htdocs/luci-static/resources/view/homeproxy/client.js:161
#: htdocs/luci-static/resources/view/homeproxy/client.js:497
msgid "List of nodes to test."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1036
#: htdocs/luci-static/resources/view/homeproxy/server.js:566
msgid "List of supported application level protocols, in order of preference."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1227
msgid "List of text DNS record to respond as answers."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1237
msgid "List of text DNS record to respond as extra records."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1232
msgid "List of text DNS record to respond as name servers."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:193
msgid "Listen address"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1406
msgid "Listen interfaces"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:198
msgid "Listen port"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:174
msgid "Loading"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1334
msgid "Local"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:896
msgid "Local address"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:191
msgid "Log file does not exist."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:184
msgid "Log is empty."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:930
msgid "MTU"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:150
msgid "Main UDP node"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:122
msgid "Main node"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:662
msgid "Make IP CIDR in rule set used to match the source IP."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1134
msgid "Make IP CIDR in rule sets match the source IP."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1138
msgid "Make IP CIDR in rule-sets accept empty query response."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:905
msgid ""
"Make each DNS server's cache independent for special purposes. If enabled, "
"will slightly degrade performance."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:331
msgid "Masquerade"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1267
msgid ""
"Match IP CIDR with query response. Current rule will be skipped if not match."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:831
msgid "Match IP CIDR."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:811
#: htdocs/luci-static/resources/view/homeproxy/client.js:1247
msgid "Match domain suffix."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:815
#: htdocs/luci-static/resources/view/homeproxy/client.js:1251
msgid "Match domain using keyword."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:819
#: htdocs/luci-static/resources/view/homeproxy/client.js:1255
msgid "Match domain using regular expression."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:806
#: htdocs/luci-static/resources/view/homeproxy/client.js:1242
msgid "Match full domain."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:854
#: htdocs/luci-static/resources/view/homeproxy/client.js:1291
msgid "Match port range. Format as START:/:END/START:END."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:849
#: htdocs/luci-static/resources/view/homeproxy/client.js:1286
msgid "Match port."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:835
#: htdocs/luci-static/resources/view/homeproxy/client.js:1271
msgid "Match private IP"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1272
msgid "Match private IP with query response."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:827
#: htdocs/luci-static/resources/view/homeproxy/client.js:1263
msgid "Match private source IP"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:859
#: htdocs/luci-static/resources/view/homeproxy/client.js:1296
msgid "Match process name."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:867
#: htdocs/luci-static/resources/view/homeproxy/client.js:1304
msgid "Match process path using regular expression."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:863
#: htdocs/luci-static/resources/view/homeproxy/client.js:1300
msgid "Match process path."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1095
msgid "Match query type."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:647
#: htdocs/luci-static/resources/view/homeproxy/client.js:1119
msgid "Match rule set."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:823
#: htdocs/luci-static/resources/view/homeproxy/client.js:1259
msgid "Match source IP CIDR."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:844
#: htdocs/luci-static/resources/view/homeproxy/client.js:1281
msgid "Match source port range. Format as START:/:END/START:END."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:839
#: htdocs/luci-static/resources/view/homeproxy/client.js:1276
msgid "Match source port."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:643
#: htdocs/luci-static/resources/view/homeproxy/client.js:1115
msgid "Match user name."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:579
#: htdocs/luci-static/resources/view/homeproxy/server.js:261
msgid "Max download speed"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:580
#: htdocs/luci-static/resources/view/homeproxy/server.js:262
msgid "Max download speed in Mbps."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:739
msgid "Max time difference"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:586
#: htdocs/luci-static/resources/view/homeproxy/server.js:268
msgid "Max upload speed"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:587
#: htdocs/luci-static/resources/view/homeproxy/server.js:269
msgid "Max upload speed in Mbps."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1056
#: htdocs/luci-static/resources/view/homeproxy/server.js:578
msgid "Maximum TLS version"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:961
#: htdocs/luci-static/resources/view/homeproxy/node.js:975
msgid "Maximum connections"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:973
msgid ""
"Maximum multiplexed streams in a connection before opening a new connection."
"<br/>Conflict with <code>%s</code> and <code>%s</code>."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:972
msgid "Maximum streams"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:768
#: htdocs/luci-static/resources/view/homeproxy/client.js:776
#: htdocs/luci-static/resources/view/homeproxy/client.js:1201
#: htdocs/luci-static/resources/view/homeproxy/client.js:1210
#: htdocs/luci-static/resources/view/homeproxy/node.js:841
#: htdocs/luci-static/resources/view/homeproxy/server.js:452
msgid "Method"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1048
#: htdocs/luci-static/resources/view/homeproxy/server.js:570
msgid "Minimum TLS version"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:520
msgid "Minimum idle sessions"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:967
msgid ""
"Minimum multiplexed streams in a connection before opening a new connection."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:966
#: htdocs/luci-static/resources/view/homeproxy/node.js:975
msgid "Minimum streams"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:310
#: htdocs/luci-static/resources/view/homeproxy/server.js:183
msgid "Mixed"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:321
msgid "Mixed <code>system</code> TCP stack and <code>gVisor</code> UDP stack."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:595
#: htdocs/luci-static/resources/view/homeproxy/client.js:1074
msgid "Mode"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1164
#: htdocs/luci-static/resources/view/homeproxy/server.js:850
msgid "MultiPath TCP"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:944
#: htdocs/luci-static/resources/view/homeproxy/server.js:500
msgid "Multiplex"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:952
msgid "Multiplex protocol."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:57
#: htdocs/luci-static/resources/view/homeproxy/server.js:60
msgid "NOT RUNNING"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1395
msgid "NOTE: Save current settings before updating subscriptions."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1231
msgid "NS"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:711
msgid "Native"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:181
msgid "NaïveProxy"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:637
#: htdocs/luci-static/resources/view/homeproxy/client.js:1098
#: htdocs/luci-static/resources/view/homeproxy/server.js:866
msgid "Network"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:701
msgid "New Reno"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:774
#: htdocs/luci-static/resources/view/homeproxy/node.js:791
#: htdocs/luci-static/resources/view/homeproxy/server.js:400
#: htdocs/luci-static/resources/view/homeproxy/server.js:417
msgid "No TCP transport, plain HTTP is merged into the HTTP transport."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:789
#: htdocs/luci-static/resources/view/homeproxy/server.js:415
msgid "No additional encryption support: It's basically duplicate encryption."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1412
msgid "No subscription available"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1437
msgid "No subscription node"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1271
msgid "No valid share link found."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:419
#: htdocs/luci-static/resources/view/homeproxy/node.js:396
msgid "Node"
msgstr ""
#: root/usr/share/luci/menu.d/luci-app-homeproxy.json:22
msgid "Node Settings"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1217
msgid "Nodes"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:993
#: htdocs/luci-static/resources/view/homeproxy/node.js:737
#: htdocs/luci-static/resources/view/homeproxy/node.js:775
#: htdocs/luci-static/resources/view/homeproxy/server.js:386
#: htdocs/luci-static/resources/view/homeproxy/server.js:401
msgid "None"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:573
#: htdocs/luci-static/resources/view/homeproxy/server.js:294
msgid "Obfuscate password"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:567
#: htdocs/luci-static/resources/view/homeproxy/server.js:288
msgid "Obfuscate type"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1407
msgid "Only process traffic from specific interfaces. Leave empty for all."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:254
msgid "Only proxy mainland China"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:580
#: htdocs/luci-static/resources/view/homeproxy/client.js:1059
msgid "Other fields"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:460
#: htdocs/luci-static/resources/view/homeproxy/client.js:678
#: htdocs/luci-static/resources/view/homeproxy/client.js:1026
#: htdocs/luci-static/resources/view/homeproxy/client.js:1374
msgid "Outbound"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:420
msgid "Outbound node"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:696
msgid "Override address"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:352
msgid "Override destination"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:703
msgid "Override port"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:353
msgid "Override the connection destination address with the sniffed domain."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:697
msgid "Override the connection destination address."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:704
msgid "Override the connection destination port."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:843
msgid "PUT"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:886
msgid "Packet encoding"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:244
msgid "Padding scheme"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:141
msgid "Panic"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:464
#: htdocs/luci-static/resources/view/homeproxy/server.js:210
msgid "Password"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:966
#: htdocs/luci-static/resources/view/homeproxy/client.js:1345
#: htdocs/luci-static/resources/view/homeproxy/node.js:836
#: htdocs/luci-static/resources/view/homeproxy/node.js:869
#: htdocs/luci-static/resources/view/homeproxy/server.js:447
#: htdocs/luci-static/resources/view/homeproxy/server.js:477
msgid "Path"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:911
msgid "Peer public key"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:329
msgid ""
"Performance may degrade slightly, so it is not recommended to enable it "
"when it is not needed."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:936
msgid "Persistent keepalive interval"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:855
#: htdocs/luci-static/resources/view/homeproxy/server.js:464
msgid "Ping timeout"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:630
msgid "Plugin"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:637
msgid "Plugin opts"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:848
#: htdocs/luci-static/resources/view/homeproxy/client.js:962
#: htdocs/luci-static/resources/view/homeproxy/client.js:1285
#: htdocs/luci-static/resources/view/homeproxy/node.js:453
msgid "Port"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:276
msgid "Port %s already exists!"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:582
#: htdocs/luci-static/resources/view/homeproxy/client.js:1061
msgid "Port fields"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:536
msgid "Port hopping interval in seconds."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:853
#: htdocs/luci-static/resources/view/homeproxy/client.js:1290
msgid "Port range"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:918
msgid "Pre-shared key"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1149
msgid "Predefined"
msgstr ""
#: htdocs/luci-static/resources/homeproxy.js:18
msgid "Prefer IPv4"
msgstr ""
#: htdocs/luci-static/resources/homeproxy.js:19
msgid "Prefer IPv6"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:678
#: htdocs/luci-static/resources/view/homeproxy/node.js:903
msgid "Private key"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:683
msgid "Private key passphrase"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:583
#: htdocs/luci-static/resources/view/homeproxy/client.js:1062
msgid "Process fields"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:858
#: htdocs/luci-static/resources/view/homeproxy/client.js:1295
msgid "Process name"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:862
#: htdocs/luci-static/resources/view/homeproxy/client.js:1299
msgid "Process path"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:866
#: htdocs/luci-static/resources/view/homeproxy/client.js:1303
msgid "Process path (regex)"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:615
#: htdocs/luci-static/resources/view/homeproxy/client.js:1103
#: htdocs/luci-static/resources/view/homeproxy/node.js:543
#: htdocs/luci-static/resources/view/homeproxy/node.js:951
#: htdocs/luci-static/resources/view/homeproxy/server.js:250
msgid "Protocol"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:767
msgid "Protocol parameter. Enable length block encryption."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:760
msgid ""
"Protocol parameter. Will waste traffic randomly if enabled (enabled by "
"default in v2ray and cannot be disabled)."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1481
msgid "Proxy Domain List"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1436
#: htdocs/luci-static/resources/view/homeproxy/client.js:1465
msgid "Proxy IPv4 IP-s"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1439
#: htdocs/luci-static/resources/view/homeproxy/client.js:1468
msgid "Proxy IPv6 IP-s"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1442
msgid "Proxy MAC-s"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1423
msgid "Proxy all except listed"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1420
msgid "Proxy filter mode"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1422
msgid "Proxy listed only"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:284
msgid "Proxy mode"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:497
msgid "Proxy protocol"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:621
#: htdocs/luci-static/resources/view/homeproxy/client.js:953
#: htdocs/luci-static/resources/view/homeproxy/client.js:1108
#: htdocs/luci-static/resources/view/homeproxy/node.js:712
#: htdocs/luci-static/resources/view/homeproxy/node.js:779
#: htdocs/luci-static/resources/view/homeproxy/server.js:405
msgid "QUIC"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:699
#: htdocs/luci-static/resources/view/homeproxy/server.js:355
msgid "QUIC congestion control algorithm."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:599
#: htdocs/luci-static/resources/view/homeproxy/server.js:307
msgid "QUIC connection receive window"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:314
msgid "QUIC maximum concurrent bidirectional streams"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:593
#: htdocs/luci-static/resources/view/homeproxy/server.js:300
msgid "QUIC stream receive window"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1094
msgid "Query type"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1214
msgid "RCode"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:622
#: htdocs/luci-static/resources/view/homeproxy/client.js:1109
msgid "RDP"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:917
msgid "RDRC timeout"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1142
#: htdocs/luci-static/resources/view/homeproxy/server.js:709
msgid "REALITY"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:714
msgid "REALITY private key"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1147
#: htdocs/luci-static/resources/view/homeproxy/server.js:730
msgid "REALITY public key"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1153
#: htdocs/luci-static/resources/view/homeproxy/server.js:734
msgid "REALITY short ID"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:55
#: htdocs/luci-static/resources/view/homeproxy/server.js:58
msgid "RUNNING"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:665
msgid "Random version will be used if empty."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:488
msgid "Recursive outbound detected!"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1012
msgid "Recursive resolver detected!"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:285
msgid "Redirect TCP"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:287
msgid "Redirect TCP + TProxy UDP"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:289
msgid "Redirect TCP + Tun UDP"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:219
msgid "Refresh every %s seconds."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:659
msgid "Region ID"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:672
#: htdocs/luci-static/resources/view/homeproxy/client.js:1148
msgid "Reject"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1335
msgid "Remote"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1434
msgid "Remove %s nodes"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1424
msgid "Remove all nodes from subscriptions"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1202
msgid "Reply with REFUSED"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:769
msgid "Reply with TCP RST / ICMP port unreachable"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:925
msgid "Reserved field bytes"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:673
msgid "Resolve"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:780
msgid "Resolve strategy"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:241
msgid "Resources management"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:880
msgid "Reuse address"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:881
msgid "Reuse listener address."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:792
#: htdocs/luci-static/resources/view/homeproxy/client.js:1186
msgid "Rewrite TTL"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:793
#: htdocs/luci-static/resources/view/homeproxy/client.js:1187
msgid "Rewrite TTL in DNS responses."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:670
#: htdocs/luci-static/resources/view/homeproxy/client.js:1146
msgid "Route"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:671
#: htdocs/luci-static/resources/view/homeproxy/client.js:1147
msgid "Route options"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:396
msgid "Routing Nodes"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:567
msgid "Routing Rules"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:120
msgid "Routing Settings"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:251
msgid "Routing mode"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:405
msgid "Routing node"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:264
msgid "Routing ports"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:576
msgid "Routing rule"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1310
msgid "Rule Set"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:646
#: htdocs/luci-static/resources/view/homeproxy/client.js:1118
#: htdocs/luci-static/resources/view/homeproxy/client.js:1319
msgid "Rule set"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:661
#: htdocs/luci-static/resources/view/homeproxy/client.js:1133
msgid "Rule set IP CIDR as source IP"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1352
msgid "Rule set URL"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:623
#: htdocs/luci-static/resources/view/homeproxy/client.js:1110
#: htdocs/luci-static/resources/view/homeproxy/node.js:438
msgid "SSH"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:624
#: htdocs/luci-static/resources/view/homeproxy/client.js:1111
msgid "STUN"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1177
msgid "SUoT version"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:632
msgid "Safari / Apple Network API"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:569
#: htdocs/luci-static/resources/view/homeproxy/server.js:290
msgid "Salamander"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:152
msgid "Same as main node"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:267
#: htdocs/luci-static/resources/view/homeproxy/status.js:273
msgid "Save"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1397
msgid "Save current settings"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1394
msgid "Save subscriptions settings"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1154
#: htdocs/luci-static/resources/view/homeproxy/server.js:156
msgid "Server"
msgstr ""
#: root/usr/share/luci/menu.d/luci-app-homeproxy.json:30
msgid "Server Settings"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:607
msgid ""
"Server name to use when choosing a certificate if the ClientHello's "
"ServerName field is empty."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:151
msgid "Server settings"
msgstr ""
#: root/usr/share/luci/menu.d/luci-app-homeproxy.json:38
msgid "Service Status"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1174
msgid "Set domain strategy for this query."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:436
msgid "ShadowTLS"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:644
msgid "ShadowTLS version"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:435
#: htdocs/luci-static/resources/view/homeproxy/server.js:184
msgid "Shadowsocks"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:628
msgid "Sniffed client type (QUIC client type or SSH client name)."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:616
#: htdocs/luci-static/resources/view/homeproxy/client.js:1104
msgid ""
"Sniffed protocol, see <a target=\"_blank\" href=\"https://sing-box.sagernet."
"org/configuration/route/sniff/\">Sniff</a> for details."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:437
#: htdocs/luci-static/resources/view/homeproxy/server.js:185
msgid "Socks"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:654
msgid "Socks version"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:655
msgid "Socks4"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:656
msgid "Socks4A"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:657
msgid "Socks5"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:822
#: htdocs/luci-static/resources/view/homeproxy/client.js:1258
msgid "Source IP CIDR"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1341
msgid "Source file"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:838
#: htdocs/luci-static/resources/view/homeproxy/client.js:1275
msgid "Source port"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:843
#: htdocs/luci-static/resources/view/homeproxy/client.js:1280
msgid "Source port range"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:750
msgid ""
"Specifies DNS server tag to use instead of selecting through DNS routing."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:796
#: htdocs/luci-static/resources/view/homeproxy/node.js:848
msgid ""
"Specifies the period of time (in seconds) after which a health check will be "
"performed using a ping frame if no frames have been received on the "
"connection.<br/>Please note that a ping response is considered a received "
"frame, so if there is no other traffic on the connection, the health check "
"will be executed every interval."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:422
#: htdocs/luci-static/resources/view/homeproxy/server.js:457
msgid ""
"Specifies the time (in seconds) until idle clients should be closed with a "
"GOAWAY frame. PING frames are not considered as activity."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:800
#: htdocs/luci-static/resources/view/homeproxy/node.js:856
msgid ""
"Specifies the timeout duration (in seconds) after sending a PING frame, "
"within which a response must be received.<br/>If a response to the PING "
"frame is not received within the specified timeout duration, the connection "
"will be closed."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:265
msgid ""
"Specify target ports to be proxied. Multiple ports must be separated by "
"commas."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:913
msgid "Store RDRC"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:914
msgid ""
"Store rejected DNS response cache.<br/>The check results of <code>Address "
"filter DNS rule items</code> will be cached until expiration."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:557
#: htdocs/luci-static/resources/view/homeproxy/server.js:278
msgid "String"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1322
msgid "Sub (%s)"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1349
msgid "Subscription URL-s"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1333
msgid "Subscriptions"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1273
msgid "Successfully imported %s nodes of total %s."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:86
msgid "Successfully updated."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1233
#: htdocs/luci-static/resources/view/homeproxy/node.js:1350
msgid ""
"Support Hysteria, Shadowsocks, Trojan, v2rayN (VMess), and XTLS (VLESS) "
"online configuration delivery standard."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:180
msgid ""
"Support UDP, TCP, DoH, DoQ, DoT. TCP protocol will be used if not specified."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:313
msgid "System"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:383
#: htdocs/luci-static/resources/view/homeproxy/client.js:435
#: htdocs/luci-static/resources/view/homeproxy/client.js:757
#: htdocs/luci-static/resources/view/homeproxy/client.js:888
#: htdocs/luci-static/resources/view/homeproxy/client.js:995
#: htdocs/luci-static/resources/view/homeproxy/client.js:1161
msgid "System DNS"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:638
#: htdocs/luci-static/resources/view/homeproxy/client.js:949
#: htdocs/luci-static/resources/view/homeproxy/client.js:1099
#: htdocs/luci-static/resources/view/homeproxy/server.js:867
msgid "TCP"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1161
#: htdocs/luci-static/resources/view/homeproxy/server.js:845
msgid "TCP fast open"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:307
msgid "TCP/IP stack"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:308
msgid "TCP/IP stack."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:625
#: htdocs/luci-static/resources/view/homeproxy/client.js:950
#: htdocs/luci-static/resources/view/homeproxy/client.js:1112
#: htdocs/luci-static/resources/view/homeproxy/node.js:1003
#: htdocs/luci-static/resources/view/homeproxy/server.js:532
msgid "TLS"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1035
#: htdocs/luci-static/resources/view/homeproxy/server.js:565
msgid "TLS ALPN"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:979
#: htdocs/luci-static/resources/view/homeproxy/node.js:1030
#: htdocs/luci-static/resources/view/homeproxy/server.js:560
msgid "TLS SNI"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:735
msgid "TLS fragment"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:787
#: htdocs/luci-static/resources/view/homeproxy/server.js:413
msgid "TLS is not enforced. If TLS is not configured, plain HTTP 1.1 is used."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:729
#: htdocs/luci-static/resources/view/homeproxy/client.js:737
msgid "TLS record fragment"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:988
msgid ""
"Tag of another server to resolve the domain name in the address. Required "
"if address contains domain."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1027
msgid "Tag of an outbound for connecting to the dns server."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1375
msgid "Tag of the outbound to download rule set."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1155
msgid "Tag of the target dns server."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:679
msgid "Tag of the target outbound."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:327
msgid ""
"Tell the client to use the BBR flow control algorithm instead of Hysteria CC."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:187
#: htdocs/luci-static/resources/view/homeproxy/client.js:222
msgid "Tencent Public DNS (119.29.29.29)"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:510
msgid "Test URL"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:138
#: htdocs/luci-static/resources/view/homeproxy/client.js:167
#: htdocs/luci-static/resources/view/homeproxy/client.js:530
msgid "Test interval"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:538
msgid "Test interval must be less or equal than idle timeout."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:144
#: htdocs/luci-static/resources/view/homeproxy/client.js:173
#: htdocs/luci-static/resources/view/homeproxy/client.js:546
msgid "Test tolerance"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:628
msgid "The ACME CA provider to use."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:878
msgid "The DNS strategy for resolving the domain name in the address."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:600
#: htdocs/luci-static/resources/view/homeproxy/server.js:308
msgid "The QUIC connection-level flow control window for receiving data."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:594
#: htdocs/luci-static/resources/view/homeproxy/server.js:301
msgid "The QUIC stream-level flow control window for receiving data."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:511
msgid "The URL to test."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:958
msgid "The address of the dns server."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:679
msgid ""
"The alternate port to use for the ACME HTTP challenge; if non-empty, this "
"port will be used instead of 80 to spin up a listener for the HTTP challenge."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:685
msgid ""
"The alternate port to use for the ACME TLS-ALPN challenge; the system must "
"forward 443 to this port for challenge to succeed."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:596
msgid ""
"The default rule uses the following matching logic:<br/><code>(domain || "
"domain_suffix || domain_keyword || domain_regex || ip_cidr || "
"ip_is_private)</code> &&<br/><code>(port || port_range)</code> &&<br/"
"><code>(source_ip_cidr || source_ip_is_private)</code> &&<br/"
"><code>(source_port || source_port_range)</code> &&<br/><code>other fields</"
"code>.<br/>Additionally, included rule sets can be considered merged rather "
"than as a single rule sub-item."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1075
msgid ""
"The default rule uses the following matching logic:<br/><code>(domain || "
"domain_suffix || domain_keyword || domain_regex)</code> &&<br/><code>(port "
"|| port_range)</code> &&<br/><code>(source_ip_cidr || source_ip_is_private)</"
"code> &&<br/><code>(source_port || source_port_range)</code> &&<br/"
"><code>other fields</code>.<br/>Additionally, included rule sets can be "
"considered merged rather than as a single rule sub-item."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:218
msgid ""
"The dns server for resolving China domains. Support UDP, TCP, DoH, DoQ, DoT."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:447
#: htdocs/luci-static/resources/view/homeproxy/client.js:1020
msgid "The domain strategy for resolving the domain name in the address."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1065
#: htdocs/luci-static/resources/view/homeproxy/server.js:587
msgid ""
"The elliptic curves that will be used in an ECDHE handshake, in preference "
"order. If empty, the default will be used."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:613
msgid ""
"The email address to use when creating or selecting an existing ACME server "
"account."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:743
msgid ""
"The fallback value in milliseconds used when TLS segmentation cannot "
"automatically determine the wait time."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:554
msgid "The idle timeout in seconds."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1057
#: htdocs/luci-static/resources/view/homeproxy/server.js:579
msgid "The maximum TLS version that is acceptable."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:315
msgid ""
"The maximum number of QUIC concurrent bidirectional streams that a peer is "
"allowed to open."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:740
msgid "The maximum time difference between the server and the client."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1049
#: htdocs/luci-static/resources/view/homeproxy/server.js:571
msgid "The minimum TLS version that is acceptable."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:102
#: htdocs/luci-static/resources/view/homeproxy/server.js:130
msgid "The modern ImmortalWrt proxy platform for ARM64/AMD64."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:454
#: htdocs/luci-static/resources/view/homeproxy/server.js:875
msgid "The network interface to bind to."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:967
msgid "The path of the DNS server."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1099
msgid ""
"The path to the ECH config, in PEM format. If empty, load from DNS will be "
"attempted."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1078
msgid "The path to the server certificate, in PEM format."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:199
msgid "The port must be unique."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:963
msgid "The port of the DNS server."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1215
msgid "The response code."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:776
msgid "The server private key, in PEM format."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:757
msgid "The server public key, in PEM format."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:461
msgid ""
"The tag of the upstream outbound.<br/>Other dial fields will be ignored when "
"enabled."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:139
#: htdocs/luci-static/resources/view/homeproxy/client.js:168
#: htdocs/luci-static/resources/view/homeproxy/client.js:531
msgid "The test interval in seconds."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:145
#: htdocs/luci-static/resources/view/homeproxy/client.js:174
#: htdocs/luci-static/resources/view/homeproxy/client.js:547
msgid "The test tolerance in milliseconds."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:807
#: htdocs/luci-static/resources/view/homeproxy/server.js:465
msgid ""
"The timeout (in seconds) that after performing a keepalive check, the client "
"will wait for activity. If no activity is detected, the connection will be "
"closed."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1043
#: htdocs/luci-static/resources/view/homeproxy/node.js:1385
msgid ""
"This is <strong>DANGEROUS</strong>, your traffic is almost like "
"<strong>PLAIN TEXT</strong>! Use at your own risk!"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:717
msgid ""
"This is the TUIC port of the UDP over TCP protocol, designed to provide a "
"QUIC stream based UDP relay mode that TUIC does not provide."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:188
#: htdocs/luci-static/resources/view/homeproxy/client.js:223
msgid "ThreatBook Public DNS (117.50.10.10)"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:723
msgid ""
"Timeout for UDP connections.<br/>Setting a larger value than the UDP timeout "
"in inbounds will have no effect."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:918
msgid ""
"Timeout of rejected DNS response cache in seconds. <code>604800 (7d)</code> "
"is used by default."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:491
msgid ""
"To be compatible with Xray-core, set this to <code>Sec-WebSocket-Protocol</"
"code>."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:292
msgid ""
"To enable Tun support, you need to install <code>ip-full</code> and "
"<code>kmod-tun</code>"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:135
msgid "Trace"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:773
#: htdocs/luci-static/resources/view/homeproxy/server.js:399
msgid "Transport"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:439
#: htdocs/luci-static/resources/view/homeproxy/server.js:186
msgid "Trojan"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:441
#: htdocs/luci-static/resources/view/homeproxy/server.js:188
msgid "Tuic"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:290
msgid "Tun TCP/UDP"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:947
#: htdocs/luci-static/resources/view/homeproxy/client.js:1333
#: htdocs/luci-static/resources/view/homeproxy/node.js:427
#: htdocs/luci-static/resources/view/homeproxy/server.js:175
msgid "Type"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:639
#: htdocs/luci-static/resources/view/homeproxy/client.js:948
#: htdocs/luci-static/resources/view/homeproxy/client.js:1100
#: htdocs/luci-static/resources/view/homeproxy/server.js:868
msgid "UDP"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1167
#: htdocs/luci-static/resources/view/homeproxy/server.js:854
msgid "UDP Fragment"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:335
#: htdocs/luci-static/resources/view/homeproxy/server.js:859
msgid "UDP NAT expiration time"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1171
msgid "UDP over TCP"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:716
msgid "UDP over stream"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:709
msgid "UDP packet relay mode."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:708
msgid "UDP relay mode"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:722
msgid "UDP timeout"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:124
#: htdocs/luci-static/resources/view/homeproxy/client.js:153
#: htdocs/luci-static/resources/view/homeproxy/client.js:421
msgid "URLTest"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:131
#: htdocs/luci-static/resources/view/homeproxy/client.js:160
#: htdocs/luci-static/resources/view/homeproxy/client.js:496
msgid "URLTest nodes"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:690
#: htdocs/luci-static/resources/view/homeproxy/server.js:346
msgid "UUID"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:98
msgid "Unknown error."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:195
msgid "Unknown error: %s"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1135
msgid "Unsupported fingerprint!"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1409
msgid "Update %s subscriptions"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:89
msgid "Update failed."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1391
msgid "Update interval"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1392
msgid "Update interval of rule set."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1404
msgid "Update nodes from subscriptions"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1346
msgid "Update subscriptions via proxy."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1339
msgid "Update time"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1345
msgid "Update via proxy"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1104
msgid "Upload ECH config"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:995
#: htdocs/luci-static/resources/view/homeproxy/server.js:523
msgid "Upload bandwidth"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:996
#: htdocs/luci-static/resources/view/homeproxy/server.js:524
msgid "Upload bandwidth in Mbps."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1085
#: htdocs/luci-static/resources/view/homeproxy/server.js:767
msgid "Upload certificate"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:786
msgid "Upload key"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1088
#: htdocs/luci-static/resources/view/homeproxy/node.js:1107
#: htdocs/luci-static/resources/view/homeproxy/server.js:770
#: htdocs/luci-static/resources/view/homeproxy/server.js:789
msgid "Upload..."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:596
msgid "Use ACME TLS certificate issuer."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1031
#: htdocs/luci-static/resources/view/homeproxy/server.js:561
msgid ""
"Used to verify the hostname on the returned certificates unless insecure is "
"given."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:980
msgid "Used to verify the hostname on the returned certificates."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:642
#: htdocs/luci-static/resources/view/homeproxy/client.js:1114
msgid "User"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1379
msgid "User-Agent"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:458
#: htdocs/luci-static/resources/view/homeproxy/server.js:203
msgid "Username"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:444
#: htdocs/luci-static/resources/view/homeproxy/server.js:189
msgid "VLESS"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:445
#: htdocs/luci-static/resources/view/homeproxy/server.js:190
msgid "VMess"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:181
#: htdocs/luci-static/resources/view/homeproxy/client.js:219
msgid "WAN DNS (read from interface)"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1463
msgid "WAN IP Policy"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:138
msgid "Warn"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:780
#: htdocs/luci-static/resources/view/homeproxy/server.js:406
msgid "WebSocket"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1370
msgid "Whitelist mode"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:443
msgid "WireGuard"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:912
msgid "WireGuard peer public key."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:919
msgid "WireGuard pre-shared key."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:904
msgid "WireGuard requires base64-encoded private keys."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:498
msgid "Write proxy protocol in the connection header."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:889
#: htdocs/luci-static/resources/view/homeproxy/node.js:1392
msgid "Xudp (Xray-core)"
msgstr ""
#: htdocs/luci-static/resources/homeproxy.js:259
msgid "Your %s was successfully uploaded. Size: %sB."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:630
msgid "ZeroSSL"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1090
#: htdocs/luci-static/resources/view/homeproxy/server.js:772
msgid "certificate"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:716
msgid "connect UDP connections"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1050
#: htdocs/luci-static/resources/view/homeproxy/node.js:1058
#: htdocs/luci-static/resources/view/homeproxy/server.js:572
#: htdocs/luci-static/resources/view/homeproxy/server.js:580
msgid "default"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:53
msgid "failed"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:776
#: htdocs/luci-static/resources/view/homeproxy/server.js:402
msgid "gRPC"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:818
msgid "gRPC permit without stream"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:813
#: htdocs/luci-static/resources/view/homeproxy/server.js:430
msgid "gRPC service name"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:311
msgid "gVisor"
msgstr ""
#: htdocs/luci-static/resources/homeproxy.js:306
#: htdocs/luci-static/resources/homeproxy.js:326
#: htdocs/luci-static/resources/view/homeproxy/client.js:195
#: htdocs/luci-static/resources/view/homeproxy/client.js:230
#: htdocs/luci-static/resources/view/homeproxy/client.js:504
#: htdocs/luci-static/resources/view/homeproxy/client.js:1356
#: htdocs/luci-static/resources/view/homeproxy/node.js:488
#: htdocs/luci-static/resources/view/homeproxy/node.js:1131
#: htdocs/luci-static/resources/view/homeproxy/server.js:235
msgid "non-empty value"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:631
#: htdocs/luci-static/resources/view/homeproxy/node.js:887
#: htdocs/luci-static/resources/view/homeproxy/node.js:1390
msgid "none"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:888
#: htdocs/luci-static/resources/view/homeproxy/node.js:1391
msgid "packet addr (v2ray-core v5+)"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:50
msgid "passed"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/server.js:791
msgid "private key"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:631
msgid "quic-go / uquic chrome"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:285
msgid "sing-box client"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:288
msgid "sing-box server"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1113
msgid "uTLS fingerprint"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:1114
msgid ""
"uTLS is a fork of \"crypto/tls\", which provides ClientHello fingerprinting "
"resistance."
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/status.js:59
msgid "unchecked"
msgstr ""
#: htdocs/luci-static/resources/homeproxy.js:237
#: htdocs/luci-static/resources/view/homeproxy/node.js:1302
msgid "unique UCI identifier"
msgstr ""
#: htdocs/luci-static/resources/homeproxy.js:317
msgid "unique value"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:500
#: htdocs/luci-static/resources/view/homeproxy/node.js:645
#: htdocs/luci-static/resources/view/homeproxy/node.js:1178
msgid "v1"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:501
#: htdocs/luci-static/resources/view/homeproxy/node.js:646
#: htdocs/luci-static/resources/view/homeproxy/node.js:1179
msgid "v2"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/node.js:647
msgid "v3"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:207
#: htdocs/luci-static/resources/view/homeproxy/client.js:211
#: htdocs/luci-static/resources/view/homeproxy/client.js:241
#: htdocs/luci-static/resources/view/homeproxy/client.js:245
msgid "valid DNS server address"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:518
#: htdocs/luci-static/resources/view/homeproxy/client.js:521
#: htdocs/luci-static/resources/view/homeproxy/client.js:1361
#: htdocs/luci-static/resources/view/homeproxy/client.js:1364
#: htdocs/luci-static/resources/view/homeproxy/node.js:1356
#: htdocs/luci-static/resources/view/homeproxy/node.js:1359
msgid "valid URL"
msgstr ""
#: htdocs/luci-static/resources/homeproxy.js:271
msgid "valid base64 key with %d characters"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:1506
#: htdocs/luci-static/resources/view/homeproxy/client.js:1538
msgid "valid hostname"
msgstr ""
#: htdocs/luci-static/resources/homeproxy.js:297
msgid "valid port range (port1:port2)"
msgstr ""
#: htdocs/luci-static/resources/view/homeproxy/client.js:274
msgid "valid port value"
msgstr ""
#: htdocs/luci-static/resources/homeproxy.js:328
msgid "valid uuid"
msgstr ""
|
2929004360/ruoyi-sign
| 12,931
|
ruoyi-common/src/main/java/com/ruoyi/common/utils/uuid/UUID.java
|
package com.ruoyi.common.utils.uuid;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;
import com.ruoyi.common.exception.UtilException;
/**
 * A universally unique identifier (UUID) implementation
 * (mirrors java.util.UUID, with an extra "simple" 32-char string form).
 *
 * @author ruoyi
 */
public final class UUID implements java.io.Serializable, Comparable<UUID>
{
    private static final long serialVersionUID = -1185015143654744140L;
    /**
     * Lazy initialization-on-demand holder for the shared {@link SecureRandom}.
     */
    private static class Holder
    {
        static final SecureRandom numberGenerator = getSecureRandom();
    }
    /** The most significant 64 bits of this UUID. */
    private final long mostSigBits;
    /** The least significant 64 bits of this UUID. */
    private final long leastSigBits;
    /**
     * Private constructor: packs 16 raw bytes (big-endian) into the two longs.
     *
     * @param data 16 bytes of UUID material
     */
    private UUID(byte[] data)
    {
        long msb = 0;
        long lsb = 0;
        assert data.length == 16 : "data must be 16 bytes in length";
        for (int i = 0; i < 8; i++)
        {
            msb = (msb << 8) | (data[i] & 0xff);
        }
        for (int i = 8; i < 16; i++)
        {
            lsb = (lsb << 8) | (data[i] & 0xff);
        }
        this.mostSigBits = msb;
        this.leastSigBits = lsb;
    }
    /**
     * Constructs a new UUID from the given bit halves.
     *
     * @param mostSigBits the most significant 64 bits of the {@code UUID}
     * @param leastSigBits the least significant 64 bits of the {@code UUID}
     */
    public UUID(long mostSigBits, long leastSigBits)
    {
        this.mostSigBits = mostSigBits;
        this.leastSigBits = leastSigBits;
    }
    /**
     * Static factory for a type 4 (pseudo-randomly generated) UUID,
     * using the faster non-cryptographic {@link ThreadLocalRandom}.
     *
     * @return a randomly generated {@code UUID}
     */
    public static UUID fastUUID()
    {
        return randomUUID(false);
    }
    /**
     * Static factory for a type 4 (pseudo-randomly generated) UUID,
     * backed by a cryptographically strong random number generator.
     *
     * @return a randomly generated {@code UUID}
     */
    public static UUID randomUUID()
    {
        return randomUUID(true);
    }
    /**
     * Static factory for a type 4 (pseudo-randomly generated) UUID.
     *
     * @param isSecure when true use {@link SecureRandom} (stronger randomness);
     *                 when false use a faster non-cryptographic generator
     * @return a randomly generated {@code UUID}
     */
    public static UUID randomUUID(boolean isSecure)
    {
        final Random ng = isSecure ? Holder.numberGenerator : getRandom();
        byte[] randomBytes = new byte[16];
        ng.nextBytes(randomBytes);
        randomBytes[6] &= 0x0f; /* clear version */
        randomBytes[6] |= 0x40; /* set to version 4 */
        randomBytes[8] &= 0x3f; /* clear variant */
        randomBytes[8] |= 0x80; /* set to IETF variant */
        return new UUID(randomBytes);
    }
    /**
     * Static factory for a type 3 (name-based, MD5-hashed) UUID derived
     * from the given byte array.
     *
     * @param name the byte array used to build the UUID
     *
     * @return a {@code UUID} generated from the given array
     */
    public static UUID nameUUIDFromBytes(byte[] name)
    {
        MessageDigest md;
        try
        {
            md = MessageDigest.getInstance("MD5");
        }
        catch (NoSuchAlgorithmException nsae)
        {
            throw new InternalError("MD5 not supported");
        }
        byte[] md5Bytes = md.digest(name);
        md5Bytes[6] &= 0x0f; /* clear version */
        md5Bytes[6] |= 0x30; /* set to version 3 */
        md5Bytes[8] &= 0x3f; /* clear variant */
        md5Bytes[8] |= 0x80; /* set to IETF variant */
        return new UUID(md5Bytes);
    }
    /**
     * Creates a {@code UUID} from the standard string representation
     * described in {@link #toString()}.
     *
     * @param name the {@code UUID} string
     * @return a {@code UUID} with the specified value
     * @throws IllegalArgumentException if name does not match the
     *         representation described in {@link #toString}
     *
     */
    public static UUID fromString(String name)
    {
        String[] components = name.split("-");
        if (components.length != 5)
        {
            throw new IllegalArgumentException("Invalid UUID string: " + name);
        }
        for (int i = 0; i < 5; i++)
        {
            components[i] = "0x" + components[i];
        }
        long mostSigBits = Long.decode(components[0]).longValue();
        mostSigBits <<= 16;
        mostSigBits |= Long.decode(components[1]).longValue();
        mostSigBits <<= 16;
        mostSigBits |= Long.decode(components[2]).longValue();
        long leastSigBits = Long.decode(components[3]).longValue();
        leastSigBits <<= 48;
        leastSigBits |= Long.decode(components[4]).longValue();
        return new UUID(mostSigBits, leastSigBits);
    }
    /**
     * Returns the least significant 64 bits of this UUID's 128-bit value.
     *
     * @return the least significant 64 bits
     */
    public long getLeastSignificantBits()
    {
        return leastSigBits;
    }
    /**
     * Returns the most significant 64 bits of this UUID's 128-bit value.
     *
     * @return the most significant 64 bits
     */
    public long getMostSignificantBits()
    {
        return mostSigBits;
    }
    /**
     * The version number of this {@code UUID}, describing how it was generated.
     * <p>
     * Version numbers mean:
     * <ul>
     * <li>1 time-based UUID
     * <li>2 DCE security UUID
     * <li>3 name-based UUID
     * <li>4 randomly generated UUID
     * </ul>
     *
     * @return the version number of this {@code UUID}
     */
    public int version()
    {
        // Version is bits masked by 0x000000000000F000 in MS long
        return (int) ((mostSigBits >> 12) & 0x0f);
    }
    /**
     * The variant number of this {@code UUID}, describing its layout.
     * <p>
     * Variant numbers mean:
     * <ul>
     * <li>0 reserved for NCS backward compatibility
     * <li>2 <a href="http://www.ietf.org/rfc/rfc4122.txt">IETF RFC 4122</a> (Leach-Salz), used by this class
     * <li>6 reserved, Microsoft backward compatibility
     * <li>7 reserved for future definition
     * </ul>
     *
     * @return the variant number of this {@code UUID}
     */
    public int variant()
    {
        // This field is composed of a varying number of bits.
        // 0 - - Reserved for NCS backward compatibility
        // 1 0 - The IETF aka Leach-Salz variant (used by this class)
        // 1 1 0 Reserved, Microsoft backward compatibility
        // 1 1 1 Reserved for future definition.
        return (int) ((leastSigBits >>> (64 - (leastSigBits >>> 62))) & (leastSigBits >> 63));
    }
    /**
     * The timestamp value associated with this UUID.
     *
     * <p>
     * The 60-bit timestamp is assembled from this {@code UUID}'s time_low,
     * time_mid and time_hi fields.<br>
     * It counts 100-nanosecond units since midnight, October 15, 1582 UTC.
     *
     * <p>
     * The timestamp is only meaningful for a time-based UUID (version 1).<br>
     * For any other version this method throws UnsupportedOperationException.
     *
     * @throws UnsupportedOperationException if this {@code UUID} is not version 1
     */
    public long timestamp() throws UnsupportedOperationException
    {
        checkTimeBase();
        return (mostSigBits & 0x0FFFL) << 48//
                | ((mostSigBits >> 16) & 0x0FFFFL) << 32//
                | mostSigBits >>> 32;
    }
    /**
     * The clock sequence value associated with this UUID.
     *
     * <p>
     * The 14-bit clock sequence is taken from the clock_seq field, which
     * guarantees temporal uniqueness in a time-based UUID.
     * <p>
     * {@code clockSequence} is only meaningful for a time-based UUID
     * (version 1); otherwise this method throws
     * UnsupportedOperationException.
     *
     * @return the clock sequence of this {@code UUID}
     *
     * @throws UnsupportedOperationException if this UUID's version is not 1
     */
    public int clockSequence() throws UnsupportedOperationException
    {
        checkTimeBase();
        return (int) ((leastSigBits & 0x3FFF000000000000L) >>> 48);
    }
    /**
     * The node value associated with this UUID.
     *
     * <p>
     * The 48-bit node value is taken from the node field, which is intended
     * to hold the IEEE 802 address of the generating machine, guaranteeing
     * spatial uniqueness.
     * <p>
     * The node value is only meaningful for a time-based UUID (version 1).<br>
     * Otherwise this method throws UnsupportedOperationException.
     *
     * @return the node value of this {@code UUID}
     *
     * @throws UnsupportedOperationException if this UUID's version is not 1
     */
    public long node() throws UnsupportedOperationException
    {
        checkTimeBase();
        return leastSigBits & 0x0000FFFFFFFFFFFFL;
    }
    /**
     * Returns the string form of this {@code UUID}.
     *
     * <p>
     * Described by this BNF:
     *
     * <pre>
     * {@code
     * UUID = <time_low>-<time_mid>-<time_high_and_version>-<variant_and_sequence>-<node>
     * time_low = 4*<hexOctet>
     * time_mid = 2*<hexOctet>
     * time_high_and_version = 2*<hexOctet>
     * variant_and_sequence = 2*<hexOctet>
     * node = 6*<hexOctet>
     * hexOctet = <hexDigit><hexDigit>
     * hexDigit = [0-9a-fA-F]
     * }
     * </pre>
     *
     * @return the string form of this {@code UUID}
     * @see #toString(boolean)
     */
    @Override
    public String toString()
    {
        return toString(false);
    }
    /**
     * Returns the string form of this {@code UUID}.
     *
     * <p>
     * Described by this BNF:
     *
     * <pre>
     * {@code
     * UUID = <time_low>-<time_mid>-<time_high_and_version>-<variant_and_sequence>-<node>
     * time_low = 4*<hexOctet>
     * time_mid = 2*<hexOctet>
     * time_high_and_version = 2*<hexOctet>
     * variant_and_sequence = 2*<hexOctet>
     * node = 6*<hexOctet>
     * hexOctet = <hexDigit><hexDigit>
     * hexDigit = [0-9a-fA-F]
     * }
     * </pre>
     *
     * @param isSimple when true, the simple form: a 32-char UUID string without '-'
     * @return the string form of this {@code UUID}
     */
    public String toString(boolean isSimple)
    {
        final StringBuilder builder = new StringBuilder(isSimple ? 32 : 36);
        // time_low
        builder.append(digits(mostSigBits >> 32, 8));
        if (!isSimple)
        {
            builder.append('-');
        }
        // time_mid
        builder.append(digits(mostSigBits >> 16, 4));
        if (!isSimple)
        {
            builder.append('-');
        }
        // time_high_and_version
        builder.append(digits(mostSigBits, 4));
        if (!isSimple)
        {
            builder.append('-');
        }
        // variant_and_sequence
        builder.append(digits(leastSigBits >> 48, 4));
        if (!isSimple)
        {
            builder.append('-');
        }
        // node
        builder.append(digits(leastSigBits, 12));
        return builder.toString();
    }
    /**
     * Returns the hash code of this UUID.
     *
     * @return the hash code value
     */
    @Override
    public int hashCode()
    {
        long hilo = mostSigBits ^ leastSigBits;
        return ((int) (hilo >> 32)) ^ (int) hilo;
    }
    /**
     * Compares this object with the specified object.
     * <p>
     * The result is {@code true} if and only if the argument is not
     * {@code null}, is a UUID object, has the same variant, and contains the
     * same value (bit for bit) as this UUID.
     *
     * @param obj the object to compare with
     *
     * @return {@code true} if the objects are the same; {@code false} otherwise
     */
    @Override
    public boolean equals(Object obj)
    {
        if ((null == obj) || (obj.getClass() != UUID.class))
        {
            return false;
        }
        UUID id = (UUID) obj;
        return (mostSigBits == id.mostSigBits && leastSigBits == id.leastSigBits);
    }
    // Comparison Operations
    /**
     * Compares this UUID with the specified UUID.
     *
     * <p>
     * The first of two differing UUIDs is greater when its most significant
     * differing field is greater than the corresponding field of the other.
     *
     * @param val the UUID to compare with
     *
     * @return -1, 0 or 1 as this UUID is less than, equal to, or greater than val
     *
     */
    @Override
    public int compareTo(UUID val)
    {
        // The ordering is intentionally set up so that the UUIDs
        // can simply be numerically compared as two numbers
        return (this.mostSigBits < val.mostSigBits ? -1 : //
                (this.mostSigBits > val.mostSigBits ? 1 : //
                        (this.leastSigBits < val.leastSigBits ? -1 : //
                                (this.leastSigBits > val.leastSigBits ? 1 : //
                                        0))));
    }
    // -------------------------------------------------------------------------------------------------------------------
    // Private method start
    /**
     * Returns val represented by the specified number of hex digits.
     * The high "marker" bit makes Long.toHexString emit leading zeros,
     * which substring(1) then strips back off.
     *
     * @param val the value
     * @param digits number of hex digits
     * @return the hex string
     */
    private static String digits(long val, int digits)
    {
        long hi = 1L << (digits * 4);
        return Long.toHexString(hi | (val & (hi - 1))).substring(1);
    }
    /**
     * Asserts this is a time-based (version 1) UUID.
     */
    private void checkTimeBase()
    {
        if (version() != 1)
        {
            throw new UnsupportedOperationException("Not a time-based UUID");
        }
    }
    /**
     * Obtains a {@link SecureRandom}, a cryptographically strong RNG.
     *
     * @return {@link SecureRandom}
     */
    public static SecureRandom getSecureRandom()
    {
        try
        {
            return SecureRandom.getInstance("SHA1PRNG");
        }
        catch (NoSuchAlgorithmException e)
        {
            throw new UtilException(e);
        }
    }
    /**
     * Obtains a random number generator.<br>
     * ThreadLocalRandom (JDK 7+) avoids contention between concurrent threads.
     *
     * @return {@link ThreadLocalRandom}
     */
    public static ThreadLocalRandom getRandom()
    {
        return ThreadLocalRandom.current();
    }
}
|
2977094657/BilibiliHistoryFetcher
| 6,424
|
scripts/export_to_excel.py
|
import json
import logging
import os
import sqlite3
import traceback
from datetime import datetime
import pandas as pd
from openpyxl.utils import get_column_letter
from scripts.utils import load_config, get_output_path
config = load_config()
# 配置日志记录
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
def create_connection(db_file):
    """Open a connection to the SQLite database at *db_file*.

    Returns the connection object, or None when the file does not exist
    or the connection attempt fails.
    """
    if not os.path.exists(db_file):
        logger.error(f"数据库文件 {db_file} 不存在。")
        return None
    connection = None
    try:
        connection = sqlite3.connect(db_file)
        logger.info(f"成功连接到SQLite数据库: {db_file}")
    except sqlite3.Error as e:
        logger.error(f"连接SQLite数据库时出错: {e}")
    return connection
def get_current_year():
    """Return the current calendar year as an int."""
    today = datetime.now()
    return today.year
def safe_json_loads(value):
    """Best-effort ``json.loads``: return [] instead of raising.

    None and the literal string 'null' are treated as empty; malformed
    JSON and any unexpected error are logged and mapped to [].
    """
    try:
        if value is None or value == 'null':
            return []
        return json.loads(value)
    except json.JSONDecodeError:
        logger.warning(f"JSON解析错误,值为: {value}")
        return []
    except Exception as e:
        logger.error(f"处理JSON时发生未知错误: {e}, 值为: {value}")
        return []
def export_bilibili_history(year=None, month=None, start_date=None, end_date=None):
    """Export Bilibili watch history from SQLite to an Excel file.

    Data lives in per-year tables named ``bilibili_history_<year>``; the
    function works out which year tables the filters touch, queries each,
    concatenates the results and writes one styled sheet.

    Args:
        year: Year to export; defaults to the current year.
        month: Month (1-12); when given, only that month's rows are exported.
        start_date: Inclusive start date 'YYYY-MM-DD'.
        end_date: Inclusive end date 'YYYY-MM-DD'.

    Returns:
        dict with 'status' ('success'/'error') and a human-readable 'message'.
    """
    full_db_file = get_output_path(config['db_file'])
    target_year = year if year is not None else get_current_year()
    # Build the output file name from whichever filters were supplied
    filename_parts = ['bilibili_history']
    if year is not None:
        filename_parts.append(str(target_year))
    if month is not None:
        filename_parts.append(f"{month:02d}月")
    if start_date and end_date:
        filename_parts.append(f"{start_date}至{end_date}")
    elif start_date:
        filename_parts.append(f"从{start_date}开始")
    elif end_date:
        filename_parts.append(f"至{end_date}")
    excel_file = get_output_path(f'{"_".join(filename_parts)}.xlsx')
    conn = create_connection(full_db_file)
    if conn is None:
        return {"status": "error", "message": f"无法连接到数据库 {full_db_file}。数据库文件可能不存在。"}
    try:
        # Determine which per-year tables the date range spans
        years_to_query = [target_year]
        # A date range may cross year boundaries, so expand to every year in it
        if start_date or end_date:
            # Derive the start and end years (fall back to target_year)
            start_year = int(start_date.split('-')[0]) if start_date else target_year
            end_year = int(end_date.split('-')[0]) if end_date else target_year
            # Clamp to a sane range (2000 .. current year)
            start_year = max(2000, min(start_year, datetime.now().year))
            end_year = max(2000, min(end_year, datetime.now().year))
            # Every year in the inclusive range gets queried
            years_to_query = list(range(start_year, end_year + 1))
            logger.info(f"将查询以下年份的表: {years_to_query}")
        # List the history tables that actually exist in the database
        cursor = conn.cursor()
        cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'bilibili_history_%'")
        existing_tables = [row[0] for row in cursor.fetchall()]
        logger.info(f"存在的表: {existing_tables}")
        # Keep only the year tables that exist
        tables_to_query = [f"bilibili_history_{year}" for year in years_to_query if f"bilibili_history_{year}" in existing_tables]
        if not tables_to_query:
            return {"status": "error", "message": f"没有找到符合条件的数据表。"}
        logger.info(f"将查询以下表: {tables_to_query}")
        # Assemble WHERE-clause fragments and their bound parameters
        conditions = []
        params = []
        # Optional month filter (view_at is a unix timestamp)
        if month is not None:
            conditions.append("strftime('%m', datetime(view_at, 'unixepoch', 'localtime')) = ?")
            params.append(f"{month:02d}")
        # Optional inclusive start-date filter
        if start_date:
            conditions.append("date(view_at, 'unixepoch', 'localtime') >= ?")
            params.append(start_date)
        # Optional inclusive end-date filter
        if end_date:
            conditions.append("date(view_at, 'unixepoch', 'localtime') <= ?")
            params.append(end_date)
        # Join the fragments into a single WHERE clause (empty when unfiltered)
        condition_str = ""
        if conditions:
            condition_str = " WHERE " + " AND ".join(conditions)
        # Query every relevant table and collect non-empty results
        all_data = []
        for table in tables_to_query:
            query = f"SELECT * FROM {table}{condition_str}"
            # Debug logging of the exact SQL and parameters
            logger.info(f"执行SQL查询: {query}")
            logger.info(f"参数: {params}")
            # Parameters are bound by pandas/sqlite, not interpolated
            table_df = pd.read_sql_query(query, conn, params=params)
            if not table_df.empty:
                all_data.append(table_df)
                logger.info(f"从表 {table} 中获取了 {len(table_df)} 条数据")
        # Concatenate the per-table frames into one
        if all_data:
            df = pd.concat(all_data, ignore_index=True)
            logger.info(f"合并后共有 {len(df)} 条数据")
        else:
            df = pd.DataFrame()
        if df.empty:
            return {"status": "error", "message": f"没有找到符合条件的数据。"}
        # 'covers' is stored as a JSON string (possibly null); decode to a list
        if 'covers' in df.columns:
            df['covers'] = df['covers'].apply(safe_json_loads)
        # Sanitize column names: strip punctuation and guarantee they start with a letter
        df.columns = df.columns.str.replace(r'[^\w\s]', '', regex=True).str.strip()
        df.columns = [f"Column_{i}" if not col or not col[0].isalpha() else col for i, col in enumerate(df.columns)]
        # Write the sheet
        with pd.ExcelWriter(excel_file, engine='openpyxl') as writer:
            df.to_excel(writer, index=False, sheet_name='BilibiliHistory')
            # Auto-fit each column to its widest cell
            worksheet = writer.sheets['BilibiliHistory']
            for idx, col in enumerate(df.columns):
                series = df[col]
                max_len = max((
                    series.astype(str).map(len).max(),  # widest value
                    len(str(col))  # header width
                )) + 1  # one extra char of padding for readability
                worksheet.column_dimensions[get_column_letter(idx + 1)].width = max_len
        logger.info(f"数据已成功导出到 {excel_file}")
        return {"status": "success", "message": f"数据已成功导出到 {excel_file}"}
    except Exception as e:
        logger.error(f"导出数据时发生错误: {e}")
        traceback.print_exc()
        return {"status": "error", "message": f"导出数据时发生错误: {e}"}
    finally:
        if conn:
            conn.close()
# Allow running this module directly as a command-line export.
if __name__ == '__main__':
    outcome = export_bilibili_history()
    if outcome["status"] != "success":
        print(f"错误: {outcome['message']}")
    else:
        print(outcome["message"])
|
2977094657/BilibiliHistoryFetcher
| 5,228
|
scripts/wbi_sign.py
|
import hashlib
import json
import time
import urllib.parse
from typing import Dict, Any
import requests
# Shuffle table used to derive the 32-char mixin key from img_key + sub_key.
MIXIN_KEY_ENC_TAB = [
    46, 47, 18, 2, 53, 8, 23, 32, 15, 50, 10, 31, 58, 3, 45, 35, 27, 43, 5, 49,
    33, 9, 42, 19, 29, 28, 14, 39, 12, 38, 41, 13, 37, 48, 7, 16, 24, 55, 40,
    61, 26, 17, 0, 1, 60, 51, 30, 4, 22, 25, 54, 21, 56, 59, 6, 63, 57, 62, 11,
    36, 20, 34, 44, 52
]

# Module-level cache of the WBI keys (refreshed at most once per hour).
_cached_wbi_keys = {
    "img_key": "",
    "sub_key": "",
    "time": 0
}


def get_mixin_key(orig: str) -> str:
    """Reorder *orig*'s characters via MIXIN_KEY_ENC_TAB and keep the first 32.

    Table entries beyond the input's length are skipped, so short inputs
    simply yield fewer characters.
    """
    shuffled = "".join(orig[index] for index in MIXIN_KEY_ENC_TAB if index < len(orig))
    return shuffled[:32]
def fetch_wbi_keys() -> Dict[str, str]:
    """Fetch the current WBI signing keys (img_key / sub_key) from Bilibili.

    Results are cached in the module-level ``_cached_wbi_keys`` for one hour.
    On failure, falls back to any previously cached keys, and only returns
    empty strings when nothing has ever been fetched.
    """
    global _cached_wbi_keys
    # Serve from cache when it is less than an hour old
    current_time = int(time.time())
    if _cached_wbi_keys["time"] > 0 and current_time - _cached_wbi_keys["time"] < 3600:
        return {
            "img_key": _cached_wbi_keys["img_key"],
            "sub_key": _cached_wbi_keys["sub_key"]
        }
    try:
        # Read SESSDATA from the app config (import here to avoid a cycle at module load)
        from scripts.utils import load_config
        config = load_config()
        sessdata = config.get('SESSDATA', '')
        # Browser-like headers avoid Bilibili's 412 anti-bot rejection
        headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
            "Referer": "https://www.bilibili.com/"
        }
        # Attach the session cookie when available
        if sessdata:
            headers["Cookie"] = f"SESSDATA={sessdata}"
        # The nav endpoint exposes the current wbi_img / wbi_sub URLs
        resp = requests.get(
            "https://api.bilibili.com/x/web-interface/nav",
            headers=headers,
            timeout=10
        )
        resp.raise_for_status()
        json_content = resp.json()
        if json_content["code"] != 0:
            raise Exception(f"获取WBI密钥失败: {json_content['message']}")
        img_url = json_content["data"]["wbi_img"]["img_url"]
        sub_url = json_content["data"]["wbi_img"]["sub_url"]
        # The key is the URL's file name without its extension
        img_key = img_url.split("/")[-1].split(".")[0]
        sub_key = sub_url.split("/")[-1].split(".")[0]
        # Refresh the cache with the new keys and timestamp
        _cached_wbi_keys = {
            "img_key": img_key,
            "sub_key": sub_key,
            "time": current_time
        }
        return {
            "img_key": img_key,
            "sub_key": sub_key
        }
    except Exception as e:
        print(f"获取WBI密钥时出错: {e}")
        # Fall back to stale cached keys rather than failing outright
        if _cached_wbi_keys["img_key"] and _cached_wbi_keys["sub_key"]:
            return {
                "img_key": _cached_wbi_keys["img_key"],
                "sub_key": _cached_wbi_keys["sub_key"]
            }
        # Nothing cached: signal failure with empty keys
        return {"img_key": "", "sub_key": ""}
def get_wbi_sign(params: Dict[str, Any]) -> Dict[str, Any]:
    """Sign *params* with the current WBI keys.

    Returns the params unchanged (and prints a notice) when the keys
    cannot be obtained.
    """
    wbi_keys = fetch_wbi_keys()
    img_key = wbi_keys["img_key"]
    sub_key = wbi_keys["sub_key"]
    if img_key and sub_key:
        return enc_wbi(params, img_key, sub_key)
    print("获取WBI密钥失败,返回未签名的参数")
    return params
def enc_wbi(params: Dict[str, Any], img_key: str, sub_key: str) -> Dict[str, Any]:
    """Return a copy of *params* extended with the WBI ``wts``/``w_rid`` fields.

    The signature is the MD5 of the urlencoded, key-sorted params (with the
    characters ``!'()*`` stripped from every value) concatenated with the
    mixin key derived from img_key + sub_key.
    """
    mixin_key = get_mixin_key(img_key + sub_key)
    timestamp = int(time.time())

    # Sign over the sorted params plus wts, with "!'()*" stripped from values.
    to_sign = dict(params)
    to_sign["wts"] = timestamp
    sanitized = {
        key: "".join(ch for ch in str(val) if ch not in "!'()*")
        for key, val in sorted(to_sign.items())
    }
    query = urllib.parse.urlencode(sanitized)
    signature = hashlib.md5((query + mixin_key).encode()).hexdigest()

    # The returned dict keeps the caller's original (unfiltered) values.
    signed = dict(params)
    signed["wts"] = timestamp
    signed["w_rid"] = signature
    return signed
# Manual smoke test: sign sample params, then call the conclusion API.
if __name__ == "__main__":
    test_params = {
        "bvid": "BV1L94y1H7CV",
        "cid": 1335073288,
        "up_mid": 297242063
    }

    signed_params = get_wbi_sign(test_params)
    print("原始参数:", test_params)
    print("签名后的参数:", signed_params)

    url = "https://api.bilibili.com/x/web-interface/view/conclusion/get"
    # Browser-like headers keep the API from rejecting the request
    headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
        "Referer": "https://www.bilibili.com/"
    }
    response = requests.get(url, params=signed_params, headers=headers)
    print("\n请求URL:", response.url)
    print("响应状态码:", response.status_code)
    try:
        data = response.json()
        print("响应内容:", json.dumps(data, ensure_ascii=False, indent=2))
    except:
        print("响应内容解析失败:", response.text)
|
2929004360/ruoyi-sign
| 1,952
|
ruoyi-common/src/main/java/com/ruoyi/common/utils/uuid/Seq.java
|
package com.ruoyi.common.utils.uuid;
import java.util.concurrent.atomic.AtomicInteger;
import com.ruoyi.common.utils.DateUtils;
import com.ruoyi.common.utils.StringUtils;
/**
 * Sequence-id generator: timestamp + machine code + rolling counter.
 *
 * @author ruoyi
 */
public class Seq
{
    // Sequence type for general-purpose ids
    public static final String commSeqType = "COMMON";
    // Sequence type for upload ids
    public static final String uploadSeqType = "UPLOAD";
    // Rolling counter backing the general-purpose sequence
    private static AtomicInteger commSeq = new AtomicInteger(1);
    // Rolling counter backing the upload sequence
    private static AtomicInteger uploadSeq = new AtomicInteger(1);
    // Single-character machine identifier embedded in every id
    private static final String machineCode = "A";
    /**
     * Get a general-purpose sequence id.
     *
     * @return the id value
     */
    public static String getId()
    {
        return getId(commSeqType);
    }
    /**
     * Default 16-char id: yyMMddHHmmss + one machine-code char + 3-digit
     * rolling counter. Unknown types fall back to the general-purpose counter.
     *
     * @return the id value
     */
    public static String getId(String type)
    {
        AtomicInteger atomicInt = commSeq;
        if (uploadSeqType.equals(type))
        {
            atomicInt = uploadSeq;
        }
        return getId(atomicInt, 3);
    }
    /**
     * Build an id: yyMMddHHmmss + one machine-code char + length-digit
     * rolling counter.
     *
     * @param atomicInt counter to draw from
     * @param length number of counter digits
     * @return the id value
     */
    public static String getId(AtomicInteger atomicInt, int length)
    {
        String result = DateUtils.dateTimeNow();
        result += machineCode;
        result += getSeq(atomicInt, length);
        return result;
    }
    /**
     * Rolling counter in [1, 10^length), left-padded with zeros to length digits.
     * Synchronized so the increment and the wrap-around reset are atomic
     * with respect to each other.
     *
     * @return the padded counter value
     */
    private synchronized static String getSeq(AtomicInteger atomicInt, int length)
    {
        // Read the current value, then increment
        int value = atomicInt.getAndIncrement();
        // Reset to 1 once the counter reaches 10^length
        int maxSeq = (int) Math.pow(10, length);
        if (atomicInt.get() >= maxSeq)
        {
            atomicInt.set(1);
        }
        // Left-pad with zeros to the requested width
        return StringUtils.padl(value, length);
    }
}
|
2929004360/ruoyi-sign
| 1,456
|
ruoyi-common/src/main/java/com/ruoyi/common/utils/http/HttpHelper.java
|
package com.ruoyi.common.utils.http;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import javax.servlet.ServletRequest;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* 通用http工具封装
*
* @author ruoyi
*/
public class HttpHelper
{
    private static final Logger LOGGER = LoggerFactory.getLogger(HttpHelper.class);

    /**
     * Read the entire request body and return it as a UTF-8 string.
     * Returns the empty string when the body cannot be read.
     *
     * @param request the servlet request whose body is consumed
     * @return the request body, or "" on I/O failure
     */
    public static String getBodyString(ServletRequest request)
    {
        StringBuilder sb = new StringBuilder();
        // try-with-resources closes the reader (and the underlying stream) on
        // every path, replacing the original manual finally/close boilerplate.
        try (InputStream inputStream = request.getInputStream();
             BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8)))
        {
            String line;
            while ((line = reader.readLine()) != null)
            {
                sb.append(line);
            }
        }
        catch (IOException e)
        {
            // Log the cause as well; the original dropped the exception entirely.
            LOGGER.warn("getBodyString出现问题!", e);
        }
        return sb.toString();
    }
}
|
2977094657/BilibiliHistoryFetcher
| 5,281
|
routers/image_downloader.py
|
import os
import sqlite3
from typing import Optional
from fastapi import APIRouter, BackgroundTasks, HTTPException
from fastapi.responses import FileResponse
from scripts.image_downloader import ImageDownloader
from scripts.utils import get_output_path
router = APIRouter()
downloader = ImageDownloader()
def get_history_db():
    """Open and return a connection to the Bilibili history SQLite database."""
    return sqlite3.connect(get_output_path('bilibili_history.db'))
@router.post("/start", summary="开始下载图片")
async def start_download(
    background_tasks: BackgroundTasks,
    year: Optional[int] = None,
    use_sessdata: bool = True
):
    """Kick off image downloading as a background task.

    Args:
        year: restrict the download to one year; None downloads every year.
        use_sessdata: whether to send SESSDATA when fetching (public assets
            such as covers and avatars do not require it).
    """
    def _run_and_finalize(y=None, sess=True):
        # Wrapper guarantees the downloading flag is cleared even on failure.
        try:
            downloader.start_download(y, sess)
        except Exception as e:
            print(f"下载过程发生错误: {str(e)}")
        finally:
            print("\n=== 下载任务完成,更新状态 ===")
            downloader.is_downloading = False
            print("下载状态已设置为已完成")

    background_tasks.add_task(_run_and_finalize, year, use_sessdata)
    return {
        "status": "success",
        "message": f"开始下载{'所有年份' if year is None else f'{year}年'}的图片"
    }
@router.post("/stop", summary="停止下载图片")
async def stop_download():
    """Stop the running download task.

    Returns:
        dict: stop status plus the current download statistics.
    """
    try:
        return downloader.stop_download()
    except Exception as e:
        return {
            "status": "error",
            "message": f"停止下载失败: {str(e)}"
        }
@router.get("/status", summary="获取下载状态")
async def get_status():
    """Report the current download statistics."""
    return {
        "status": "success",
        "data": downloader.get_download_stats()
    }
@router.post("/clear", summary="清空所有图片")
async def clear_images():
    """Delete every downloaded image and reset the download status file."""
    try:
        # Guard clause: report failure first, success path follows unindented.
        if not downloader.clear_all_images():
            return {
                "status": "error",
                "message": "清空图片失败,请查看日志了解详细信息"
            }
        return {
            "status": "success",
            "message": "已清空所有图片和下载状态",
            "data": {
                "cleared_paths": [
                    "output/images/covers",
                    "output/images/avatars",
                    "output/images/orphaned_covers",
                    "output/images/orphaned_avatars"
                ],
                "status_file": "output/download_status.json"
            }
        }
    except Exception as e:
        return {
            "status": "error",
            "message": f"清空图片时发生错误: {str(e)}"
        }
# Extensions probed when locating a cached image file.
_IMAGE_EXTS = ('.jpg', '.jpeg', '.png', '.webp', '.gif')


def _find_image_file(dirs, file_hash):
    """Return the first existing image path for file_hash in dirs, else None."""
    for d in dirs:
        if not os.path.isdir(d):
            continue
        for ext in _IMAGE_EXTS:
            candidate = os.path.join(d, f"{file_hash}{ext}")
            if os.path.exists(candidate):
                return candidate
    return None


@router.get("/local/{image_type}/{file_hash}", summary="获取本地图片")
async def get_local_image(image_type: str, file_hash: str):
    """Serve a locally cached image.

    Args:
        image_type: image category, 'covers' or 'avatars'.
        file_hash: hash of the image file; its first two chars name the sub-dir.

    Returns:
        FileResponse: the image file.

    Raises:
        HTTPException: 400 for a bad type, 404 when not found, 500 otherwise.
    """
    if image_type not in ('covers', 'avatars'):
        raise HTTPException(
            status_code=400,
            detail=f"无效的图片类型: {image_type}"
        )
    try:
        base_path = get_output_path('images')
        type_path = os.path.join(base_path, image_type)
        sub_dir = file_hash[:2]  # first two hash chars select the sub-directory
        # Search year directories newest-first, then fall back to the root.
        # (Refactor: the original duplicated the extension-probe loop twice.)
        search_dirs = []
        if os.path.exists(type_path):
            years = [d for d in os.listdir(type_path) if d.isdigit()]
            search_dirs = [
                os.path.join(type_path, year, sub_dir)
                for year in sorted(years, reverse=True)
            ]
        search_dirs.append(os.path.join(type_path, sub_dir))
        img_path = _find_image_file(search_dirs, file_hash)
        if img_path:
            print(f"找到图片文件: {img_path}")
            ext = os.path.splitext(img_path)[1]
            media_type = "image/jpeg" if ext == '.jpg' else f"image/{ext[1:]}"
            return FileResponse(img_path, media_type=media_type)
        raise HTTPException(
            status_code=404,
            detail=f"图片不存在: {file_hash}"
        )
    except Exception as e:
        if isinstance(e, HTTPException):
            raise e
        print(f"获取本地图片时出错: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail=f"获取图片失败: {str(e)}"
        )
|
2929004360/ruoyi-sign
| 9,047
|
ruoyi-common/src/main/java/com/ruoyi/common/utils/http/HttpUtils.java
|
package com.ruoyi.common.utils.http;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.ConnectException;
import java.net.SocketTimeoutException;
import java.net.URL;
import java.net.URLConnection;
import java.nio.charset.StandardCharsets;
import java.security.cert.X509Certificate;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSession;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.ruoyi.common.constant.Constants;
import com.ruoyi.common.utils.StringUtils;
/**
 * Generic HTTP send helpers (GET, POST, and trust-all SSL POST).
 *
 * @author ruoyi
 */
public class HttpUtils
{
    private static final Logger log = LoggerFactory.getLogger(HttpUtils.class);
    /**
     * Send a GET request to the given URL with no query parameters.
     *
     * @param url target URL
     * @return the remote resource's response body
     */
    public static String sendGet(String url)
    {
        return sendGet(url, StringUtils.EMPTY);
    }
    /**
     * Send a GET request to the given URL.
     *
     * @param url target URL
     * @param param query string in name1=value1&name2=value2 form
     * @return the remote resource's response body
     */
    public static String sendGet(String url, String param)
    {
        return sendGet(url, param, Constants.UTF8);
    }
    /**
     * Send a GET request to the given URL.
     * All failures are logged and swallowed; the partial (possibly empty)
     * response read so far is returned.
     *
     * @param url target URL
     * @param param query string in name1=value1&name2=value2 form
     * @param contentType charset name used to decode the response
     * @return the remote resource's response body
     */
    public static String sendGet(String url, String param, String contentType)
    {
        StringBuilder result = new StringBuilder();
        BufferedReader in = null;
        try
        {
            String urlNameString = StringUtils.isNotBlank(param) ? url + "?" + param : url;
            log.info("sendGet - {}", urlNameString);
            URL realUrl = new URL(urlNameString);
            URLConnection connection = realUrl.openConnection();
            connection.setRequestProperty("accept", "*/*");
            connection.setRequestProperty("connection", "Keep-Alive");
            connection.setRequestProperty("user-agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64)");
            connection.connect();
            in = new BufferedReader(new InputStreamReader(connection.getInputStream(), contentType));
            String line;
            while ((line = in.readLine()) != null)
            {
                result.append(line);
            }
            log.info("recv - {}", result);
        }
        catch (ConnectException e)
        {
            log.error("调用HttpUtils.sendGet ConnectException, url=" + url + ",param=" + param, e);
        }
        catch (SocketTimeoutException e)
        {
            log.error("调用HttpUtils.sendGet SocketTimeoutException, url=" + url + ",param=" + param, e);
        }
        catch (IOException e)
        {
            log.error("调用HttpUtils.sendGet IOException, url=" + url + ",param=" + param, e);
        }
        catch (Exception e)
        {
            log.error("调用HttpsUtil.sendGet Exception, url=" + url + ",param=" + param, e);
        }
        finally
        {
            try
            {
                if (in != null)
                {
                    in.close();
                }
            }
            catch (Exception ex)
            {
                log.error("调用in.close Exception, url=" + url + ",param=" + param, ex);
            }
        }
        return result.toString();
    }
    /**
     * Send a POST request with a form-encoded body.
     * All failures are logged and swallowed; the partial (possibly empty)
     * response read so far is returned.
     *
     * @param url target URL
     * @param param request body in name1=value1&name2=value2 form
     * @return the remote resource's response body
     */
    public static String sendPost(String url, String param)
    {
        PrintWriter out = null;
        BufferedReader in = null;
        StringBuilder result = new StringBuilder();
        try
        {
            log.info("sendPost - {}", url);
            URL realUrl = new URL(url);
            URLConnection conn = realUrl.openConnection();
            conn.setRequestProperty("accept", "*/*");
            conn.setRequestProperty("connection", "Keep-Alive");
            conn.setRequestProperty("user-agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64)");
            conn.setRequestProperty("Accept-Charset", "utf-8");
            conn.setRequestProperty("contentType", "utf-8");
            conn.setDoOutput(true);
            conn.setDoInput(true);
            out = new PrintWriter(conn.getOutputStream());
            out.print(param);
            out.flush();
            in = new BufferedReader(new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8));
            String line;
            while ((line = in.readLine()) != null)
            {
                result.append(line);
            }
            log.info("recv - {}", result);
        }
        catch (ConnectException e)
        {
            log.error("调用HttpUtils.sendPost ConnectException, url=" + url + ",param=" + param, e);
        }
        catch (SocketTimeoutException e)
        {
            log.error("调用HttpUtils.sendPost SocketTimeoutException, url=" + url + ",param=" + param, e);
        }
        catch (IOException e)
        {
            log.error("调用HttpUtils.sendPost IOException, url=" + url + ",param=" + param, e);
        }
        catch (Exception e)
        {
            log.error("调用HttpsUtil.sendPost Exception, url=" + url + ",param=" + param, e);
        }
        finally
        {
            try
            {
                if (out != null)
                {
                    out.close();
                }
                if (in != null)
                {
                    in.close();
                }
            }
            catch (IOException ex)
            {
                log.error("调用in.close Exception, url=" + url + ",param=" + param, ex);
            }
        }
        return result.toString();
    }
    /**
     * Send an HTTPS POST with the parameters appended to the URL as a query
     * string, trusting any certificate and any hostname.
     * NOTE(review): the trust-all managers below disable TLS verification,
     * exposing callers to man-in-the-middle attacks — only safe for trusted
     * internal endpoints; confirm before using against external hosts.
     * The response is re-decoded from ISO-8859-1 to UTF-8 line by line.
     *
     * @param url target URL
     * @param param query string in name1=value1&name2=value2 form
     * @return the remote resource's response body
     */
    public static String sendSSLPost(String url, String param)
    {
        StringBuilder result = new StringBuilder();
        String urlNameString = url + "?" + param;
        try
        {
            log.info("sendSSLPost - {}", urlNameString);
            SSLContext sc = SSLContext.getInstance("SSL");
            sc.init(null, new TrustManager[] { new TrustAnyTrustManager() }, new java.security.SecureRandom());
            URL console = new URL(urlNameString);
            HttpsURLConnection conn = (HttpsURLConnection) console.openConnection();
            conn.setRequestProperty("accept", "*/*");
            conn.setRequestProperty("connection", "Keep-Alive");
            conn.setRequestProperty("user-agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64)");
            conn.setRequestProperty("Accept-Charset", "utf-8");
            conn.setRequestProperty("contentType", "utf-8");
            conn.setDoOutput(true);
            conn.setDoInput(true);
            conn.setSSLSocketFactory(sc.getSocketFactory());
            conn.setHostnameVerifier(new TrustAnyHostnameVerifier());
            conn.connect();
            InputStream is = conn.getInputStream();
            BufferedReader br = new BufferedReader(new InputStreamReader(is));
            String ret = "";
            while ((ret = br.readLine()) != null)
            {
                if (ret != null && !"".equals(ret.trim()))
                {
                    result.append(new String(ret.getBytes(StandardCharsets.ISO_8859_1), StandardCharsets.UTF_8));
                }
            }
            log.info("recv - {}", result);
            conn.disconnect();
            br.close();
        }
        catch (ConnectException e)
        {
            log.error("调用HttpUtils.sendSSLPost ConnectException, url=" + url + ",param=" + param, e);
        }
        catch (SocketTimeoutException e)
        {
            log.error("调用HttpUtils.sendSSLPost SocketTimeoutException, url=" + url + ",param=" + param, e);
        }
        catch (IOException e)
        {
            log.error("调用HttpUtils.sendSSLPost IOException, url=" + url + ",param=" + param, e);
        }
        catch (Exception e)
        {
            log.error("调用HttpsUtil.sendSSLPost Exception, url=" + url + ",param=" + param, e);
        }
        return result.toString();
    }
    /** Trust manager that accepts every certificate chain (no validation). */
    private static class TrustAnyTrustManager implements X509TrustManager
    {
        @Override
        public void checkClientTrusted(X509Certificate[] chain, String authType)
        {
        }
        @Override
        public void checkServerTrusted(X509Certificate[] chain, String authType)
        {
        }
        @Override
        public X509Certificate[] getAcceptedIssuers()
        {
            return new X509Certificate[] {};
        }
    }
    /** Hostname verifier that accepts every hostname (no validation). */
    private static class TrustAnyHostnameVerifier implements HostnameVerifier
    {
        @Override
        public boolean verify(String hostname, SSLSession session)
        {
            return true;
        }
    }
}
|
2929004360/ruoyi-sign
| 1,822
|
ruoyi-common/src/main/java/com/ruoyi/common/utils/file/FileTypeUtils.java
|
package com.ruoyi.common.utils.file;
import java.io.File;
import org.apache.commons.lang3.StringUtils;
/**
 * File type utilities.
 *
 * @author ruoyi
 */
public class FileTypeUtils
{
    /**
     * Get the file extension, e.g. ruoyi.txt returns txt.
     *
     * @param file the file (may be null)
     * @return extension without the dot, or "" when unknown
     */
    public static String getFileType(File file)
    {
        if (null == file)
        {
            return StringUtils.EMPTY;
        }
        return getFileType(file.getName());
    }

    /**
     * Get the file extension from a file name, e.g. ruoyi.txt returns txt.
     *
     * @param fileName file name (may be null)
     * @return lower-case extension without the dot, or "" when absent
     */
    public static String getFileType(String fileName)
    {
        // Robustness: the original threw NullPointerException on null input.
        if (fileName == null)
        {
            return StringUtils.EMPTY;
        }
        int separatorIndex = fileName.lastIndexOf(".");
        if (separatorIndex < 0)
        {
            return "";
        }
        return fileName.substring(separatorIndex + 1).toLowerCase();
    }

    /**
     * Detect the image type from the leading bytes of the content.
     *
     * @param photoByte image bytes (may be null or short)
     * @return extension name without the dot; defaults to "JPG"
     */
    public static String getFileExtendName(byte[] photoByte)
    {
        String strFileExtendName = "JPG";
        // BUG FIX: the original indexed up to photoByte[9] unconditionally and
        // threw ArrayIndexOutOfBoundsException on null/short input; each magic-
        // number check is now guarded by a length test.
        if (photoByte == null || photoByte.length < 2)
        {
            return strFileExtendName;
        }
        if (photoByte.length >= 6
                && (photoByte[0] == 71) && (photoByte[1] == 73) && (photoByte[2] == 70) && (photoByte[3] == 56)
                && ((photoByte[4] == 55) || (photoByte[4] == 57)) && (photoByte[5] == 97))
        {
            strFileExtendName = "GIF";
        }
        else if (photoByte.length >= 10
                && (photoByte[6] == 74) && (photoByte[7] == 70) && (photoByte[8] == 73) && (photoByte[9] == 70))
        {
            strFileExtendName = "JPG";
        }
        else if ((photoByte[0] == 66) && (photoByte[1] == 77))
        {
            strFileExtendName = "BMP";
        }
        else if (photoByte.length >= 4
                && (photoByte[1] == 80) && (photoByte[2] == 78) && (photoByte[3] == 71))
        {
            strFileExtendName = "PNG";
        }
        return strFileExtendName;
    }
}
|
2977094657/BilibiliHistoryFetcher
| 29,928
|
routers/audio_to_text.py
|
"""
音频转文字API路由
处理视频语音转文字的API接口
"""
import os
import time
import asyncio
import signal
import traceback
import platform
from typing import Optional, List, Dict, Tuple, Any
from fastapi import APIRouter, HTTPException, BackgroundTasks
from pydantic import BaseModel, Field
from loguru import logger
from scripts.utils import load_config, setup_logger
# 确保日志系统已初始化
setup_logger()
# 创建API路由
router = APIRouter()
config = load_config()
# Globals shared across requests: the loaded model instance, a loading flag,
# and a lock guarding concurrent loads.
whisper_model = None
model_loading = False
model_lock = asyncio.Lock()
# Whether the host OS is Linux
is_linux = platform.system().lower() == "linux"
# Conditionally import faster_whisper only when the resource check allows it
whisper_available = False
try:
    # Reuse the shared check from scripts.system_resource_check to avoid
    # running the (potentially costly) resource probe twice.
    from scripts.system_resource_check import check_system_resources
    resources = check_system_resources()
    # Import WhisperModel only when the check says speech-to-text can run
    if resources["summary"]["can_run_speech_to_text"]:
        try:
            from faster_whisper import WhisperModel
            whisper_available = True
        except ImportError as e:
            print(f"导入WhisperModel失败: {str(e)}")
            whisper_available = False
    else:
        # print (not logger) to avoid a circular logging reference at import time
        print(f"系统资源不足,不导入WhisperModel模块。限制原因: {resources.get('summary', {}).get('resource_limitation', '未知')}")
        whisper_available = False
except ImportError as e:
    print(f"导入资源检查模块失败: {str(e)}")
    whisper_available = False
except Exception as e:
    print(f"检查系统资源时出错: {str(e)}")
    whisper_available = False
# Resource cleanup on interrupt signals
def handle_interrupt(signum, frame):
    """Signal handler: drop the loaded Whisper model so its memory is freed."""
    global whisper_model
    print(f"接收到信号 {signum},清理资源...")
    try:
        model = whisper_model
        if model is not None:
            whisper_model = None
            del model
        print("资源已清理")
    except Exception as exc:
        print(f"清理资源时出错: {str(exc)}")
    # Deliberately no os._exit(0) here — the service keeps running after cleanup.
# Request/response models for the audio-to-text API
class TranscribeRequest(BaseModel):
    """Body of POST /transcribe."""
    audio_path: str = Field(..., description="音频文件路径,可以是相对路径或绝对路径")
    model_size: str = Field("tiny", description="模型大小,可选值: tiny, base, small, medium, large-v1, large-v2, large-v3")
    language: str = Field("zh", description="语言代码,默认为中文")
    cid: int = Field(..., description="视频的CID,用于分类存储和命名结果")
class TranscribeResponse(BaseModel):
    """Result of a transcription run."""
    success: bool = Field(..., description="是否成功")
    message: str = Field(..., description="处理结果或错误信息")
    duration: Optional[float] = Field(None, description="音频时长(秒)")
    processing_time: Optional[float] = Field(None, description="处理时间(秒)")
    language_detected: Optional[str] = Field(None, description="检测到的语言")
    cid: Optional[int] = Field(None, description="处理时使用的CID")
class SystemInfo(BaseModel):
    """Host environment details reported by /check_environment."""
    os_name: str = Field(..., description="操作系统名称")
    os_version: str = Field(..., description="操作系统版本")
    python_version: str = Field(..., description="Python版本")
    cuda_available: bool = Field(..., description="是否支持CUDA")
    cuda_version: Optional[str] = Field(None, description="CUDA版本")
    gpu_info: Optional[List[Dict[str, str]]] = Field(None, description="GPU信息")
    cuda_setup_guide: Optional[str] = Field(None, description="CUDA安装指南")
    torch_available: bool = Field(False, description="是否可以导入torch")
    whisper_available: bool = Field(False, description="是否可以导入WhisperModel")
    resource_limitation: Optional[str] = Field(None, description="资源限制原因")
class ModelInfo(BaseModel):
    """Download state and metadata for one Whisper model size."""
    model_size: str = Field(..., description="模型大小")
    is_downloaded: bool = Field(..., description="模型是否已下载")
    model_path: Optional[str] = Field(None, description="模型文件路径")
    download_link: Optional[str] = Field(None, description="模型下载链接")
    file_size: Optional[str] = Field(None, description="模型文件大小")
class EnvironmentCheckResponse(BaseModel):
    """Aggregate response of /check_environment."""
    system_info: SystemInfo
    models_info: Dict[str, ModelInfo]
    recommended_device: str = Field(..., description="推荐使用的设备(cuda/cpu)")
    compute_type: str = Field(..., description="推荐的计算类型(float16/int8)")
class WhisperModelInfo(BaseModel):
    """Catalog entry describing one Whisper model."""
    name: str = Field(..., description="模型名称")
    description: str = Field(..., description="模型描述")
    is_downloaded: bool = Field(..., description="是否已下载")
    path: Optional[str] = Field(None, description="模型路径")
    params_size: str = Field(..., description="参数大小")
    recommended_use: str = Field(..., description="推荐使用场景")
class ResourceCheckResponse(BaseModel):
    """System resource check response."""
    os_info: Dict[str, Any] = Field(..., description="操作系统信息")
    memory: Dict[str, Any] = Field(..., description="内存信息")
    cpu: Dict[str, Any] = Field(..., description="CPU信息")
    disk: Dict[str, Any] = Field(..., description="磁盘信息")
    summary: Dict[str, Any] = Field(..., description="资源检查总结")
    can_run_speech_to_text: bool = Field(..., description="是否可以运行语音转文字功能")
    limitation_reason: Optional[str] = Field(None, description="限制原因")
async def load_model(model_size, device=None, compute_type=None):
    """Load (or reuse) the global Whisper model.

    Args:
        model_size: model name, e.g. "tiny", "medium", "large-v3".
        device: "cuda"/"cpu"; None lets faster-whisper auto-select.
        compute_type: "float16"/"int8"; None lets faster-whisper auto-select.

    Returns:
        The loaded WhisperModel instance (also cached in the module global).

    Raises:
        HTTPException: 400 when whisper is unavailable or the model is not
            downloaded; 500 on load failure or wait timeout.
    """
    global whisper_model, model_loading
    # Bail out early when the optional dependency / resources are missing
    if not whisper_available:
        raise HTTPException(
            status_code=400,
            detail={
                "error": "WHISPER_NOT_AVAILABLE",
                "message": "语音转文字功能不可用,系统资源不足或未安装相关依赖",
                "suggestion": "请检查系统资源或安装相关依赖"
            }
        )
    try:
        # Reuse the already-loaded model when the requested size matches
        if whisper_model is not None and whisper_model.model_size == model_size:
            logger.info(f"使用已加载的模型: {model_size}")
            return whisper_model
        # Refuse to load a model that has not been downloaded yet
        is_downloaded, model_path = is_model_downloaded(model_size)
        if not is_downloaded:
            logger.error(f"模型 {model_size} 尚未下载")
            raise HTTPException(
                status_code=400,
                detail={
                    "error": "MODEL_NOT_DOWNLOADED",
                    "message": f"模型 {model_size} 尚未下载,请先通过 /audio_to_text/models 接口查看可用模型,并确保选择已下载的模型",
                    "model_size": model_size
                }
            )
        # If another request is loading a model, wait for it to finish
        if model_loading:
            logger.info("其他进程正在加载模型,等待...")
            wait_start = time.time()
            while model_loading:
                # Give up after a bounded wait
                if time.time() - wait_start > 300:  # 5-minute timeout
                    raise HTTPException(
                        status_code=500,
                        detail="等待模型加载超时,请稍后重试"
                    )
                await asyncio.sleep(1)
            if whisper_model is not None and whisper_model.model_size == model_size:
                return whisper_model
        model_loading = True
        start_time = time.time()
        logger.info(f"开始加载模型: {model_size}")
        try:
            # Load synchronously in an executor thread so the (blocking)
            # model construction does not stall the event loop
            loop = asyncio.get_running_loop()
            whisper_model = await loop.run_in_executor(
                None,
                lambda: WhisperModel(model_size, device=device or "auto", compute_type=compute_type or "auto")
            )
            load_time = time.time() - start_time
            logger.info(f"模型加载完成,耗时 {load_time:.2f} 秒")
            # Remember which size is loaded so later calls can reuse it
            whisper_model.model_size = model_size
            return whisper_model
        except Exception as e:
            logger.error(f"模型加载失败: {str(e)}")
            logger.error(f"错误堆栈: {traceback.format_exc()}")
            raise HTTPException(
                status_code=500,
                detail=f"模型加载失败: {str(e)}"
            )
    except Exception as e:
        if isinstance(e, HTTPException):
            raise e
        logger.error(f"模型加载过程出错: {str(e)}")
        logger.error(f"错误堆栈: {traceback.format_exc()}")
        raise HTTPException(
            status_code=500,
            detail=f"模型加载过程出错: {str(e)}"
        )
    finally:
        # Always clear the flag so a failed load does not deadlock waiters
        model_loading = False
        logger.info("模型加载状态已重置")
def format_timestamp(seconds):
    """Convert a duration in seconds to a zero-padded HH:MM:SS string."""
    hh = int(seconds // 3600)
    mm = int(seconds % 3600 // 60)
    ss = int(seconds % 60)
    # Always emit the full HH:MM:SS form, each field padded to two digits.
    return ":".join(f"{part:02d}" for part in (hh, mm, ss))
def save_transcript(all_segments, output_path):
    """Write segments to output_path as one space-joined line of
    'HH:MM:SS>HH:MM:SS: text' entries (compact, token-friendly format)."""
    print(f"准备保存转录结果到: {output_path}")
    print(f"处理的片段数量: {len(all_segments)}")
    entries = []
    for seg in all_segments:
        # Normalize: embedded newlines become spaces, edges are trimmed.
        cleaned = seg.text.strip().replace("\n", " ")
        entries.append(
            f"{format_timestamp(seg.start)}>{format_timestamp(seg.end)}: {cleaned}"
        )
    with open(output_path, "w", encoding="utf-8") as f:
        f.write(" ".join(entries))
    print(f"转录结果已保存: {output_path}")
async def transcribe_audio(audio_path, model_size="medium", language="zh", cid=None):
    """Transcribe an audio file to text with faster-whisper.

    Args:
        audio_path: path to the audio file.
        model_size: Whisper model size.
        language: language code (default Chinese).
        cid: video CID; when set, the transcript is saved under output/stt/<cid>/.

    Returns:
        dict: success flag, message, and (on success) duration, detected
        language, and processing time.

    Raises:
        HTTPException: propagated from model loading, or 500 on other errors.
    """
    start_time = time.time()
    if not whisper_available:
        return {
            "success": False,
            "message": "语音转文字功能不可用,请安装faster-whisper",
            "cid": cid
        }
    if not os.path.exists(audio_path):
        return {
            "success": False,
            "message": f"音频文件不存在: {audio_path}",
            "cid": cid
        }
    print(f"开始处理音频文件: {audio_path}")
    try:
        # Probe for a usable GPU via nvidia-smi
        import subprocess
        has_gpu = False
        try:
            result = subprocess.run(['nvidia-smi'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            has_gpu = (result.returncode == 0)
        except (FileNotFoundError, subprocess.SubprocessError):
            # Command missing or failed — assume no GPU
            has_gpu = False
        # Pick device and compute type from the probe result
        device = "cuda" if has_gpu else "cpu"
        compute_type = "float16" if has_gpu else "int8"
        print(f"使用设备: {device}, 计算类型: {compute_type}")
        # Load (or reuse) the shared model instance
        global whisper_model
        whisper_model = await load_model(model_size, device, compute_type)
        # Run the transcription
        segments, info = whisper_model.transcribe(
            audio_path,
            language=language,
            task="transcribe",
            beam_size=5
        )
        # Materialize the lazy segment generator
        logger.info("处理转录结果...")
        all_segments = list(segments)
        logger.info(f"转录得到 {len(all_segments)} 个片段")
        # When a CID is given, persist the transcript under output/stt/<cid>/
        if cid:
            logger.info(f"准备保存结果到CID目录: {cid}")
            save_dir = os.path.join("output", "stt", str(cid))
            os.makedirs(save_dir, exist_ok=True)
            # NOTE: save_transcript writes a compact text line despite the
            # .json extension used here
            json_path = os.path.join(save_dir, f"{cid}.json")
            logger.info(f"保存JSON格式到: {json_path}")
            save_transcript(all_segments, json_path)
            logger.info("转录结果保存完成")
        processing_time = time.time() - start_time
        logger.info(f"总处理时间: {processing_time:.2f} 秒")
        return {
            "success": True,
            "message": "转录完成",
            "duration": info.duration,
            "language_detected": info.language,
            "processing_time": processing_time
        }
    except HTTPException as he:
        # Pass HTTP errors (e.g. from load_model) through unchanged
        raise he
    except Exception as e:
        logger.error(f"转录过程出错: {str(e)}")
        logger.error(f"错误堆栈: {traceback.format_exc()}")
        raise HTTPException(
            status_code=500,
            detail=str(e)
        )
@router.post("/transcribe", response_model=TranscribeResponse, summary="转录音频文件")
async def transcribe_audio_api(request: TranscribeRequest, background_tasks: BackgroundTasks):
    """Transcribe an audio file to text.

    Falls back to locating the audio by CID when the given path is missing.

    Raises:
        HTTPException: 404 when neither path nor CID yields a file, 500 otherwise.
    """
    try:
        start_time = time.time()
        logger.info(f"收到转录请求: {request.audio_path}, 模型: {request.model_size}, 语言: {request.language}, CID: {request.cid}")
        if not os.path.exists(request.audio_path):
            # BUG FIX: find_audio_by_cid returns {"cid", "audio_path"} and
            # raises 404 when nothing matches; the original assigned the whole
            # dict to request.audio_path (breaking the downstream path checks)
            # and let the 404 escape unannotated.
            audio_path = None
            try:
                found = await find_audio_by_cid(request.cid)
                audio_path = found.get("audio_path")
            except HTTPException:
                audio_path = None
            if not audio_path:
                raise HTTPException(
                    status_code=404,
                    detail={
                        "error": "FILE_NOT_FOUND",
                        "message": f"未找到音频文件: {request.audio_path},也未找到CID {request.cid} 对应的音频文件",
                        "audio_path": request.audio_path,
                        "cid": request.cid
                    }
                )
            request.audio_path = audio_path
        # Run the transcription itself
        result = await transcribe_audio(
            request.audio_path,
            model_size=request.model_size,
            language=request.language,
            cid=request.cid
        )
        processing_time = time.time() - start_time
        logger.info(f"转录完成,耗时: {processing_time:.2f} 秒")
        return TranscribeResponse(
            success=result["success"],
            message=result["message"],
            duration=result.get("duration"),
            processing_time=processing_time,
            language_detected=result.get("language_detected"),
            cid=request.cid
        )
    except HTTPException as e:
        raise e
    except Exception as e:
        logger.error(f"转录过程出错: {str(e)}")
        logger.error(f"错误堆栈: {traceback.format_exc()}")
        raise HTTPException(
            status_code=500,
            detail=f"转录过程出错: {str(e)}"
        )
@router.get("/models", response_model=List[WhisperModelInfo])
async def list_models():
    """List the available Whisper models with their download state and details.

    Returns:
        Model entries: name, description, download state, path, size, usage.
    """
    # Static catalog: (name, description, params_size, recommended_use)
    catalog = [
        ("tiny.en", "极小型(英语专用)", "39M参数",
         "适用于简单的英语语音识别,对资源要求最低"),
        ("base.en", "基础型(英语专用)", "74M参数",
         "适用于一般的英语语音识别,速度和准确度均衡"),
        ("small.en", "小型(英语专用)", "244M参数",
         "适用于较复杂的英语语音识别,准确度较高"),
        ("medium.en", "中型(英语专用)", "769M参数",
         "适用于专业的英语语音识别,准确度高"),
        ("tiny", "极小型(多语言)", "39M参数",
         "适用于简单的多语言语音识别,特别是资源受限场景"),
        ("base", "基础型(多语言)", "74M参数",
         "适用于一般的多语言语音识别,平衡性能和资源占用"),
        ("small", "小型(多语言)", "244M参数",
         "适用于较复杂的多语言语音识别,准确度和性能均衡"),
        ("medium", "中型(多语言)", "769M参数",
         "适用于专业的多语言语音识别,高准确度"),
        ("large-v1", "大型V1", "1550M参数",
         "适用于要求极高准确度的场景,支持所有语言"),
        ("large-v2", "大型V2", "1550M参数",
         "V1的改进版本,提供更好的多语言支持"),
        ("large-v3", "大型V3", "1550M参数",
         "最新版本,提供最佳的识别效果和语言支持"),
    ]
    entries = []
    for name, description, params_size, recommended_use in catalog:
        downloaded, model_path = is_model_downloaded(name)
        entries.append(WhisperModelInfo(
            name=name,
            description=description,
            is_downloaded=downloaded,
            path=model_path if downloaded else None,
            params_size=params_size,
            recommended_use=recommended_use
        ))
    return entries
@router.get("/find_audio", summary="根据CID查找音频文件路径")
async def find_audio_by_cid(cid: int):
    """Locate the downloaded audio file for a given CID.

    Args:
        cid: the video's CID.

    Returns:
        dict with the CID and the audio file path.

    Raises:
        HTTPException: 404 when no file matches, 500 on unexpected errors.
    """
    try:
        base_dir = os.path.join("./output/download_video")
        audio_path = None
        # Download dirs are named ..._<cid>; audio files end with _<cid>.<ext>
        for root, dirs, files in os.walk(base_dir):
            if not root.endswith(f"_{cid}"):
                continue
            for file in files:
                if file.endswith((f"_{cid}.m4a", f"_{cid}.mp3", f"_{cid}.wav")):
                    audio_path = os.path.join(root, file)
                    break
            if audio_path:
                break
        if not audio_path:
            raise HTTPException(
                status_code=404,
                detail=f"未找到CID为{cid}的音频文件"
            )
        return {
            "cid": cid,
            "audio_path": audio_path
        }
    except HTTPException:
        # BUG FIX: the 404 above was previously caught by the broad handler
        # below and re-raised as a 500; propagate it unchanged.
        raise
    except Exception as e:
        raise HTTPException(
            status_code=500,
            detail=f"查找音频文件时出错: {str(e)}"
        )
# Per-OS CUDA installation instructions, keyed by lower-cased OS name.
_CUDA_GUIDES = {
    "windows": """Windows CUDA安装步骤:
1. 访问 NVIDIA 驱动下载页面:https://www.nvidia.cn/Download/index.aspx
2. 下载并安装适合您显卡的最新驱动
3. 访问 NVIDIA CUDA 下载页面:https://developer.nvidia.cn/cuda-downloads
4. 选择Windows版本下载并安装CUDA Toolkit
5. 安装完成后重启系统
6. 在命令行中输入 'nvidia-smi' 验证安装""",
    "linux": """Linux CUDA安装步骤:
1. 检查系统是否有NVIDIA显卡:
   lspci | grep -i nvidia
2. 安装NVIDIA驱动:
   Ubuntu/Debian:
   sudo apt update
   sudo apt install nvidia-driver-xxx(替换xxx为最新版本号)
   CentOS:
   sudo dnf install nvidia-driver
3. 安装CUDA Toolkit:
   访问:https://developer.nvidia.com/cuda-downloads
   选择对应的Linux发行版,按照页面提供的命令安装
4. 设置环境变量:
   echo 'export PATH=/usr/local/cuda/bin:$PATH' >> ~/.bashrc
   echo 'export LD_LIBRARY_PATH=/usr/local/cuda/lib64:$LD_LIBRARY_PATH' >> ~/.bashrc
   source ~/.bashrc
5. 验证安装:
   nvidia-smi
   nvcc --version""",
}


def get_cuda_setup_guide(os_name: str) -> str:
    """Return the CUDA installation guide for os_name (case-insensitive),
    or a not-supported message for other operating systems."""
    return _CUDA_GUIDES.get(os_name.lower(), "暂不支持当前操作系统的CUDA安装指南")
def get_model_info(model_size: str) -> ModelInfo:
    """Build a ModelInfo with download state and metadata for model_size."""
    # Approximate on-disk sizes of the faster-whisper checkpoints.
    approx_sizes = {
        "tiny": "75MB",
        "base": "150MB",
        "small": "400MB",
        "medium": "1.5GB",
        "large-v1": "3GB",
        "large-v2": "3GB",
        "large-v3": "3GB",
    }
    # faster-whisper caches models under the huggingface hub directory.
    cache_dir = os.path.expanduser(
        f"~/.cache/huggingface/hub/models--guillaumekln--faster-whisper-{model_size}"
    )
    downloaded = os.path.exists(cache_dir)
    return ModelInfo(
        model_size=model_size,
        is_downloaded=downloaded,
        model_path=cache_dir if downloaded else None,
        download_link=f"https://huggingface.co/guillaumekln/faster-whisper-{model_size}",
        file_size=approx_sizes.get(model_size, "未知")
    )
@router.get("/check_environment", response_model=EnvironmentCheckResponse)
async def check_environment():
    """Inspect the host environment and report whether faster-whisper can run.

    Probes CUDA via nvidia-smi, parses GPU name/memory from its output,
    collects per-model download state, and recommends a device/compute type.
    """
    import platform
    import sys
    import os
    import subprocess
    os_name = platform.system()
    os_version = platform.version()
    python_version = sys.version
    # Defaults reported when probing fails
    cuda_available = False
    cuda_version = None
    gpu_info = None
    resource_limitation = None
    try:
        # Probe CUDA availability
        cuda_available = False
        try:
            # A successful nvidia-smi run implies a working driver/CUDA stack
            result = subprocess.run(["nvidia-smi"], capture_output=True, text=True)
            if result.returncode == 0:
                cuda_available = True
                # Pull the CUDA version out of the nvidia-smi banner
                import re
                match = re.search(r"CUDA Version: (\d+\.\d+)", result.stdout)
                if match:
                    cuda_version = match.group(1)
                # Scrape GPU names (and memory from the following line)
                gpu_info = []
                lines = result.stdout.split('\n')
                for i, line in enumerate(lines):
                    if "GeForce" in line or "RTX" in line or "GTX" in line or "Tesla" in line or "Quadro" in line:
                        name_match = re.search(r"(GeForce|RTX|GTX|Tesla|Quadro)[\w\s]+", line)
                        name = name_match.group(0) if name_match else "Unknown GPU"
                        # Total memory appears as "usedMiB / totalMiB" one line below
                        mem_match = None
                        if i+1 < len(lines):
                            mem_match = re.search(r"(\d+)MiB\s+/\s+(\d+)MiB", lines[i+1])
                        memory = f"{int(mem_match.group(2))/1024:.1f}GB" if mem_match else "Unknown"
                        gpu_info.append({
                            "name": name.strip(),
                            "memory": memory
                        })
        except (FileNotFoundError, subprocess.SubprocessError):
            # nvidia-smi missing or failed — treat as no CUDA
            cuda_available = False
        # OS-specific CUDA installation guide for the response
        cuda_setup_guide = get_cuda_setup_guide(os_name)
        # When whisper is unavailable, explain why via the resource check
        if not whisper_available:
            try:
                # Reuse the shared resource check to avoid duplicate probing
                from scripts.system_resource_check import check_system_resources
                resources = check_system_resources()
                resource_limitation = resources.get('summary', {}).get('resource_limitation', '系统资源不足')
            except Exception as e:
                resource_limitation = f"资源检查失败: {str(e)}"
        # Recommend device and compute type from the CUDA probe
        recommended_device = "cuda" if cuda_available else "cpu"
        compute_type = "float16" if cuda_available else "int8"
        # Per-model download state
        models_info = {}
        for size in ["tiny", "base", "small", "medium", "large-v1", "large-v2", "large-v3"]:
            models_info[size] = get_model_info(size)
        return EnvironmentCheckResponse(
            system_info=SystemInfo(
                os_name=os_name,
                os_version=os_version,
                python_version=python_version,
                cuda_available=cuda_available,
                cuda_version=cuda_version,
                gpu_info=gpu_info,
                cuda_setup_guide=cuda_setup_guide,
                torch_available=whisper_available,
                whisper_available=whisper_available,
                resource_limitation=resource_limitation
            ),
            models_info=models_info,
            recommended_device=recommended_device,
            compute_type=compute_type
        )
    except Exception as e:
        raise HTTPException(
            status_code=500,
            detail=f"环境检查失败: {str(e)}"
        )
@router.post("/download_model", summary="下载指定的Whisper模型")
async def download_model(model_size: str):
    """Download the given Whisper model (blocking, run in an executor thread).

    Args:
        model_size: one of tiny, base, small, medium, large-v1, large-v2, large-v3.

    Raises:
        HTTPException: 400 when whisper is unavailable, 500 on download failure.
    """
    try:
        # BUG FIX: WhisperModel is only imported when whisper_available is
        # True; without this guard a missing dependency surfaced as a
        # NameError/500 instead of a clear 400.
        if not whisper_available:
            raise HTTPException(
                status_code=400,
                detail={
                    "error": "WHISPER_NOT_AVAILABLE",
                    "message": "语音转文字功能不可用,系统资源不足或未安装相关依赖",
                    "suggestion": "请检查系统资源或安装相关依赖"
                }
            )
        # Nothing to do when the model is already present
        is_downloaded, model_path = is_model_downloaded(model_size)
        if is_downloaded:
            return {
                "status": "already_downloaded",
                "message": f"模型 {model_size} 已下载",
                "model_path": model_path
            }
        # Constructing a WhisperModel triggers the download; run it in an
        # executor so the event loop is not blocked.
        logger.info(f"开始下载模型: {model_size}")
        start_time = time.time()
        loop = asyncio.get_running_loop()  # get_event_loop is deprecated here
        await loop.run_in_executor(
            None,
            lambda: WhisperModel(model_size, device="cpu", compute_type="int8")
        )
        download_time = time.time() - start_time
        logger.info(f"模型下载完成,耗时: {download_time:.2f} 秒")
        # Verify the files actually landed on disk
        is_downloaded, model_path = is_model_downloaded(model_size)
        if not is_downloaded:
            raise HTTPException(
                status_code=500,
                detail="模型下载似乎完成但未找到模型文件"
            )
        return {
            "status": "success",
            "message": f"模型 {model_size} 下载完成",
            "model_path": model_path,
            "download_time": f"{download_time:.2f}秒"
        }
    except HTTPException:
        # BUG FIX: previously the broad handler below re-wrapped our own
        # HTTPExceptions, mangling their status and detail.
        raise
    except Exception as e:
        logger.error(f"模型下载失败: {str(e)}")
        logger.error(f"错误堆栈: {traceback.format_exc()}")
        raise HTTPException(
            status_code=500,
            detail=f"模型下载失败: {str(e)}"
        )
@router.get("/resource_check", response_model=ResourceCheckResponse)
async def check_system_resources_api():
"""
检查系统资源是否满足运行语音转文字的要求
返回:
ResourceCheckResponse: 包含系统资源检查结果的响应
"""
try:
# 使用scripts.system_resource_check中的系统资源检查函数,避免重复检查
from scripts.system_resource_check import check_system_resources
resources = check_system_resources()
# 检查是否可以运行语音转文字
can_run_speech_to_text = resources["summary"]["can_run_speech_to_text"] and whisper_available
# 获取限制原因
limitation_reason = None
if not can_run_speech_to_text:
if not whisper_available:
limitation_reason = "缺少必要的依赖,请安装faster-whisper"
else:
limitation_reason = resources.get("summary", {}).get("resource_limitation", "系统资源不足")
return ResourceCheckResponse(
**resources,
can_run_speech_to_text=can_run_speech_to_text,
limitation_reason=limitation_reason
)
except Exception as e:
# 使用print而不是logger避免循环引用
print(f"检查系统资源时出错: {e}")
raise HTTPException(
status_code=500,
detail=f"检查系统资源时出错: {str(e)}"
)
class DeleteModelRequest(BaseModel):
    """Request body for the DELETE /models endpoint."""
    model_size: str = Field(..., description="要删除的模型大小,可选值: tiny, base, small, medium, large-v1, large-v2, large-v3")
@router.delete("/models", summary="删除指定的Whisper模型")
async def delete_model(request: DeleteModelRequest):
"""
删除指定的Whisper模型
Args:
request: 包含要删除的模型大小
Returns:
dict: 包含删除操作结果的信息
"""
try:
# 检查系统资源是否足够运行语音转文字功能
if not whisper_available:
raise HTTPException(
status_code=400,
detail={
"error": "WHISPER_NOT_AVAILABLE",
"message": "语音转文字功能不可用,系统资源不足或未安装相关依赖",
"suggestion": "请检查系统资源或安装相关依赖"
}
)
# 检查模型是否已下载
is_downloaded, model_path = is_model_downloaded(request.model_size)
if not is_downloaded:
return {
"success": False,
"message": f"模型 {request.model_size} 未下载,无需删除",
"model_size": request.model_size
}
# 如果模型正在使用中,不允许删除
global whisper_model
if whisper_model is not None and whisper_model.model_size == request.model_size:
return {
"success": False,
"message": f"模型 {request.model_size} 当前正在使用中,无法删除。请先关闭使用该模型的任务后再尝试删除。",
"model_size": request.model_size
}
# 删除模型文件
import shutil
try:
if model_path and os.path.exists(model_path):
shutil.rmtree(model_path)
logger.info(f"已成功删除模型: {request.model_size},路径: {model_path}")
return {
"success": True,
"message": f"已成功删除模型: {request.model_size}",
"model_size": request.model_size,
"model_path": model_path
}
else:
return {
"success": False,
"message": f"模型路径不存在: {model_path}",
"model_size": request.model_size
}
except Exception as e:
logger.error(f"删除模型文件时出错: {str(e)}")
return {
"success": False,
"message": f"删除模型文件时出错: {str(e)}",
"model_size": request.model_size,
"model_path": model_path
}
except Exception as e:
logger.error(f"删除模型时出错: {str(e)}")
traceback.print_exc()
raise HTTPException(
status_code=500,
detail=f"删除模型时出错: {str(e)}"
)
@router.get("/check_stt_file", summary="检查指定CID的转换后文件是否存在")
async def check_stt_file(cid: int):
"""
检查指定CID的语音转文字文件是否存在
Args:
cid: 视频的CID
Returns:
dict: 包含文件是否存在的信息
"""
try:
# 构建文件路径
save_dir = os.path.join("output", "stt", str(cid))
json_path = os.path.join(save_dir, f"{cid}.json")
# 检查文件是否存在
exists = os.path.exists(json_path)
return {
"success": True,
"exists": exists,
"cid": cid,
"file_path": json_path if exists else None
}
except Exception as e:
logger.error(f"检查STT文件时出错: {str(e)}")
raise HTTPException(
status_code=500,
detail=f"检查STT文件时出错: {str(e)}"
)
def is_model_downloaded(model_name: str) -> Tuple[bool, Optional[str]]:
    """Check whether a faster-whisper model is in the local HuggingFace cache.

    Args:
        model_name: model size name, e.g. "base" or "large-v3".

    Returns:
        Tuple of (downloaded?, model directory path or None).
    """
    # Resolve the HuggingFace hub cache directory per platform.
    if os.name == 'nt':  # Windows
        home = os.environ.get('USERPROFILE', '')
    else:  # macOS / Linux
        home = os.path.expanduser('~')
    cache_dir = os.path.join(home, '.cache', 'huggingface', 'hub')

    # Models may be published under either provider namespace.
    for provider in ("guillaumekln", "Systran"):
        repo_id = f"{provider}/faster-whisper-{model_name}"
        candidate = os.path.join(cache_dir, 'models--' + repo_id.replace('/', '--'))
        # A usable cached model has a 'snapshots' entry inside its directory.
        if os.path.exists(candidate) and os.path.exists(os.path.join(candidate, 'snapshots')):
            return True, candidate
    return False, None
|
2929004360/ruoyi-sign
| 8,056
|
ruoyi-common/src/main/java/com/ruoyi/common/utils/file/FileUploadUtils.java
|
package com.ruoyi.common.utils.file;
import java.io.File;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.Objects;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.io.FilenameUtils;
import org.springframework.web.multipart.MultipartFile;
import com.ruoyi.common.config.RuoYiConfig;
import com.ruoyi.common.constant.Constants;
import com.ruoyi.common.exception.file.FileNameLengthLimitExceededException;
import com.ruoyi.common.exception.file.FileSizeLimitExceededException;
import com.ruoyi.common.exception.file.InvalidExtensionException;
import com.ruoyi.common.utils.DateUtils;
import com.ruoyi.common.utils.StringUtils;
import com.ruoyi.common.utils.uuid.Seq;
/**
 * File upload utility class.
 *
 * @author ruoyi
 */
public class FileUploadUtils
{
    /**
     * Default maximum upload size: 50 MB.
     */
    public static final long DEFAULT_MAX_SIZE = 50 * 1024 * 1024L;

    /**
     * Default maximum length of an uploaded file name: 100 characters.
     */
    public static final int DEFAULT_FILE_NAME_LENGTH = 100;

    /**
     * Default upload base directory (the configured profile path).
     */
    private static String defaultBaseDir = RuoYiConfig.getProfile();

    public static void setDefaultBaseDir(String defaultBaseDir)
    {
        FileUploadUtils.defaultBaseDir = defaultBaseDir;
    }

    public static String getDefaultBaseDir()
    {
        return defaultBaseDir;
    }

    /**
     * Upload a file using the default base directory and allowed extensions.
     *
     * @param file the uploaded file
     * @return the relative name of the stored file
     * @throws IOException if the upload fails for any reason
     */
    public static final String upload(MultipartFile file) throws IOException
    {
        try
        {
            return upload(getDefaultBaseDir(), file, MimeTypeUtils.DEFAULT_ALLOWED_EXTENSION);
        }
        catch (Exception e)
        {
            throw new IOException(e.getMessage(), e);
        }
    }

    /**
     * Upload a file into the given base directory with default allowed extensions.
     *
     * @param baseDir base directory relative to the application
     * @param file    the uploaded file
     * @return the relative name of the stored file
     * @throws IOException if the upload fails for any reason
     */
    public static final String upload(String baseDir, MultipartFile file) throws IOException
    {
        try
        {
            return upload(baseDir, file, MimeTypeUtils.DEFAULT_ALLOWED_EXTENSION);
        }
        catch (Exception e)
        {
            throw new IOException(e.getMessage(), e);
        }
    }

    /**
     * Upload a file after validating its name length, size and extension.
     *
     * @param baseDir          base directory relative to the application
     * @param file             the uploaded file
     * @param allowedExtension permitted file extensions
     * @return the relative name of the stored file
     * @throws FileSizeLimitExceededException       if the file exceeds the maximum size
     * @throws FileNameLengthLimitExceededException if the file name is too long
     * @throws IOException                          on read/write errors
     * @throws InvalidExtensionException            if the extension is not allowed
     */
    public static final String upload(String baseDir, MultipartFile file, String[] allowedExtension)
            throws FileSizeLimitExceededException, IOException, FileNameLengthLimitExceededException,
            InvalidExtensionException
    {
        int fileNamelength = Objects.requireNonNull(file.getOriginalFilename()).length();
        if (fileNamelength > FileUploadUtils.DEFAULT_FILE_NAME_LENGTH)
        {
            throw new FileNameLengthLimitExceededException(FileUploadUtils.DEFAULT_FILE_NAME_LENGTH);
        }

        assertAllowed(file, allowedExtension);

        String fileName = extractFilename(file);

        String absPath = getAbsoluteFile(baseDir, fileName).getAbsolutePath();
        file.transferTo(Paths.get(absPath));
        return getPathFileName(baseDir, fileName);
    }

    /**
     * Build the stored file name as datePath/baseName_seq.ext, with unsafe
     * characters stripped from the original base name.
     */
    public static final String extractFilename(MultipartFile file)
    {
        return StringUtils.format("{}/{}_{}.{}", DateUtils.datePath(),
                FilenameUtils.getBaseName(stringFilter(file.getOriginalFilename())), Seq.getId(Seq.uploadSeqType), getExtension(file));
    }

    /**
     * Strip whitespace, punctuation and other special characters from a file name.
     */
    public static String stringFilter (String str){
        String regEx="[\\u00A0\\s\"`~!@#$%^&*()+=|{}':;',\\[\\].<>/?~!@#¥%……&*()——+|{}【】‘;:”“'。,、?]";
        Pattern p = Pattern.compile(regEx);
        Matcher m = p.matcher(str);
        return m.replaceAll("").trim();
    }

    /**
     * Resolve the absolute destination file, creating parent directories as needed.
     */
    public static final File getAbsoluteFile(String uploadDir, String fileName) throws IOException
    {
        File desc = new File(uploadDir + File.separator + fileName);
        if (!desc.getParentFile().exists())
        {
            desc.getParentFile().mkdirs();
        }
        return desc;
    }

    /**
     * Convert an absolute upload location into a resource URL path.
     */
    public static final String getPathFileName(String uploadDir, String fileName) throws IOException
    {
        int dirLastIndex = RuoYiConfig.getProfile().length() + 1;
        String currentDir = StringUtils.substring(uploadDir, dirLastIndex);
        return Constants.RESOURCE_PREFIX + "/" + currentDir + "/" + fileName;
    }

    /**
     * Validate file size and extension.
     *
     * @param file             the uploaded file
     * @param allowedExtension permitted file extensions
     * @throws FileSizeLimitExceededException if the file exceeds the maximum size
     * @throws InvalidExtensionException      if the extension is not allowed
     */
    public static final void assertAllowed(MultipartFile file, String[] allowedExtension)
            throws FileSizeLimitExceededException, InvalidExtensionException
    {
        long size = file.getSize();
        if (size > DEFAULT_MAX_SIZE)
        {
            throw new FileSizeLimitExceededException(DEFAULT_MAX_SIZE / 1024 / 1024);
        }

        String fileName = file.getOriginalFilename();
        String extension = getExtension(file);
        if (allowedExtension != null && !isAllowedExtension(extension, allowedExtension))
        {
            // Reference comparison (==) is intentional here: the allowed
            // lists are shared constant arrays from MimeTypeUtils.
            if (allowedExtension == MimeTypeUtils.IMAGE_EXTENSION)
            {
                throw new InvalidExtensionException.InvalidImageExtensionException(allowedExtension, extension,
                        fileName);
            }
            else if (allowedExtension == MimeTypeUtils.FLASH_EXTENSION)
            {
                throw new InvalidExtensionException.InvalidFlashExtensionException(allowedExtension, extension,
                        fileName);
            }
            else if (allowedExtension == MimeTypeUtils.MEDIA_EXTENSION)
            {
                throw new InvalidExtensionException.InvalidMediaExtensionException(allowedExtension, extension,
                        fileName);
            }
            else if (allowedExtension == MimeTypeUtils.VIDEO_EXTENSION)
            {
                throw new InvalidExtensionException.InvalidVideoExtensionException(allowedExtension, extension,
                        fileName);
            }
            else
            {
                throw new InvalidExtensionException(allowedExtension, extension, fileName);
            }
        }
    }

    /**
     * Check whether the extension is in the allowed list (case-insensitive).
     *
     * @param extension        the file extension to test
     * @param allowedExtension permitted file extensions
     * @return true if the extension is allowed
     */
    public static final boolean isAllowedExtension(String extension, String[] allowedExtension)
    {
        for (String str : allowedExtension)
        {
            if (str.equalsIgnoreCase(extension))
            {
                return true;
            }
        }
        return false;
    }

    /**
     * Get the extension of an uploaded file, falling back to the MIME type
     * when the original file name has none.
     *
     * @param file the form file
     * @return the extension
     */
    public static final String getExtension(MultipartFile file)
    {
        String extension = FilenameUtils.getExtension(file.getOriginalFilename());
        if (StringUtils.isEmpty(extension))
        {
            extension = MimeTypeUtils.getExtension(Objects.requireNonNull(file.getContentType()));
        }
        return extension;
    }

    /**
     * Get the extension of an existing file on disk.
     *
     * @param filePath the file path
     * @return the extension, or "" when the file does not exist or has none
     * @author fengcheng
     */
    public static String getFileExtension(String filePath) {
        File file = new File(filePath);
        String extension = "";
        try {
            if (file.exists()) {
                String name = file.getName();
                extension = name.substring(name.lastIndexOf(".") + 1);
            }
        } catch (Exception e) {
            extension = "";
        }
        return extension;
    }
}
|
2929004360/ruoyi-sign
| 2,483
|
ruoyi-common/src/main/java/com/ruoyi/common/utils/file/ImageUtils.java
|
package com.ruoyi.common.utils.file;
import java.io.ByteArrayInputStream;
import java.io.FileInputStream;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;
import java.util.Arrays;
import org.apache.poi.util.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.ruoyi.common.config.RuoYiConfig;
import com.ruoyi.common.constant.Constants;
import com.ruoyi.common.utils.StringUtils;
/**
 * Image handling utility class.
 *
 * @author ruoyi
 */
public class ImageUtils
{
    private static final Logger log = LoggerFactory.getLogger(ImageUtils.class);

    /**
     * Read an image and return its raw bytes.
     *
     * @param imagePath local resource path or http(s) URL
     * @return image bytes, or {@code null} when the image cannot be read
     */
    public static byte[] getImage(String imagePath)
    {
        InputStream is = getFile(imagePath);
        try
        {
            return IOUtils.toByteArray(is);
        }
        catch (Exception e)
        {
            // Bug fix: pass the throwable directly so SLF4J records the stack
            // trace; a "{}" placeholder consumed it as a format argument.
            log.error("图片加载异常", e);
            return null;
        }
        finally
        {
            IOUtils.closeQuietly(is);
        }
    }

    /**
     * Open an image as an InputStream backed by an in-memory copy.
     *
     * @param imagePath local resource path or http(s) URL
     * @return stream over the image bytes, or {@code null} on failure
     */
    public static InputStream getFile(String imagePath)
    {
        try
        {
            byte[] result = readFile(imagePath);
            // Bug fix: readFile returns null on failure; guard explicitly
            // instead of relying on Arrays.copyOf throwing a caught NPE.
            if (result == null)
            {
                return null;
            }
            result = Arrays.copyOf(result, result.length);
            return new ByteArrayInputStream(result);
        }
        catch (Exception e)
        {
            log.error("获取图片异常", e);
        }
        return null;
    }

    /**
     * Read a file (remote URL or local resource path) into a byte array.
     *
     * @param url http(s) URL, or a path under the resource prefix
     * @return file bytes, or {@code null} on failure
     */
    public static byte[] readFile(String url)
    {
        InputStream in = null;
        try
        {
            if (url.startsWith("http"))
            {
                // Remote address: fetch over the network with fixed timeouts.
                URL urlObj = new URL(url);
                URLConnection urlConnection = urlObj.openConnection();
                urlConnection.setConnectTimeout(30 * 1000);
                urlConnection.setReadTimeout(60 * 1000);
                urlConnection.setDoInput(true);
                in = urlConnection.getInputStream();
            }
            else
            {
                // Local address: map the resource prefix onto the profile dir.
                String localPath = RuoYiConfig.getProfile();
                String downloadPath = localPath + StringUtils.substringAfter(url, Constants.RESOURCE_PREFIX);
                in = new FileInputStream(downloadPath);
            }
            return IOUtils.toByteArray(in);
        }
        catch (Exception e)
        {
            log.error("获取文件路径异常", e);
            return null;
        }
        finally
        {
            IOUtils.closeQuietly(in);
        }
    }
}
|
281677160/openwrt-package
| 101,698
|
luci-app-homeproxy/po/zh_Hans/homeproxy.po
|
msgid ""
msgstr ""
"Content-Type: text/plain; charset=UTF-8\n"
"Project-Id-Version: PACKAGE VERSION\n"
"Last-Translator: Automatically generated\n"
"Language-Team: none\n"
"Language: zh_Hans\n"
"MIME-Version: 1.0\n"
"Content-Transfer-Encoding: 8bit\n"
#: htdocs/luci-static/resources/view/homeproxy/status.js:206
msgid "%s log"
msgstr "%s 日志"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1456
msgid "%s nodes removed"
msgstr "移除了 %s 个节点"
#: htdocs/luci-static/resources/homeproxy.js:279
msgid "/etc/homeproxy/certs/..., /etc/acme/..., /etc/ssl/..."
msgstr "/etc/homeproxy/certs/...,/etc/acme/...,/etc/ssl/..."
#: htdocs/luci-static/resources/view/homeproxy/client.js:609
msgid "4 or 6. Not limited if empty."
msgstr "4 或 6。留空不限制。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:775
#: htdocs/luci-static/resources/view/homeproxy/client.js:1209
msgid ""
"<code>%s</code> will be temporarily overwritten to <code>%s</code> after 50 "
"triggers in 30s if not enabled."
msgstr ""
"<code>%s</code> 在未启用的情况下,50 次触发后会在 30 秒内临时覆盖为 "
"<code>%s</code>。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1086
#: htdocs/luci-static/resources/view/homeproxy/node.js:1105
#: htdocs/luci-static/resources/view/homeproxy/server.js:768
#: htdocs/luci-static/resources/view/homeproxy/server.js:787
msgid "<strong>Save your configuration before uploading files!</strong>"
msgstr "<strong>上传文件前请先保存配置!</strong>"
#: htdocs/luci-static/resources/view/homeproxy/server.js:664
msgid "API token"
msgstr "API 令牌"
#: htdocs/luci-static/resources/view/homeproxy/node.js:670
msgid "Accept any if empty."
msgstr "留空则不校验。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1137
msgid "Accept empty query response"
msgstr "接受空查询响应"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1398
msgid "Access Control"
msgstr "访问控制"
#: htdocs/luci-static/resources/view/homeproxy/server.js:647
msgid "Access key ID"
msgstr "访问密钥 ID"
#: htdocs/luci-static/resources/view/homeproxy/server.js:653
msgid "Access key secret"
msgstr "访问密钥"
#: htdocs/luci-static/resources/view/homeproxy/client.js:669
#: htdocs/luci-static/resources/view/homeproxy/client.js:1145
msgid "Action"
msgstr "操作"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1055
msgid "Add a DNS rule"
msgstr "新增 DNS 规则"
#: htdocs/luci-static/resources/view/homeproxy/client.js:933
msgid "Add a DNS server"
msgstr "新增 DNS 服务器"
#: htdocs/luci-static/resources/view/homeproxy/node.js:396
msgid "Add a node"
msgstr "新增节点"
#: htdocs/luci-static/resources/view/homeproxy/client.js:405
msgid "Add a routing node"
msgstr "新增路由节点"
#: htdocs/luci-static/resources/view/homeproxy/client.js:576
msgid "Add a routing rule"
msgstr "新增路由规则"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1319
msgid "Add a rule set"
msgstr "新增规则集"
#: htdocs/luci-static/resources/view/homeproxy/server.js:156
msgid "Add a server"
msgstr "新增服务器"
#: htdocs/luci-static/resources/view/homeproxy/client.js:974
msgid "Additional headers to be sent to the DNS server."
msgstr "发送到 DNS 服务器的附加标头。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:957
#: htdocs/luci-static/resources/view/homeproxy/node.js:448
msgid "Address"
msgstr "地址"
#: htdocs/luci-static/resources/view/homeproxy/client.js:987
msgid "Address resolver"
msgstr "地址解析器"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1019
msgid "Address strategy"
msgstr "地址解析策略"
#: htdocs/luci-static/resources/view/homeproxy/server.js:640
msgid "Alibaba Cloud DNS"
msgstr "阿里云 DNS"
#: htdocs/luci-static/resources/view/homeproxy/client.js:186
#: htdocs/luci-static/resources/view/homeproxy/client.js:220
msgid "Aliyun Public DNS (223.5.5.5)"
msgstr "阿里云公共 DNS(223.5.5.5)"
#: htdocs/luci-static/resources/view/homeproxy/client.js:266
msgid "All ports"
msgstr "所有端口"
#: htdocs/luci-static/resources/view/homeproxy/server.js:172
msgid "Allow access from the Internet."
msgstr "允许来自互联网的访问。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1040
#: htdocs/luci-static/resources/view/homeproxy/node.js:1382
msgid "Allow insecure"
msgstr "允许不安全连接"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1041
msgid "Allow insecure connection at TLS client."
msgstr "允许 TLS 客户端侧的不安全连接。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1383
msgid "Allow insecure connection by default when add nodes from subscriptions."
msgstr "从订阅获取节点时,默认允许不安全连接。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:874
#: htdocs/luci-static/resources/view/homeproxy/server.js:482
msgid "Allowed payload size is in the request."
msgstr "请求中允许的载荷大小。"
#: htdocs/luci-static/resources/view/homeproxy/status.js:95
msgid "Already at the latest version."
msgstr "已是最新版本。"
#: htdocs/luci-static/resources/view/homeproxy/status.js:92
msgid "Already in updating."
msgstr "已在更新中。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:742
#: htdocs/luci-static/resources/view/homeproxy/server.js:391
msgid "Alter ID"
msgstr "额外 ID"
#: htdocs/luci-static/resources/view/homeproxy/server.js:678
msgid "Alternative HTTP port"
msgstr "替代 HTTP 端口"
#: htdocs/luci-static/resources/view/homeproxy/server.js:684
msgid "Alternative TLS port"
msgstr "替代 HTTPS 端口"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1419
msgid "An error occurred during updating subscriptions: %s"
msgstr "更新订阅时发生错误:%s"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1226
msgid "Answer"
msgstr "回答"
#: htdocs/luci-static/resources/view/homeproxy/node.js:429
#: htdocs/luci-static/resources/view/homeproxy/server.js:176
msgid "AnyTLS"
msgstr "AnyTLS"
#: htdocs/luci-static/resources/view/homeproxy/server.js:245
msgid "AnyTLS padding scheme in array."
msgstr "AnyTLS 填充方案。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:799
#: htdocs/luci-static/resources/view/homeproxy/client.js:909
#: htdocs/luci-static/resources/view/homeproxy/client.js:1194
msgid ""
"Append a <code>edns0-subnet</code> OPT extra record with the specified IP "
"prefix to every query by default.<br/>If value is an IP address instead of "
"prefix, <code>/32</code> or <code>/128</code> will be appended automatically."
msgstr ""
"将带有指定 IP 前缀的 <code>edns0-subnet</code> OPT 记录附加到每个查询。如果值"
"是 IP 地址而不是前缀,则会自动添加 <code>/32</code> 或 <code>/128</code>。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1072
msgid "Append self-signed certificate"
msgstr "追加自签名证书"
#: htdocs/luci-static/resources/view/homeproxy/node.js:407
msgid "Applied"
msgstr "已应用"
#: htdocs/luci-static/resources/view/homeproxy/node.js:400
#: htdocs/luci-static/resources/view/homeproxy/node.js:410
msgid "Apply"
msgstr "应用"
#: htdocs/luci-static/resources/view/homeproxy/node.js:18
msgid "Are you sure to allow insecure?"
msgstr "确定要允许不安全连接吗?"
#: htdocs/luci-static/resources/view/homeproxy/server.js:363
msgid "Auth timeout"
msgstr "认证超时"
#: htdocs/luci-static/resources/view/homeproxy/node.js:766
msgid "Authenticated length"
msgstr "认证长度"
#: htdocs/luci-static/resources/view/homeproxy/node.js:561
#: htdocs/luci-static/resources/view/homeproxy/server.js:282
msgid "Authentication payload"
msgstr "认证载荷"
#: htdocs/luci-static/resources/view/homeproxy/node.js:554
#: htdocs/luci-static/resources/view/homeproxy/server.js:275
msgid "Authentication type"
msgstr "认证类型"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1335
msgid "Auto update"
msgstr "自动更新"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1336
msgid "Auto update subscriptions and geodata."
msgstr "自动更新订阅和地理数据。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:702
msgid "BBR"
msgstr "BBR"
#: htdocs/luci-static/resources/view/homeproxy/status.js:235
msgid "BaiDu"
msgstr "百度"
#: htdocs/luci-static/resources/view/homeproxy/node.js:556
#: htdocs/luci-static/resources/view/homeproxy/server.js:277
msgid "Base64"
msgstr "Base64"
#: htdocs/luci-static/resources/view/homeproxy/client.js:323
msgid "Based on google/gvisor."
msgstr "基于 google/gvisor。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1340
msgid "Binary file"
msgstr "二进制文件"
#: htdocs/luci-static/resources/view/homeproxy/client.js:453
#: htdocs/luci-static/resources/view/homeproxy/client.js:1411
#: htdocs/luci-static/resources/view/homeproxy/server.js:874
msgid "Bind interface"
msgstr "绑定接口"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1412
msgid ""
"Bind outbound traffic to specific interface. Leave empty to auto detect."
msgstr "绑定出站流量至指定端口。留空自动检测。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:617
#: htdocs/luci-static/resources/view/homeproxy/client.js:1105
msgid "BitTorrent"
msgstr "BitTorrent"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1369
msgid "Blacklist mode"
msgstr "黑名单模式"
#: htdocs/luci-static/resources/view/homeproxy/client.js:365
msgid "Block"
msgstr "封锁"
#: htdocs/luci-static/resources/view/homeproxy/client.js:612
#: htdocs/luci-static/resources/view/homeproxy/client.js:640
#: htdocs/luci-static/resources/view/homeproxy/client.js:1091
#: htdocs/luci-static/resources/view/homeproxy/client.js:1101
#: htdocs/luci-static/resources/view/homeproxy/server.js:869
msgid "Both"
msgstr "全部"
#: htdocs/luci-static/resources/view/homeproxy/client.js:343
msgid "Bypass CN traffic"
msgstr "绕过中国流量"
#: htdocs/luci-static/resources/view/homeproxy/client.js:253
msgid "Bypass mainland China"
msgstr "大陆白名单"
#: htdocs/luci-static/resources/view/homeproxy/client.js:344
msgid "Bypass mainland China traffic via firewall rules by default."
msgstr "默认使用防火墙规则绕过中国大陆流量。"
#: htdocs/luci-static/resources/view/homeproxy/server.js:627
msgid "CA provider"
msgstr "CA 颁发机构"
#: htdocs/luci-static/resources/view/homeproxy/client.js:221
msgid "CNNIC Public DNS (210.2.4.8)"
msgstr "CNNIC 公共 DNS(210.2.4.8)"
#: htdocs/luci-static/resources/view/homeproxy/node.js:700
msgid "CUBIC"
msgstr "CUBIC"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1239
msgid "Cancel"
msgstr "取消"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1077
#: htdocs/luci-static/resources/view/homeproxy/server.js:756
msgid "Certificate path"
msgstr "证书路径"
#: htdocs/luci-static/resources/view/homeproxy/status.js:57
msgid "Check"
msgstr "检查"
#: htdocs/luci-static/resources/view/homeproxy/status.js:105
msgid "Check update"
msgstr "检查更新"
#: htdocs/luci-static/resources/view/homeproxy/client.js:217
msgid "China DNS server"
msgstr "国内 DNS 服务器"
#: htdocs/luci-static/resources/view/homeproxy/status.js:244
msgid "China IPv4 list version"
msgstr "国内 IPv4 库版本"
#: htdocs/luci-static/resources/view/homeproxy/status.js:248
msgid "China IPv6 list version"
msgstr "国内 IPv6 库版本"
#: htdocs/luci-static/resources/view/homeproxy/status.js:252
msgid "China list version"
msgstr "国内域名列表版本"
#: htdocs/luci-static/resources/view/homeproxy/client.js:629
msgid "Chromium / Cronet"
msgstr "Chromium / Cronet"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1064
#: htdocs/luci-static/resources/view/homeproxy/server.js:586
msgid "Cipher suites"
msgstr "密码套件"
#: htdocs/luci-static/resources/view/homeproxy/client.js:183
msgid "Cisco Public DNS (208.67.222.222)"
msgstr "思科公共 DNS(208.67.222.222)"
#: htdocs/luci-static/resources/view/homeproxy/status.js:214
msgid "Clean log"
msgstr "清空日志"
#: htdocs/luci-static/resources/view/homeproxy/client.js:627
msgid "Client"
msgstr "客户端"
#: root/usr/share/luci/menu.d/luci-app-homeproxy.json:14
msgid "Client Settings"
msgstr "客户端设置"
#: htdocs/luci-static/resources/view/homeproxy/node.js:664
msgid "Client version"
msgstr "客户端版本"
#: htdocs/luci-static/resources/view/homeproxy/client.js:182
msgid "CloudFlare Public DNS (1.1.1.1)"
msgstr "CloudFlare 公共 DNS(1.1.1.1)"
#: htdocs/luci-static/resources/view/homeproxy/server.js:641
msgid "Cloudflare"
msgstr "Cloudflare"
#: htdocs/luci-static/resources/view/homeproxy/client.js:114
#: htdocs/luci-static/resources/view/homeproxy/server.js:142
#: htdocs/luci-static/resources/view/homeproxy/status.js:176
msgid "Collecting data..."
msgstr "正在收集数据中..."
#: htdocs/luci-static/resources/view/homeproxy/client.js:267
msgid "Common ports only (bypass P2P traffic)"
msgstr "仅常用端口(绕过 P2P 流量)"
#: htdocs/luci-static/resources/view/homeproxy/node.js:698
#: htdocs/luci-static/resources/view/homeproxy/server.js:354
msgid "Congestion control algorithm"
msgstr "拥塞控制算法"
#: htdocs/luci-static/resources/view/homeproxy/status.js:232
msgid "Connection check"
msgstr "连接检查"
#: htdocs/luci-static/resources/view/homeproxy/client.js:255
msgid "Custom routing"
msgstr "自定义路由"
#: htdocs/luci-static/resources/view/homeproxy/client.js:618
msgid "DNS"
msgstr "DNS"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1046
msgid "DNS Rules"
msgstr "DNS 规则"
#: htdocs/luci-static/resources/view/homeproxy/client.js:924
msgid "DNS Servers"
msgstr "DNS 服务器"
#: htdocs/luci-static/resources/view/homeproxy/client.js:872
msgid "DNS Settings"
msgstr "DNS 设置"
#: htdocs/luci-static/resources/view/homeproxy/server.js:639
msgid "DNS provider"
msgstr "DNS 提供商"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1055
msgid "DNS rule"
msgstr "DNS 规则"
#: htdocs/luci-static/resources/view/homeproxy/client.js:179
#: htdocs/luci-static/resources/view/homeproxy/client.js:749
#: htdocs/luci-static/resources/view/homeproxy/client.js:933
msgid "DNS server"
msgstr "DNS 服务器"
#: htdocs/luci-static/resources/view/homeproxy/server.js:635
msgid "DNS01 challenge"
msgstr "DNS01 验证"
#: htdocs/luci-static/resources/view/homeproxy/client.js:619
#: htdocs/luci-static/resources/view/homeproxy/client.js:1106
msgid "DTLS"
msgstr "DTLS"
#: htdocs/luci-static/resources/view/homeproxy/status.js:136
msgid "Debug"
msgstr "调试"
#: htdocs/luci-static/resources/homeproxy.js:17
#: htdocs/luci-static/resources/view/homeproxy/client.js:433
#: htdocs/luci-static/resources/view/homeproxy/client.js:603
#: htdocs/luci-static/resources/view/homeproxy/client.js:755
#: htdocs/luci-static/resources/view/homeproxy/client.js:1082
#: htdocs/luci-static/resources/view/homeproxy/client.js:1380
#: htdocs/luci-static/resources/view/homeproxy/node.js:710
msgid "Default"
msgstr "默认"
#: htdocs/luci-static/resources/view/homeproxy/client.js:382
#: htdocs/luci-static/resources/view/homeproxy/client.js:434
#: htdocs/luci-static/resources/view/homeproxy/client.js:756
#: htdocs/luci-static/resources/view/homeproxy/client.js:887
#: htdocs/luci-static/resources/view/homeproxy/client.js:994
#: htdocs/luci-static/resources/view/homeproxy/client.js:1160
msgid "Default DNS (issued by WAN)"
msgstr "默认 DNS(由 WAN 下发)"
#: htdocs/luci-static/resources/view/homeproxy/client.js:882
msgid "Default DNS server"
msgstr "默认 DNS 服务器"
#: htdocs/luci-static/resources/view/homeproxy/client.js:377
msgid "Default DNS server for resolving domain name in the server address."
msgstr "用于解析服务器地址中的域名的默认 DNS 服务器。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:877
msgid "Default DNS strategy"
msgstr "默认 DNS 解析策略"
#: htdocs/luci-static/resources/view/homeproxy/client.js:357
msgid "Default outbound"
msgstr "默认出站"
#: htdocs/luci-static/resources/view/homeproxy/client.js:376
msgid "Default outbound DNS"
msgstr "默认出站 DNS"
#: htdocs/luci-static/resources/view/homeproxy/client.js:358
msgid "Default outbound for connections not matched by any routing rules."
msgstr "用于未被任何路由规则匹配的连接的默认出站。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1389
msgid "Default packet encoding"
msgstr "默认包封装格式"
#: htdocs/luci-static/resources/view/homeproxy/server.js:606
msgid "Default server name"
msgstr "默认服务器名称"
#: htdocs/luci-static/resources/view/homeproxy/client.js:364
#: htdocs/luci-static/resources/view/homeproxy/client.js:466
#: htdocs/luci-static/resources/view/homeproxy/client.js:684
#: htdocs/luci-static/resources/view/homeproxy/client.js:1032
#: htdocs/luci-static/resources/view/homeproxy/client.js:1381
#: htdocs/luci-static/resources/view/homeproxy/node.js:428
msgid "Direct"
msgstr "直连"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1513
msgid "Direct Domain List"
msgstr "直连域名列表"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1427
#: htdocs/luci-static/resources/view/homeproxy/client.js:1472
msgid "Direct IPv4 IP-s"
msgstr "直连 IPv4 地址"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1430
#: htdocs/luci-static/resources/view/homeproxy/client.js:1475
msgid "Direct IPv6 IP-s"
msgstr "直连 IPv6 地址"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1433
msgid "Direct MAC-s"
msgstr "直连 MAC 地址"
#: htdocs/luci-static/resources/view/homeproxy/client.js:123
#: htdocs/luci-static/resources/view/homeproxy/client.js:151
#: htdocs/luci-static/resources/view/homeproxy/client.js:1421
#: htdocs/luci-static/resources/view/homeproxy/node.js:499
#: htdocs/luci-static/resources/view/homeproxy/node.js:555
#: htdocs/luci-static/resources/view/homeproxy/node.js:568
#: htdocs/luci-static/resources/view/homeproxy/node.js:1115
#: htdocs/luci-static/resources/view/homeproxy/node.js:1368
#: htdocs/luci-static/resources/view/homeproxy/server.js:276
#: htdocs/luci-static/resources/view/homeproxy/server.js:289
msgid "Disable"
msgstr "禁用"
#: htdocs/luci-static/resources/view/homeproxy/client.js:363
msgid "Disable (the service)"
msgstr "禁用(服务)"
#: htdocs/luci-static/resources/view/homeproxy/client.js:787
#: htdocs/luci-static/resources/view/homeproxy/client.js:899
msgid "Disable DNS cache"
msgstr "禁用 DNS 缓存"
#: htdocs/luci-static/resources/view/homeproxy/client.js:788
msgid "Disable DNS cache in this query."
msgstr "在本次查询中禁用 DNS 缓存。"
#: htdocs/luci-static/resources/view/homeproxy/server.js:670
msgid "Disable HTTP challenge"
msgstr "禁用 HTTP 验证"
#: htdocs/luci-static/resources/view/homeproxy/node.js:605
#: htdocs/luci-static/resources/view/homeproxy/server.js:321
msgid "Disable Path MTU discovery"
msgstr "禁用路径 MTU 探测"
#: htdocs/luci-static/resources/view/homeproxy/server.js:674
msgid "Disable TLS ALPN challenge"
msgstr "禁用 TLS ALPN 认证"
#: htdocs/luci-static/resources/view/homeproxy/client.js:710
msgid "Disable UDP domain unmapping"
msgstr "禁用 UDP 域名映射"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1181
msgid "Disable cache and save cache in this query."
msgstr "在本次查询中禁用缓存。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:901
msgid "Disable cache expire"
msgstr "缓存永不过期"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1180
msgid "Disable dns cache"
msgstr "禁用 DNS 缓存"
#: htdocs/luci-static/resources/view/homeproxy/node.js:606
#: htdocs/luci-static/resources/view/homeproxy/server.js:322
msgid ""
"Disables Path MTU Discovery (RFC 8899). Packets will then be at most 1252 "
"(IPv4) / 1232 (IPv6) bytes in size."
msgstr ""
"禁用路径 MTU 发现 (RFC 8899)。 数据包的大小最多为 1252 (IPv4) / 1232 (IPv6) "
"字节。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:814
#: htdocs/luci-static/resources/view/homeproxy/client.js:1250
msgid "Domain keyword"
msgstr "域名关键词"
#: htdocs/luci-static/resources/view/homeproxy/client.js:805
#: htdocs/luci-static/resources/view/homeproxy/client.js:1241
msgid "Domain name"
msgstr "域名"
#: htdocs/luci-static/resources/view/homeproxy/client.js:818
#: htdocs/luci-static/resources/view/homeproxy/client.js:1254
msgid "Domain regex"
msgstr "域名正则表达式"
#: htdocs/luci-static/resources/view/homeproxy/client.js:427
msgid "Domain resolver"
msgstr "域名解析器"
#: htdocs/luci-static/resources/view/homeproxy/client.js:347
#: htdocs/luci-static/resources/view/homeproxy/client.js:446
#: htdocs/luci-static/resources/view/homeproxy/client.js:1173
msgid "Domain strategy"
msgstr "域名解析策略"
#: htdocs/luci-static/resources/view/homeproxy/client.js:781
msgid "Domain strategy for resolving the domain names."
msgstr "目标域名的解析策略。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:810
#: htdocs/luci-static/resources/view/homeproxy/client.js:1246
msgid "Domain suffix"
msgstr "域名后缀"
#: htdocs/luci-static/resources/view/homeproxy/server.js:600
msgid "Domains"
msgstr "域名"
#: htdocs/luci-static/resources/view/homeproxy/client.js:774
msgid "Don't drop packets"
msgstr "不丢弃数据包"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1208
msgid "Don't drop requests"
msgstr "不丢弃请求"
#: htdocs/luci-static/resources/view/homeproxy/node.js:989
#: htdocs/luci-static/resources/view/homeproxy/server.js:517
msgid "Download bandwidth"
msgstr "下载带宽"
#: htdocs/luci-static/resources/view/homeproxy/node.js:990
#: htdocs/luci-static/resources/view/homeproxy/server.js:518
msgid "Download bandwidth in Mbps."
msgstr "下载带宽(单位:Mbps)。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:770
#: htdocs/luci-static/resources/view/homeproxy/client.js:776
msgid "Drop packets"
msgstr "丢弃数据包"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1203
#: htdocs/luci-static/resources/view/homeproxy/client.js:1210
msgid "Drop requests"
msgstr "丢弃请求"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1375
msgid ""
"Drop/keep nodes that contain the specific keywords. <a target=\"_blank\" "
"href=\"https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/"
"Regular_Expressions\">Regex</a> is supported."
msgstr ""
"丢弃/保留 包含指定关键词的节点。支持<a target=\"_blank\" href=\"https://"
"developer.mozilla.org/zh-CN/docs/Web/JavaScript/Guide/Regular_Expressions\">"
"正则表达式</a>。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1367
msgid "Drop/keep specific nodes from subscriptions."
msgstr "从订阅中 丢弃/保留 指定节点"
#: htdocs/luci-static/resources/view/homeproxy/server.js:691
msgid ""
"EAB (External Account Binding) contains information necessary to bind or map "
"an ACME account to some other account known by the CA.<br/>External account "
"bindings are \"used to associate an ACME account with an existing account in "
"a non-ACME system, such as a CA customer database."
msgstr ""
"EAB(外部帐户绑定)包含将 ACME 帐户绑定或映射到 CA 已知的其他帐户所需的信息。"
"<br/>外部帐户绑定“用于将 ACME 帐户与非 ACME 系统中的现有帐户相关联,例如 CA "
"客户数据库”。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1094
msgid ""
"ECH (Encrypted Client Hello) is a TLS extension that allows a client to "
"encrypt the first part of its ClientHello message."
msgstr ""
"ECH(Encrypted Client Hello)是一个 TLS 扩展,它允许客户端加密其 ClientHello "
"信息的第一部分。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1109
#: htdocs/luci-static/resources/view/homeproxy/server.js:835
msgid "ECH config"
msgstr "ECH 配置"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1098
msgid "ECH config path"
msgstr "ECH 配置路径"
#: htdocs/luci-static/resources/view/homeproxy/server.js:794
msgid "ECH key"
msgstr "ECH 密钥"
#: htdocs/luci-static/resources/view/homeproxy/client.js:798
#: htdocs/luci-static/resources/view/homeproxy/client.js:908
#: htdocs/luci-static/resources/view/homeproxy/client.js:1193
msgid "EDNS Client subnet"
msgstr "EDNS 客户端子网"
#: htdocs/luci-static/resources/view/homeproxy/node.js:873
#: htdocs/luci-static/resources/view/homeproxy/server.js:481
msgid "Early data"
msgstr "前置数据"
#: htdocs/luci-static/resources/view/homeproxy/node.js:880
#: htdocs/luci-static/resources/view/homeproxy/server.js:488
msgid "Early data header name"
msgstr "前置数据标头"
#: htdocs/luci-static/resources/view/homeproxy/server.js:489
msgid "Early data is sent in path instead of header by default."
msgstr "前置数据默认发送在路径而不是标头中。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1211
msgid "Edit nodes"
msgstr "修改节点"
#: htdocs/luci-static/resources/view/homeproxy/server.js:612
msgid "Email"
msgstr "Email"
#: htdocs/luci-static/resources/view/homeproxy/client.js:414
#: htdocs/luci-static/resources/view/homeproxy/client.js:590
#: htdocs/luci-static/resources/view/homeproxy/client.js:942
#: htdocs/luci-static/resources/view/homeproxy/client.js:1069
#: htdocs/luci-static/resources/view/homeproxy/client.js:1328
#: htdocs/luci-static/resources/view/homeproxy/server.js:148
#: htdocs/luci-static/resources/view/homeproxy/server.js:166
msgid "Enable"
msgstr "启用"
#: htdocs/luci-static/resources/view/homeproxy/node.js:722
#: htdocs/luci-static/resources/view/homeproxy/server.js:371
msgid ""
"Enable 0-RTT QUIC connection handshake on the client side. This is not "
"impacting much on the performance, as the protocol is fully multiplexed.<br/"
">Disabling this is highly recommended, as it is vulnerable to replay attacks."
msgstr ""
"在客户端启用 0-RTT QUIC 连接握手。由于协议是完全复用的,这对性能影响不大。"
"<br/>强烈建议禁用此功能,因为它容易受到重放攻击。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:721
#: htdocs/luci-static/resources/view/homeproxy/server.js:370
msgid "Enable 0-RTT handshake"
msgstr "启用 0-RTT 握手"
#: htdocs/luci-static/resources/view/homeproxy/server.js:595
msgid "Enable ACME"
msgstr "启用 ACME"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1093
msgid "Enable ECH"
msgstr "启用 ECH"
#: htdocs/luci-static/resources/view/homeproxy/node.js:984
#: htdocs/luci-static/resources/view/homeproxy/server.js:512
msgid "Enable TCP Brutal"
msgstr "启用 TCP Brutal"
#: htdocs/luci-static/resources/view/homeproxy/node.js:985
#: htdocs/luci-static/resources/view/homeproxy/server.js:513
msgid "Enable TCP Brutal congestion control algorithm"
msgstr "启用 TCP Brutal 拥塞控制算法。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1168
#: htdocs/luci-static/resources/view/homeproxy/server.js:855
msgid "Enable UDP fragmentation."
msgstr "启用 UDP 分片。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:328
msgid "Enable endpoint-independent NAT"
msgstr "启用端点独立 NAT"
#: htdocs/luci-static/resources/view/homeproxy/node.js:980
#: htdocs/luci-static/resources/view/homeproxy/server.js:507
msgid "Enable padding"
msgstr "启用填充"
#: htdocs/luci-static/resources/view/homeproxy/server.js:846
msgid "Enable tcp fast open for listener."
msgstr "为监听器启用 TCP 快速打开。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1172
msgid ""
"Enable the SUoT protocol, requires server support. Conflict with multiplex."
msgstr "启用 SUoT 协议,需要服务端支持。与多路复用冲突。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:612
#: htdocs/luci-static/resources/view/homeproxy/node.js:748
#: htdocs/luci-static/resources/view/homeproxy/server.js:338
msgid "Encrypt method"
msgstr "加密方式"
#: htdocs/luci-static/resources/view/homeproxy/status.js:139
msgid "Error"
msgstr "错误"
#: htdocs/luci-static/resources/homeproxy.js:237
#: htdocs/luci-static/resources/homeproxy.js:271
#: htdocs/luci-static/resources/homeproxy.js:279
#: htdocs/luci-static/resources/homeproxy.js:297
#: htdocs/luci-static/resources/homeproxy.js:306
#: htdocs/luci-static/resources/homeproxy.js:317
#: htdocs/luci-static/resources/homeproxy.js:326
#: htdocs/luci-static/resources/homeproxy.js:328
#: htdocs/luci-static/resources/view/homeproxy/client.js:195
#: htdocs/luci-static/resources/view/homeproxy/client.js:207
#: htdocs/luci-static/resources/view/homeproxy/client.js:211
#: htdocs/luci-static/resources/view/homeproxy/client.js:230
#: htdocs/luci-static/resources/view/homeproxy/client.js:241
#: htdocs/luci-static/resources/view/homeproxy/client.js:245
#: htdocs/luci-static/resources/view/homeproxy/client.js:274
#: htdocs/luci-static/resources/view/homeproxy/client.js:504
#: htdocs/luci-static/resources/view/homeproxy/client.js:518
#: htdocs/luci-static/resources/view/homeproxy/client.js:521
#: htdocs/luci-static/resources/view/homeproxy/client.js:1356
#: htdocs/luci-static/resources/view/homeproxy/client.js:1361
#: htdocs/luci-static/resources/view/homeproxy/client.js:1364
#: htdocs/luci-static/resources/view/homeproxy/client.js:1506
#: htdocs/luci-static/resources/view/homeproxy/client.js:1538
#: htdocs/luci-static/resources/view/homeproxy/node.js:488
#: htdocs/luci-static/resources/view/homeproxy/node.js:1131
#: htdocs/luci-static/resources/view/homeproxy/node.js:1302
#: htdocs/luci-static/resources/view/homeproxy/node.js:1356
#: htdocs/luci-static/resources/view/homeproxy/node.js:1359
#: htdocs/luci-static/resources/view/homeproxy/server.js:235
#: htdocs/luci-static/resources/view/homeproxy/server.js:618
#: htdocs/luci-static/resources/view/homeproxy/server.js:620
msgid "Expecting: %s"
msgstr "请输入:%s"
#: htdocs/luci-static/resources/view/homeproxy/server.js:690
msgid "External Account Binding"
msgstr "外部账户绑定"
#: htdocs/luci-static/resources/view/homeproxy/server.js:702
msgid "External account MAC key"
msgstr "外部账户 MAC 密钥"
#: htdocs/luci-static/resources/view/homeproxy/server.js:696
msgid "External account key ID"
msgstr "外部账户密钥标识符"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1236
msgid "Extra records"
msgstr "附加记录"
#: htdocs/luci-static/resources/view/homeproxy/server.js:86
msgid "Failed to generate %s, error: %s."
msgstr "生成 %s 失败,错误:%s。"
#: htdocs/luci-static/resources/homeproxy.js:261
msgid "Failed to upload %s, error: %s."
msgstr "上传 %s 失败,错误:%s。"
#: htdocs/luci-static/resources/view/homeproxy/status.js:140
msgid "Fatal"
msgstr "致命"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1374
msgid "Filter keywords"
msgstr "过滤关键词"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1366
msgid "Filter nodes"
msgstr "过滤节点"
#: htdocs/luci-static/resources/view/homeproxy/client.js:630
msgid "Firefox / uquic firefox"
msgstr "Firefox / uquic firefox"
#: htdocs/luci-static/resources/view/homeproxy/server.js:171
msgid "Firewall"
msgstr "防火墙"
#: htdocs/luci-static/resources/view/homeproxy/node.js:736
#: htdocs/luci-static/resources/view/homeproxy/server.js:385
msgid "Flow"
msgstr "流控"
#: htdocs/luci-static/resources/view/homeproxy/client.js:428
msgid "For resolving domain name in the server address."
msgstr "用于解析服务器地址中的域名。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1339
msgid "Format"
msgstr "格式"
#: htdocs/luci-static/resources/view/homeproxy/client.js:730
msgid "Fragment TLS handshake into multiple TLS records."
msgstr "将 TLS 握手分片为多个 TLS 记录。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:736
msgid ""
"Fragment TLS handshakes. Due to poor performance, try <code>%s</code> first."
msgstr "分片 TLS 握手。由于性能较差,请先尝试 <code>%s</code>。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:742
msgid "Fragment fallback delay"
msgstr "分片回退延迟"
#: htdocs/luci-static/resources/view/homeproxy/node.js:842
msgid "GET"
msgstr "GET"
#: htdocs/luci-static/resources/view/homeproxy/status.js:256
msgid "GFW list version"
msgstr "GFW 域名列表版本"
#: htdocs/luci-static/resources/view/homeproxy/client.js:252
msgid "GFWList"
msgstr "GFWList"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1445
msgid "Gaming mode IPv4 IP-s"
msgstr "游戏模式 IPv4 地址"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1447
msgid "Gaming mode IPv6 IP-s"
msgstr "游戏模式 IPv6 地址"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1450
msgid "Gaming mode MAC-s"
msgstr "游戏模式 MAC 地址"
#: htdocs/luci-static/resources/view/homeproxy/server.js:36
#: htdocs/luci-static/resources/view/homeproxy/server.js:38
#: htdocs/luci-static/resources/view/homeproxy/server.js:827
msgid "Generate"
msgstr "生成"
#: htdocs/luci-static/resources/view/homeproxy/status.js:260
msgid "GitHub token"
msgstr "GitHub 令牌"
#: htdocs/luci-static/resources/view/homeproxy/client.js:256
msgid "Global"
msgstr "全局"
#: htdocs/luci-static/resources/view/homeproxy/node.js:759
msgid "Global padding"
msgstr "全局填充"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1452
msgid "Global proxy IPv4 IP-s"
msgstr "全局代理 IPv4 地址"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1455
msgid "Global proxy IPv6 IP-s"
msgstr "全局代理 IPv6 地址"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1458
msgid "Global proxy MAC-s"
msgstr "全局代理 MAC 地址"
#: htdocs/luci-static/resources/view/homeproxy/server.js:146
msgid "Global settings"
msgstr "全局设置"
#: htdocs/luci-static/resources/view/homeproxy/status.js:238
msgid "Google"
msgstr "谷歌"
#: htdocs/luci-static/resources/view/homeproxy/client.js:184
msgid "Google Public DNS (8.8.8.8)"
msgstr "谷歌公共 DNS(8.8.8.8)"
#: root/usr/share/rpcd/acl.d/luci-app-homeproxy.json:3
msgid "Grant access to homeproxy configuration"
msgstr "授予 homeproxy 访问 UCI 配置的权限"
#: htdocs/luci-static/resources/view/homeproxy/client.js:620
#: htdocs/luci-static/resources/view/homeproxy/client.js:1107
#: htdocs/luci-static/resources/view/homeproxy/node.js:430
#: htdocs/luci-static/resources/view/homeproxy/node.js:777
#: htdocs/luci-static/resources/view/homeproxy/server.js:177
#: htdocs/luci-static/resources/view/homeproxy/server.js:403
msgid "HTTP"
msgstr "HTTP"
#: htdocs/luci-static/resources/view/homeproxy/client.js:952
msgid "HTTP3"
msgstr "HTTP3"
#: htdocs/luci-static/resources/view/homeproxy/server.js:332
msgid ""
"HTTP3 server behavior when authentication fails.<br/>A 404 page will be "
"returned if empty."
msgstr "身份验证失败时的 HTTP3 服务器响应。默认返回 404 页面。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:951
msgid "HTTPS"
msgstr "HTTPS"
#: htdocs/luci-static/resources/view/homeproxy/node.js:778
#: htdocs/luci-static/resources/view/homeproxy/server.js:404
msgid "HTTPUpgrade"
msgstr "HTTPUpgrade"
#: htdocs/luci-static/resources/view/homeproxy/server.js:744
msgid "Handshake server address"
msgstr "握手服务器地址"
#: htdocs/luci-static/resources/view/homeproxy/server.js:750
msgid "Handshake server port"
msgstr "握手服务器端口"
#: htdocs/luci-static/resources/view/homeproxy/client.js:973
msgid "Headers"
msgstr "标头"
#: htdocs/luci-static/resources/view/homeproxy/node.js:727
#: htdocs/luci-static/resources/view/homeproxy/server.js:376
msgid "Heartbeat interval"
msgstr "心跳间隔"
#: htdocs/luci-static/resources/view/homeproxy/client.js:55
#: htdocs/luci-static/resources/view/homeproxy/client.js:57
#: htdocs/luci-static/resources/view/homeproxy/client.js:101
#: htdocs/luci-static/resources/view/homeproxy/status.js:282
#: root/usr/share/luci/menu.d/luci-app-homeproxy.json:3
msgid "HomeProxy"
msgstr "HomeProxy"
#: htdocs/luci-static/resources/view/homeproxy/server.js:58
#: htdocs/luci-static/resources/view/homeproxy/server.js:60
#: htdocs/luci-static/resources/view/homeproxy/server.js:129
msgid "HomeProxy Server"
msgstr "HomeProxy 服务端"
#: htdocs/luci-static/resources/view/homeproxy/node.js:535
msgid "Hop interval"
msgstr "跳跃间隔"
#: htdocs/luci-static/resources/view/homeproxy/node.js:529
msgid "Hopping port"
msgstr "跳跃端口"
#: htdocs/luci-static/resources/view/homeproxy/node.js:826
#: htdocs/luci-static/resources/view/homeproxy/node.js:831
#: htdocs/luci-static/resources/view/homeproxy/node.js:865
#: htdocs/luci-static/resources/view/homeproxy/server.js:437
#: htdocs/luci-static/resources/view/homeproxy/server.js:442
#: htdocs/luci-static/resources/view/homeproxy/server.js:473
msgid "Host"
msgstr "主机名"
#: htdocs/luci-static/resources/view/homeproxy/node.js:669
msgid "Host key"
msgstr "主机密钥"
#: htdocs/luci-static/resources/view/homeproxy/node.js:674
msgid "Host key algorithms"
msgstr "主机密钥算法"
#: htdocs/luci-static/resources/view/homeproxy/client.js:581
#: htdocs/luci-static/resources/view/homeproxy/client.js:1060
msgid "Host/IP fields"
msgstr "主机/IP 字段"
#: htdocs/luci-static/resources/view/homeproxy/server.js:364
msgid ""
"How long the server should wait for the client to send the authentication "
"command (in seconds)."
msgstr "服务器等待客户端发送认证命令的时间(单位:秒)。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:432
#: htdocs/luci-static/resources/view/homeproxy/server.js:179
msgid "Hysteria"
msgstr "Hysteria"
#: htdocs/luci-static/resources/view/homeproxy/node.js:433
#: htdocs/luci-static/resources/view/homeproxy/server.js:180
msgid "Hysteria2"
msgstr "Hysteria2"
#: htdocs/luci-static/resources/view/homeproxy/client.js:830
#: htdocs/luci-static/resources/view/homeproxy/client.js:1266
msgid "IP CIDR"
msgstr "IP CIDR"
#: htdocs/luci-static/resources/view/homeproxy/client.js:608
#: htdocs/luci-static/resources/view/homeproxy/client.js:1088
msgid "IP version"
msgstr "IP 版本"
#: htdocs/luci-static/resources/view/homeproxy/client.js:610
#: htdocs/luci-static/resources/view/homeproxy/client.js:1089
msgid "IPv4"
msgstr "IPv4"
#: htdocs/luci-static/resources/homeproxy.js:20
msgid "IPv4 only"
msgstr "仅 IPv4"
#: htdocs/luci-static/resources/view/homeproxy/client.js:611
#: htdocs/luci-static/resources/view/homeproxy/client.js:1090
msgid "IPv6"
msgstr "IPv6"
#: htdocs/luci-static/resources/homeproxy.js:21
msgid "IPv6 only"
msgstr "仅 IPv6"
#: htdocs/luci-static/resources/view/homeproxy/client.js:297
msgid "IPv6 support"
msgstr "IPv6 支持"
#: htdocs/luci-static/resources/view/homeproxy/node.js:506
msgid "Idle session check interval"
msgstr "空闲会话检查间隔"
#: htdocs/luci-static/resources/view/homeproxy/node.js:513
msgid "Idle session check timeout"
msgstr "空闲会话检查超时"
#: htdocs/luci-static/resources/view/homeproxy/client.js:553
#: htdocs/luci-static/resources/view/homeproxy/node.js:847
#: htdocs/luci-static/resources/view/homeproxy/server.js:456
msgid "Idle timeout"
msgstr "空闲超时"
#: htdocs/luci-static/resources/view/homeproxy/client.js:717
msgid ""
"If enabled, attempts to connect UDP connection to the destination instead of "
"listen."
msgstr "如果启用,尝试主动连接到目标的 UDP 而不是监听。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:711
msgid ""
"If enabled, for UDP proxy requests addressed to a domain, the original "
"packet address will be sent in the response instead of the mapped domain."
msgstr ""
"如果启用,对于发送到域名的 UDP 代理请求,响应中将发送原始数据包地址而不是映射"
"的域名。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:819
msgid ""
"If enabled, the client transport sends keepalive pings even with no active "
"connections."
msgstr "如果启用,客户端传输即使没有活动连接也会发送 keepalive ping。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:348
msgid ""
"If set, the requested domain name will be resolved to IP before routing."
msgstr "如果设置,请求的域名将在路由前被解析为 IP 地址。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:804
#: htdocs/luci-static/resources/view/homeproxy/server.js:425
msgid ""
"If the transport doesn't see any activity after a duration of this time (in "
"seconds), it pings the client to check if the connection is still active."
msgstr ""
"如果传输在此时间段(单位:秒)后没有看到任何活动,它会向客户端发送 ping 请求"
"以检查连接是否仍然活动。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1073
msgid ""
"If you have the root certificate, use this option instead of allowing "
"insecure."
msgstr "如果你拥有根证书,使用此选项而不是允许不安全连接。"
#: htdocs/luci-static/resources/view/homeproxy/server.js:326
msgid "Ignore client bandwidth"
msgstr "忽略客户端带宽"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1285
msgid "Import"
msgstr "导入"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1232
#: htdocs/luci-static/resources/view/homeproxy/node.js:1311
#: htdocs/luci-static/resources/view/homeproxy/node.js:1313
msgid "Import share links"
msgstr "导入分享链接"
#: htdocs/luci-static/resources/view/homeproxy/client.js:336
#: htdocs/luci-static/resources/view/homeproxy/server.js:860
msgid "In seconds."
msgstr "单位:秒。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:937
msgid "In seconds. Disabled by default."
msgstr "单位:秒。默认禁用。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:521
msgid ""
"In the check, at least the first <code>n</code> idle sessions are kept open."
msgstr "在检查中,至少保持前 <code>n</code> 个空闲会话处于打开状态。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:514
msgid ""
"In the check, close sessions that have been idle for longer than this, in "
"seconds."
msgstr "在检查中,关闭空闲时间超过此值的会话(单位:秒)。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:904
msgid "Independent cache per server"
msgstr "独立缓存"
#: htdocs/luci-static/resources/view/homeproxy/status.js:137
msgid "Info"
msgstr "信息"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1404
msgid "Interface Control"
msgstr "接口控制"
#: htdocs/luci-static/resources/view/homeproxy/client.js:560
msgid "Interrupt existing connections"
msgstr "中断现有连接"
#: htdocs/luci-static/resources/view/homeproxy/client.js:561
msgid "Interrupt existing connections when the selected outbound has changed."
msgstr "当选择的出站发生变化时中断现有连接。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:507
msgid "Interval checking for idle sessions, in seconds."
msgstr "空闲会话检查间隔(单位:秒)。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:728
#: htdocs/luci-static/resources/view/homeproxy/server.js:377
msgid ""
"Interval for sending heartbeat packets for keeping the connection alive (in "
"seconds)."
msgstr "发送心跳包以保持连接存活的时间间隔(单位:秒)。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:665
#: htdocs/luci-static/resources/view/homeproxy/client.js:1141
msgid "Invert"
msgstr "反转"
#: htdocs/luci-static/resources/view/homeproxy/client.js:666
#: htdocs/luci-static/resources/view/homeproxy/client.js:1142
msgid "Invert match result."
msgstr "反转匹配结果"
#: htdocs/luci-static/resources/view/homeproxy/server.js:775
msgid "Key path"
msgstr "密钥路径"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1418
msgid "LAN IP Policy"
msgstr "LAN IP 策略"
#: htdocs/luci-static/resources/view/homeproxy/client.js:409
#: htdocs/luci-static/resources/view/homeproxy/client.js:585
#: htdocs/luci-static/resources/view/homeproxy/client.js:937
#: htdocs/luci-static/resources/view/homeproxy/client.js:1064
#: htdocs/luci-static/resources/view/homeproxy/client.js:1323
#: htdocs/luci-static/resources/view/homeproxy/node.js:422
#: htdocs/luci-static/resources/view/homeproxy/server.js:160
msgid "Label"
msgstr "标签"
#: htdocs/luci-static/resources/view/homeproxy/node.js:743
#: htdocs/luci-static/resources/view/homeproxy/server.js:392
msgid ""
"Legacy protocol support (VMess MD5 Authentication) is provided for "
"compatibility purposes only, use of alterId > 1 is not recommended."
msgstr ""
"提供旧协议支持(VMess MD5 身份验证)仅出于兼容性目的,不建议使用 alterId > "
"1。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:325
msgid "Less compatibility and sometimes better performance."
msgstr "兼容性较差,但有时性能更好。"
#: htdocs/luci-static/resources/view/homeproxy/server.js:629
msgid "Let's Encrypt"
msgstr "Let's Encrypt"
#: htdocs/luci-static/resources/view/homeproxy/node.js:897
msgid ""
"List of IP (v4 or v6) addresses prefixes to be assigned to the interface."
msgstr "分配给接口的 IP(v4 或 v6)地址前缀列表。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:132
#: htdocs/luci-static/resources/view/homeproxy/client.js:161
#: htdocs/luci-static/resources/view/homeproxy/client.js:497
msgid "List of nodes to test."
msgstr "要测试的节点列表。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1036
#: htdocs/luci-static/resources/view/homeproxy/server.js:566
msgid "List of supported application level protocols, in order of preference."
msgstr "支持的应用层协议协商列表,按顺序排列。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1227
msgid "List of text DNS record to respond as answers."
msgstr "要响应的 DNS 记录列表。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1237
msgid "List of text DNS record to respond as extra records."
msgstr "要响应的附加 DNS 记录列表。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1232
msgid "List of text DNS record to respond as name servers."
msgstr "要响应的域名服务器(NS) DNS 记录列表。"
#: htdocs/luci-static/resources/view/homeproxy/server.js:193
msgid "Listen address"
msgstr "监听地址"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1406
msgid "Listen interfaces"
msgstr "监听接口"
#: htdocs/luci-static/resources/view/homeproxy/server.js:198
msgid "Listen port"
msgstr "监听端口"
#: htdocs/luci-static/resources/view/homeproxy/status.js:174
msgid "Loading"
msgstr "加载中"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1334
msgid "Local"
msgstr "本地"
#: htdocs/luci-static/resources/view/homeproxy/node.js:896
msgid "Local address"
msgstr "本地地址"
#: htdocs/luci-static/resources/view/homeproxy/status.js:191
msgid "Log file does not exist."
msgstr "日志文件不存在。"
#: htdocs/luci-static/resources/view/homeproxy/status.js:184
msgid "Log is empty."
msgstr "日志为空。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:930
msgid "MTU"
msgstr "MTU"
#: htdocs/luci-static/resources/view/homeproxy/client.js:150
msgid "Main UDP node"
msgstr "主 UDP 节点"
#: htdocs/luci-static/resources/view/homeproxy/client.js:122
msgid "Main node"
msgstr "主节点"
#: htdocs/luci-static/resources/view/homeproxy/client.js:662
msgid "Make IP CIDR in rule set used to match the source IP."
msgstr "使规则集中的 IP CIDR 用于匹配源 IP。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1134
msgid "Make IP CIDR in rule sets match the source IP."
msgstr "使规则集中的 IP CIDR 匹配源 IP。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1138
msgid "Make IP CIDR in rule-sets accept empty query response."
msgstr "使规则集中的 IP CIDR 接受空查询响应。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:905
msgid ""
"Make each DNS server's cache independent for special purposes. If enabled, "
"will slightly degrade performance."
msgstr "独立缓存每个 DNS 服务器的结果以供特殊用途。启用后会略微降低性能。"
#: htdocs/luci-static/resources/view/homeproxy/server.js:331
msgid "Masquerade"
msgstr "伪装"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1267
msgid ""
"Match IP CIDR with query response. Current rule will be skipped if not match."
msgstr "使用查询响应匹配 IP CIDR。如果不匹配,则跳过当前规则。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:831
msgid "Match IP CIDR."
msgstr "匹配 IP CIDR。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:811
#: htdocs/luci-static/resources/view/homeproxy/client.js:1247
msgid "Match domain suffix."
msgstr "匹配域名后缀。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:815
#: htdocs/luci-static/resources/view/homeproxy/client.js:1251
msgid "Match domain using keyword."
msgstr "使用关键词匹配域名。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:819
#: htdocs/luci-static/resources/view/homeproxy/client.js:1255
msgid "Match domain using regular expression."
msgstr "使用正则表达式匹配域名。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:806
#: htdocs/luci-static/resources/view/homeproxy/client.js:1242
msgid "Match full domain."
msgstr "匹配完整域名。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:854
#: htdocs/luci-static/resources/view/homeproxy/client.js:1291
msgid "Match port range. Format as START:/:END/START:END."
msgstr "匹配端口范围。格式为 START:/:END/START:END。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:849
#: htdocs/luci-static/resources/view/homeproxy/client.js:1286
msgid "Match port."
msgstr "匹配端口。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:835
#: htdocs/luci-static/resources/view/homeproxy/client.js:1271
msgid "Match private IP"
msgstr "匹配私有 IP"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1272
msgid "Match private IP with query response."
msgstr "使用查询响应匹配私有 IP。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:827
#: htdocs/luci-static/resources/view/homeproxy/client.js:1263
msgid "Match private source IP"
msgstr "匹配私有源 IP"
#: htdocs/luci-static/resources/view/homeproxy/client.js:859
#: htdocs/luci-static/resources/view/homeproxy/client.js:1296
msgid "Match process name."
msgstr "匹配进程名。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:867
#: htdocs/luci-static/resources/view/homeproxy/client.js:1304
msgid "Match process path using regular expression."
msgstr "使用正则表达式匹配进程路径。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:863
#: htdocs/luci-static/resources/view/homeproxy/client.js:1300
msgid "Match process path."
msgstr "匹配进程路径。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1095
msgid "Match query type."
msgstr "匹配请求类型。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:647
#: htdocs/luci-static/resources/view/homeproxy/client.js:1119
msgid "Match rule set."
msgstr "匹配规则集。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:823
#: htdocs/luci-static/resources/view/homeproxy/client.js:1259
msgid "Match source IP CIDR."
msgstr "匹配源 IP CIDR。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:844
#: htdocs/luci-static/resources/view/homeproxy/client.js:1281
msgid "Match source port range. Format as START:/:END/START:END."
msgstr "匹配源端口范围。格式为 START:/:END/START:END。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:839
#: htdocs/luci-static/resources/view/homeproxy/client.js:1276
msgid "Match source port."
msgstr "匹配源端口。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:643
#: htdocs/luci-static/resources/view/homeproxy/client.js:1115
msgid "Match user name."
msgstr "匹配用户名。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:579
#: htdocs/luci-static/resources/view/homeproxy/server.js:261
msgid "Max download speed"
msgstr "最大下载速度"
#: htdocs/luci-static/resources/view/homeproxy/node.js:580
#: htdocs/luci-static/resources/view/homeproxy/server.js:262
msgid "Max download speed in Mbps."
msgstr "最大下载速度(Mbps)。"
#: htdocs/luci-static/resources/view/homeproxy/server.js:739
msgid "Max time difference"
msgstr "最大时间差"
#: htdocs/luci-static/resources/view/homeproxy/node.js:586
#: htdocs/luci-static/resources/view/homeproxy/server.js:268
msgid "Max upload speed"
msgstr "最大上传速度"
#: htdocs/luci-static/resources/view/homeproxy/node.js:587
#: htdocs/luci-static/resources/view/homeproxy/server.js:269
msgid "Max upload speed in Mbps."
msgstr "最大上传速度(Mbps)。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1056
#: htdocs/luci-static/resources/view/homeproxy/server.js:578
msgid "Maximum TLS version"
msgstr "最大 TLS 版本"
#: htdocs/luci-static/resources/view/homeproxy/node.js:961
#: htdocs/luci-static/resources/view/homeproxy/node.js:975
msgid "Maximum connections"
msgstr "最大连接数"
#: htdocs/luci-static/resources/view/homeproxy/node.js:973
msgid ""
"Maximum multiplexed streams in a connection before opening a new connection."
"<br/>Conflict with <code>%s</code> and <code>%s</code>."
msgstr ""
"在打开新连接之前,连接中的最大多路复用流数量。与 <code>%s</code> 和 "
"<code>%s</code> 冲突。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:972
msgid "Maximum streams"
msgstr "最大流数量"
#: htdocs/luci-static/resources/view/homeproxy/client.js:768
#: htdocs/luci-static/resources/view/homeproxy/client.js:776
#: htdocs/luci-static/resources/view/homeproxy/client.js:1201
#: htdocs/luci-static/resources/view/homeproxy/client.js:1210
#: htdocs/luci-static/resources/view/homeproxy/node.js:841
#: htdocs/luci-static/resources/view/homeproxy/server.js:452
msgid "Method"
msgstr "方式"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1048
#: htdocs/luci-static/resources/view/homeproxy/server.js:570
msgid "Minimum TLS version"
msgstr "最低 TLS 版本"
#: htdocs/luci-static/resources/view/homeproxy/node.js:520
msgid "Minimum idle sessions"
msgstr "最小空闲会话数"
#: htdocs/luci-static/resources/view/homeproxy/node.js:967
msgid ""
"Minimum multiplexed streams in a connection before opening a new connection."
msgstr "在打开新连接之前,连接中的最小多路复用流数量。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:966
#: htdocs/luci-static/resources/view/homeproxy/node.js:975
msgid "Minimum streams"
msgstr "最小流数量"
#: htdocs/luci-static/resources/view/homeproxy/client.js:310
#: htdocs/luci-static/resources/view/homeproxy/server.js:183
msgid "Mixed"
msgstr "混合"
#: htdocs/luci-static/resources/view/homeproxy/client.js:321
msgid "Mixed <code>system</code> TCP stack and <code>gVisor</code> UDP stack."
msgstr "混合<code>系统</code> TCP 栈和 <code>gVisor</code> UDP 栈。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:595
#: htdocs/luci-static/resources/view/homeproxy/client.js:1074
msgid "Mode"
msgstr "模式"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1164
#: htdocs/luci-static/resources/view/homeproxy/server.js:850
msgid "MultiPath TCP"
msgstr "多路径 TCP(MPTCP)"
#: htdocs/luci-static/resources/view/homeproxy/node.js:944
#: htdocs/luci-static/resources/view/homeproxy/server.js:500
msgid "Multiplex"
msgstr "多路复用"
#: htdocs/luci-static/resources/view/homeproxy/node.js:952
msgid "Multiplex protocol."
msgstr "多路复用协议。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:57
#: htdocs/luci-static/resources/view/homeproxy/server.js:60
msgid "NOT RUNNING"
msgstr "未运行"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1395
msgid "NOTE: Save current settings before updating subscriptions."
msgstr "注意:更新订阅前先保存当前配置。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1231
msgid "NS"
msgstr "NS"
#: htdocs/luci-static/resources/view/homeproxy/node.js:711
msgid "Native"
msgstr "原生"
#: htdocs/luci-static/resources/view/homeproxy/server.js:181
msgid "NaïveProxy"
msgstr "NaïveProxy"
#: htdocs/luci-static/resources/view/homeproxy/client.js:637
#: htdocs/luci-static/resources/view/homeproxy/client.js:1098
#: htdocs/luci-static/resources/view/homeproxy/server.js:866
msgid "Network"
msgstr "网络"
#: htdocs/luci-static/resources/view/homeproxy/node.js:701
msgid "New Reno"
msgstr "New Reno"
#: htdocs/luci-static/resources/view/homeproxy/node.js:774
#: htdocs/luci-static/resources/view/homeproxy/node.js:791
#: htdocs/luci-static/resources/view/homeproxy/server.js:400
#: htdocs/luci-static/resources/view/homeproxy/server.js:417
msgid "No TCP transport, plain HTTP is merged into the HTTP transport."
msgstr "无 TCP 传输层,纯 HTTP 已合并到 HTTP 传输层。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:789
#: htdocs/luci-static/resources/view/homeproxy/server.js:415
msgid "No additional encryption support: It's basically duplicate encryption."
msgstr "无额外加密支持:它基本上是重复加密。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1412
msgid "No subscription available"
msgstr "无可用订阅"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1437
msgid "No subscription node"
msgstr "无订阅节点"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1271
msgid "No valid share link found."
msgstr "找不到有效分享链接。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:419
#: htdocs/luci-static/resources/view/homeproxy/node.js:396
msgid "Node"
msgstr "节点"
#: root/usr/share/luci/menu.d/luci-app-homeproxy.json:22
msgid "Node Settings"
msgstr "节点设置"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1217
msgid "Nodes"
msgstr "节点"
#: htdocs/luci-static/resources/view/homeproxy/client.js:993
#: htdocs/luci-static/resources/view/homeproxy/node.js:737
#: htdocs/luci-static/resources/view/homeproxy/node.js:775
#: htdocs/luci-static/resources/view/homeproxy/server.js:386
#: htdocs/luci-static/resources/view/homeproxy/server.js:401
msgid "None"
msgstr "无"
#: htdocs/luci-static/resources/view/homeproxy/node.js:573
#: htdocs/luci-static/resources/view/homeproxy/server.js:294
msgid "Obfuscate password"
msgstr "混淆密码"
#: htdocs/luci-static/resources/view/homeproxy/node.js:567
#: htdocs/luci-static/resources/view/homeproxy/server.js:288
msgid "Obfuscate type"
msgstr "混淆类型"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1407
msgid "Only process traffic from specific interfaces. Leave empty for all."
msgstr "只处理来自指定接口的流量。留空表示全部。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:254
msgid "Only proxy mainland China"
msgstr "仅代理中国大陆"
#: htdocs/luci-static/resources/view/homeproxy/client.js:580
#: htdocs/luci-static/resources/view/homeproxy/client.js:1059
msgid "Other fields"
msgstr "其他字段"
#: htdocs/luci-static/resources/view/homeproxy/client.js:460
#: htdocs/luci-static/resources/view/homeproxy/client.js:678
#: htdocs/luci-static/resources/view/homeproxy/client.js:1026
#: htdocs/luci-static/resources/view/homeproxy/client.js:1374
msgid "Outbound"
msgstr "出站"
#: htdocs/luci-static/resources/view/homeproxy/client.js:420
msgid "Outbound node"
msgstr "出站节点"
#: htdocs/luci-static/resources/view/homeproxy/client.js:696
msgid "Override address"
msgstr "覆盖地址"
#: htdocs/luci-static/resources/view/homeproxy/client.js:352
msgid "Override destination"
msgstr "覆盖目标地址"
#: htdocs/luci-static/resources/view/homeproxy/client.js:703
msgid "Override port"
msgstr "覆盖端口"
#: htdocs/luci-static/resources/view/homeproxy/client.js:353
msgid "Override the connection destination address with the sniffed domain."
msgstr "使用嗅探到的域名覆盖连接目标。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:697
msgid "Override the connection destination address."
msgstr "覆盖目标连接地址。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:704
msgid "Override the connection destination port."
msgstr "覆盖目标连接端口。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:843
msgid "PUT"
msgstr "PUT"
#: htdocs/luci-static/resources/view/homeproxy/node.js:886
msgid "Packet encoding"
msgstr "数据包编码"
#: htdocs/luci-static/resources/view/homeproxy/server.js:244
msgid "Padding scheme"
msgstr "填充方案"
#: htdocs/luci-static/resources/view/homeproxy/status.js:141
msgid "Panic"
msgstr "崩溃"
#: htdocs/luci-static/resources/view/homeproxy/node.js:464
#: htdocs/luci-static/resources/view/homeproxy/server.js:210
msgid "Password"
msgstr "密码"
#: htdocs/luci-static/resources/view/homeproxy/client.js:966
#: htdocs/luci-static/resources/view/homeproxy/client.js:1345
#: htdocs/luci-static/resources/view/homeproxy/node.js:836
#: htdocs/luci-static/resources/view/homeproxy/node.js:869
#: htdocs/luci-static/resources/view/homeproxy/server.js:447
#: htdocs/luci-static/resources/view/homeproxy/server.js:477
msgid "Path"
msgstr "路径"
#: htdocs/luci-static/resources/view/homeproxy/node.js:911
msgid "Peer pubkic key"
msgstr "对端公钥"
#: htdocs/luci-static/resources/view/homeproxy/client.js:329
msgid ""
"Performance may degrade slightly, so it is not recommended to enable on when "
"it is not needed."
msgstr "性能可能会略有下降,建议仅在需要时开启。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:936
msgid "Persistent keepalive interval"
msgstr "持久 keepalive 间隔"
#: htdocs/luci-static/resources/view/homeproxy/node.js:855
#: htdocs/luci-static/resources/view/homeproxy/server.js:464
msgid "Ping timeout"
msgstr "Ping 超时"
#: htdocs/luci-static/resources/view/homeproxy/node.js:630
msgid "Plugin"
msgstr "插件"
#: htdocs/luci-static/resources/view/homeproxy/node.js:637
msgid "Plugin opts"
msgstr "插件参数"
#: htdocs/luci-static/resources/view/homeproxy/client.js:848
#: htdocs/luci-static/resources/view/homeproxy/client.js:962
#: htdocs/luci-static/resources/view/homeproxy/client.js:1285
#: htdocs/luci-static/resources/view/homeproxy/node.js:453
msgid "Port"
msgstr "端口"
#: htdocs/luci-static/resources/view/homeproxy/client.js:276
msgid "Port %s alrealy exists!"
msgstr "端口 %s 已存在!"
#: htdocs/luci-static/resources/view/homeproxy/client.js:582
#: htdocs/luci-static/resources/view/homeproxy/client.js:1061
msgid "Port fields"
msgstr "端口字段"
#: htdocs/luci-static/resources/view/homeproxy/node.js:536
msgid "Port hopping interval in seconds."
msgstr "端口跳跃间隔(单位:秒)。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:853
#: htdocs/luci-static/resources/view/homeproxy/client.js:1290
msgid "Port range"
msgstr "端口范围"
#: htdocs/luci-static/resources/view/homeproxy/node.js:918
msgid "Pre-shared key"
msgstr "预共享密钥"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1149
msgid "Predefined"
msgstr "预定义"
#: htdocs/luci-static/resources/homeproxy.js:18
msgid "Prefer IPv4"
msgstr "优先 IPv4"
#: htdocs/luci-static/resources/homeproxy.js:19
msgid "Prefer IPv6"
msgstr "优先 IPv6"
#: htdocs/luci-static/resources/view/homeproxy/node.js:678
#: htdocs/luci-static/resources/view/homeproxy/node.js:903
msgid "Private key"
msgstr "私钥"
#: htdocs/luci-static/resources/view/homeproxy/node.js:683
msgid "Private key passphrase"
msgstr "私钥密码短语"
#: htdocs/luci-static/resources/view/homeproxy/client.js:583
#: htdocs/luci-static/resources/view/homeproxy/client.js:1062
msgid "Process fields"
msgstr "进程字段"
#: htdocs/luci-static/resources/view/homeproxy/client.js:858
#: htdocs/luci-static/resources/view/homeproxy/client.js:1295
msgid "Process name"
msgstr "进程名"
#: htdocs/luci-static/resources/view/homeproxy/client.js:862
#: htdocs/luci-static/resources/view/homeproxy/client.js:1299
msgid "Process path"
msgstr "进程路径"
#: htdocs/luci-static/resources/view/homeproxy/client.js:866
#: htdocs/luci-static/resources/view/homeproxy/client.js:1303
msgid "Process path (regex)"
msgstr "进程路径(正则表达式)"
#: htdocs/luci-static/resources/view/homeproxy/client.js:615
#: htdocs/luci-static/resources/view/homeproxy/client.js:1103
#: htdocs/luci-static/resources/view/homeproxy/node.js:543
#: htdocs/luci-static/resources/view/homeproxy/node.js:951
#: htdocs/luci-static/resources/view/homeproxy/server.js:250
msgid "Protocol"
msgstr "协议"
#: htdocs/luci-static/resources/view/homeproxy/node.js:767
msgid "Protocol parameter. Enable length block encryption."
msgstr "协议参数。启用长度块加密。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:760
msgid ""
"Protocol parameter. Will waste traffic randomly if enabled (enabled by "
"default in v2ray and cannot be disabled)."
msgstr "协议参数。 如启用会随机浪费流量(在 v2ray 中默认启用并且无法禁用)。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1481
msgid "Proxy Domain List"
msgstr "代理域名列表"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1436
#: htdocs/luci-static/resources/view/homeproxy/client.js:1465
msgid "Proxy IPv4 IP-s"
msgstr "代理 IPv4 地址"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1439
#: htdocs/luci-static/resources/view/homeproxy/client.js:1468
msgid "Proxy IPv6 IP-s"
msgstr "代理 IPv6 地址"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1442
msgid "Proxy MAC-s"
msgstr "代理 MAC 地址"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1423
msgid "Proxy all except listed"
msgstr "仅允许列表外"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1420
msgid "Proxy filter mode"
msgstr "代理过滤模式"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1422
msgid "Proxy listed only"
msgstr "仅允许列表内"
#: htdocs/luci-static/resources/view/homeproxy/client.js:284
msgid "Proxy mode"
msgstr "代理模式"
#: htdocs/luci-static/resources/view/homeproxy/node.js:497
msgid "Proxy protocol"
msgstr "代理协议"
#: htdocs/luci-static/resources/view/homeproxy/client.js:621
#: htdocs/luci-static/resources/view/homeproxy/client.js:953
#: htdocs/luci-static/resources/view/homeproxy/client.js:1108
#: htdocs/luci-static/resources/view/homeproxy/node.js:712
#: htdocs/luci-static/resources/view/homeproxy/node.js:779
#: htdocs/luci-static/resources/view/homeproxy/server.js:405
msgid "QUIC"
msgstr "QUIC"
#: htdocs/luci-static/resources/view/homeproxy/node.js:699
#: htdocs/luci-static/resources/view/homeproxy/server.js:355
msgid "QUIC congestion control algorithm."
msgstr "QUIC 拥塞控制算法。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:599
#: htdocs/luci-static/resources/view/homeproxy/server.js:307
msgid "QUIC connection receive window"
msgstr "QUIC 连接窗口"
#: htdocs/luci-static/resources/view/homeproxy/server.js:314
msgid "QUIC maximum concurrent bidirectional streams"
msgstr "QUIC 最大双向并发流"
#: htdocs/luci-static/resources/view/homeproxy/node.js:593
#: htdocs/luci-static/resources/view/homeproxy/server.js:300
msgid "QUIC stream receive window"
msgstr "QUIC 流接收窗口"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1094
msgid "Query type"
msgstr "查询类型"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1214
msgid "RCode"
msgstr "RCode"
#: htdocs/luci-static/resources/view/homeproxy/client.js:622
#: htdocs/luci-static/resources/view/homeproxy/client.js:1109
msgid "RDP"
msgstr "RDP"
#: htdocs/luci-static/resources/view/homeproxy/client.js:917
msgid "RDRC timeout"
msgstr "RDRC 超时"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1142
#: htdocs/luci-static/resources/view/homeproxy/server.js:709
msgid "REALITY"
msgstr "REALITY"
#: htdocs/luci-static/resources/view/homeproxy/server.js:714
msgid "REALITY private key"
msgstr "REALITY 私钥"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1147
#: htdocs/luci-static/resources/view/homeproxy/server.js:730
msgid "REALITY public key"
msgstr "REALITY 公钥"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1153
#: htdocs/luci-static/resources/view/homeproxy/server.js:734
msgid "REALITY short ID"
msgstr "REALITY 短 ID"
#: htdocs/luci-static/resources/view/homeproxy/client.js:55
#: htdocs/luci-static/resources/view/homeproxy/server.js:58
msgid "RUNNING"
msgstr "运行中"
#: htdocs/luci-static/resources/view/homeproxy/node.js:665
msgid "Random version will be used if empty."
msgstr "如留空,则使用随机版本。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:488
msgid "Recursive outbound detected!"
msgstr "检测到递归出站!"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1012
msgid "Recursive resolver detected!"
msgstr "检测到递归解析器!"
#: htdocs/luci-static/resources/view/homeproxy/client.js:285
msgid "Redirect TCP"
msgstr "Redirect TCP"
#: htdocs/luci-static/resources/view/homeproxy/client.js:287
msgid "Redirect TCP + TProxy UDP"
msgstr "Redirect TCP + TProxy UDP"
#: htdocs/luci-static/resources/view/homeproxy/client.js:289
msgid "Redirect TCP + Tun UDP"
msgstr "Redirect TCP + Tun UDP"
#: htdocs/luci-static/resources/view/homeproxy/status.js:219
msgid "Refresh every %s seconds."
msgstr "每 %s 秒刷新。"
#: htdocs/luci-static/resources/view/homeproxy/server.js:659
msgid "Region ID"
msgstr "区域 ID"
#: htdocs/luci-static/resources/view/homeproxy/client.js:672
#: htdocs/luci-static/resources/view/homeproxy/client.js:1148
msgid "Reject"
msgstr "拒绝"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1335
msgid "Remote"
msgstr "远程"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1434
msgid "Remove %s nodes"
msgstr "移除 %s 个节点"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1424
msgid "Remove all nodes from subscriptions"
msgstr "移除所有订阅节点"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1202
msgid "Reply with REFUSED"
msgstr "回复 REFUSED"
#: htdocs/luci-static/resources/view/homeproxy/client.js:769
msgid "Reply with TCP RST / ICMP port unreachable"
msgstr "回复 TCP RST / ICMP 端口不可达"
#: htdocs/luci-static/resources/view/homeproxy/node.js:925
msgid "Reserved field bytes"
msgstr "保留字段字节"
#: htdocs/luci-static/resources/view/homeproxy/client.js:673
msgid "Resolve"
msgstr "解析"
#: htdocs/luci-static/resources/view/homeproxy/client.js:780
msgid "Resolve strategy"
msgstr "解析策略"
#: htdocs/luci-static/resources/view/homeproxy/status.js:241
msgid "Resources management"
msgstr "资源管理"
#: htdocs/luci-static/resources/view/homeproxy/server.js:880
msgid "Reuse address"
msgstr "复用地址"
#: htdocs/luci-static/resources/view/homeproxy/server.js:881
msgid "Reuse listener address."
msgstr "复用监听地址。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:792
#: htdocs/luci-static/resources/view/homeproxy/client.js:1186
msgid "Rewrite TTL"
msgstr "重写 TTL"
#: htdocs/luci-static/resources/view/homeproxy/client.js:793
#: htdocs/luci-static/resources/view/homeproxy/client.js:1187
msgid "Rewrite TTL in DNS responses."
msgstr "在 DNS 响应中重写 TTL。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:670
#: htdocs/luci-static/resources/view/homeproxy/client.js:1146
msgid "Route"
msgstr "路由"
#: htdocs/luci-static/resources/view/homeproxy/client.js:671
#: htdocs/luci-static/resources/view/homeproxy/client.js:1147
msgid "Route options"
msgstr "路由选项"
#: htdocs/luci-static/resources/view/homeproxy/client.js:396
msgid "Routing Nodes"
msgstr "路由节点"
#: htdocs/luci-static/resources/view/homeproxy/client.js:567
msgid "Routing Rules"
msgstr "路由规则"
#: htdocs/luci-static/resources/view/homeproxy/client.js:120
msgid "Routing Settings"
msgstr "路由设置"
#: htdocs/luci-static/resources/view/homeproxy/client.js:251
msgid "Routing mode"
msgstr "路由模式"
#: htdocs/luci-static/resources/view/homeproxy/client.js:405
msgid "Routing node"
msgstr "路由节点"
#: htdocs/luci-static/resources/view/homeproxy/client.js:264
msgid "Routing ports"
msgstr "路由端口"
#: htdocs/luci-static/resources/view/homeproxy/client.js:576
msgid "Routing rule"
msgstr "路由规则"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1310
msgid "Rule Set"
msgstr "规则集"
#: htdocs/luci-static/resources/view/homeproxy/client.js:646
#: htdocs/luci-static/resources/view/homeproxy/client.js:1118
#: htdocs/luci-static/resources/view/homeproxy/client.js:1319
msgid "Rule set"
msgstr "规则集"
#: htdocs/luci-static/resources/view/homeproxy/client.js:661
#: htdocs/luci-static/resources/view/homeproxy/client.js:1133
msgid "Rule set IP CIDR as source IP"
msgstr "规则集 IP CIDR 作为源 IP"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1352
msgid "Rule set URL"
msgstr "规则集 URL"
#: htdocs/luci-static/resources/view/homeproxy/client.js:623
#: htdocs/luci-static/resources/view/homeproxy/client.js:1110
#: htdocs/luci-static/resources/view/homeproxy/node.js:438
msgid "SSH"
msgstr "SSH"
#: htdocs/luci-static/resources/view/homeproxy/client.js:624
#: htdocs/luci-static/resources/view/homeproxy/client.js:1111
msgid "STUN"
msgstr "STUN"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1177
msgid "SUoT version"
msgstr "SUoT 版本"
#: htdocs/luci-static/resources/view/homeproxy/client.js:632
msgid "Safari / Apple Network API"
msgstr "Safari / Apple Network API"
#: htdocs/luci-static/resources/view/homeproxy/node.js:569
#: htdocs/luci-static/resources/view/homeproxy/server.js:290
msgid "Salamander"
msgstr "Salamander"
#: htdocs/luci-static/resources/view/homeproxy/client.js:152
msgid "Same as main node"
msgstr "保持与主节点一致"
#: htdocs/luci-static/resources/view/homeproxy/status.js:267
#: htdocs/luci-static/resources/view/homeproxy/status.js:273
msgid "Save"
msgstr "保存"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1397
msgid "Save current settings"
msgstr "保存当前设置"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1394
msgid "Save subscriptions settings"
msgstr "保存订阅设置"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1154
#: htdocs/luci-static/resources/view/homeproxy/server.js:156
msgid "Server"
msgstr "服务器"
#: root/usr/share/luci/menu.d/luci-app-homeproxy.json:30
msgid "Server Settings"
msgstr "服务器设置"
#: htdocs/luci-static/resources/view/homeproxy/server.js:607
msgid ""
"Server name to use when choosing a certificate if the ClientHello's "
"ServerName field is empty."
msgstr "当 ClientHello 的 ServerName 字段为空时,选择证书所使用的服务器名称。"
#: htdocs/luci-static/resources/view/homeproxy/server.js:151
msgid "Server settings"
msgstr "服务器设置"
#: root/usr/share/luci/menu.d/luci-app-homeproxy.json:38
msgid "Service Status"
msgstr "服务状态"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1174
msgid "Set domain strategy for this query."
msgstr "为此查询设置域名策略。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:436
msgid "ShadowTLS"
msgstr "ShadowTLS"
#: htdocs/luci-static/resources/view/homeproxy/node.js:644
msgid "ShadowTLS version"
msgstr "ShadowTLS 版本"
#: htdocs/luci-static/resources/view/homeproxy/node.js:435
#: htdocs/luci-static/resources/view/homeproxy/server.js:184
msgid "Shadowsocks"
msgstr "Shadowsocks"
#: htdocs/luci-static/resources/view/homeproxy/client.js:628
msgid "Sniffed client type (QUIC client type or SSH client name)."
msgstr "嗅探到的客户端类型(QUIC 客户端类型或 SSH 客户端名称)。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:616
#: htdocs/luci-static/resources/view/homeproxy/client.js:1104
msgid ""
"Sniffed protocol, see <a target=\"_blank\" href=\"https://sing-box.sagernet."
"org/configuration/route/sniff/\">Sniff</a> for details."
msgstr ""
"嗅探协议,具体参见 <a target=\"_blank\" href=\"https://sing-box.sagernet.org/"
"configuration/route/sniff/\">Sniff</a>。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:437
#: htdocs/luci-static/resources/view/homeproxy/server.js:185
msgid "Socks"
msgstr "Socks"
#: htdocs/luci-static/resources/view/homeproxy/node.js:654
msgid "Socks version"
msgstr "Socks 版本"
#: htdocs/luci-static/resources/view/homeproxy/node.js:655
msgid "Socks4"
msgstr "Socks4"
#: htdocs/luci-static/resources/view/homeproxy/node.js:656
msgid "Socks4A"
msgstr "Socks4A"
#: htdocs/luci-static/resources/view/homeproxy/node.js:657
msgid "Socks5"
msgstr "Socks5"
#: htdocs/luci-static/resources/view/homeproxy/client.js:822
#: htdocs/luci-static/resources/view/homeproxy/client.js:1258
msgid "Source IP CIDR"
msgstr "源 IP CIDR"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1341
msgid "Source file"
msgstr "源文件"
#: htdocs/luci-static/resources/view/homeproxy/client.js:838
#: htdocs/luci-static/resources/view/homeproxy/client.js:1275
msgid "Source port"
msgstr "源端口"
#: htdocs/luci-static/resources/view/homeproxy/client.js:843
#: htdocs/luci-static/resources/view/homeproxy/client.js:1280
msgid "Source port range"
msgstr "源端口范围"
#: htdocs/luci-static/resources/view/homeproxy/client.js:750
msgid ""
"Specifies DNS server tag to use instead of selecting through DNS routing."
msgstr "指定使用的 DNS 服务器,而不是通过 DNS 规则选择。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:796
#: htdocs/luci-static/resources/view/homeproxy/node.js:848
msgid ""
"Specifies the period of time (in seconds) after which a health check will be "
"performed using a ping frame if no frames have been received on the "
"connection.<br/>Please note that a ping response is considered a received "
"frame, so if there is no other traffic on the connection, the health check "
"will be executed every interval."
msgstr ""
"如果连接上没有收到任何帧,指定一段时间(单位:秒)后将使用 PING 帧执行健康检"
"查。<br/>需要注意的是,PING 响应被视为已接收的帧,因此如果连接上没有其他流"
"量,则健康检查将在每个间隔执行一次。"
#: htdocs/luci-static/resources/view/homeproxy/server.js:422
#: htdocs/luci-static/resources/view/homeproxy/server.js:457
msgid ""
"Specifies the time (in seconds) until idle clients should be closed with a "
"GOAWAY frame. PING frames are not considered as activity."
msgstr ""
"指定闲置客户端应在多长时间(单位:秒)内使用 GOAWAY 帧关闭。PING 帧不被视为活"
"动。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:800
#: htdocs/luci-static/resources/view/homeproxy/node.js:856
msgid ""
"Specifies the timeout duration (in seconds) after sending a PING frame, "
"within which a response must be received.<br/>If a response to the PING "
"frame is not received within the specified timeout duration, the connection "
"will be closed."
msgstr ""
"指定发送 PING 帧后,在指定的超时时间(单位:秒)内必须接收到响应。<br/>如果在"
"指定的超时时间内没有收到 PING 帧的响应,则连接将关闭。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:265
msgid ""
"Specify target ports to be proxied. Multiple ports must be separated by "
"commas."
msgstr "指定需要被代理的目标端口。多个端口必须用逗号隔开。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:913
msgid "Store RDRC"
msgstr "存储 RDRC"
#: htdocs/luci-static/resources/view/homeproxy/client.js:914
msgid ""
"Store rejected DNS response cache.<br/>The check results of <code>Address "
"filter DNS rule items</code> will be cached until expiration."
msgstr ""
"存储被拒绝的 DNS 响应缓存。<br/><code>地址过滤 DNS 规则</code> 的检查结果将被"
"缓存直到过期。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:557
#: htdocs/luci-static/resources/view/homeproxy/server.js:278
msgid "String"
msgstr "字符串"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1322
msgid "Sub (%s)"
msgstr "订阅(%s)"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1349
msgid "Subscription URL-s"
msgstr "订阅地址"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1333
msgid "Subscriptions"
msgstr "订阅"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1273
msgid "Successfully imported %s nodes of total %s."
msgstr "成功导入 %s 个节点,共 %s 个。"
#: htdocs/luci-static/resources/view/homeproxy/status.js:86
msgid "Successfully updated."
msgstr "更新成功。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1233
#: htdocs/luci-static/resources/view/homeproxy/node.js:1350
msgid ""
"Support Hysteria, Shadowsocks, Trojan, v2rayN (VMess), and XTLS (VLESS) "
"online configuration delivery standard."
msgstr ""
"支持 Hysteria、Shadowsocks、Trojan、v2rayN(VMess)和 XTLS(VLESS)在线配置交"
"付标准。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:180
msgid ""
"Support UDP, TCP, DoH, DoQ, DoT. TCP protocol will be used if not specified."
msgstr "支持 UDP、TCP、DoH、DoQ、DoT。如未指定则使用 TCP 协议。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:313
msgid "System"
msgstr "系统"
#: htdocs/luci-static/resources/view/homeproxy/client.js:383
#: htdocs/luci-static/resources/view/homeproxy/client.js:435
#: htdocs/luci-static/resources/view/homeproxy/client.js:757
#: htdocs/luci-static/resources/view/homeproxy/client.js:888
#: htdocs/luci-static/resources/view/homeproxy/client.js:995
#: htdocs/luci-static/resources/view/homeproxy/client.js:1161
msgid "System DNS"
msgstr "系统 DNS"
#: htdocs/luci-static/resources/view/homeproxy/client.js:638
#: htdocs/luci-static/resources/view/homeproxy/client.js:949
#: htdocs/luci-static/resources/view/homeproxy/client.js:1099
#: htdocs/luci-static/resources/view/homeproxy/server.js:867
msgid "TCP"
msgstr "TCP"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1161
#: htdocs/luci-static/resources/view/homeproxy/server.js:845
msgid "TCP fast open"
msgstr "TCP 快速打开"
#: htdocs/luci-static/resources/view/homeproxy/client.js:307
msgid "TCP/IP stack"
msgstr "TCP/IP 协议栈"
#: htdocs/luci-static/resources/view/homeproxy/client.js:308
msgid "TCP/IP stack."
msgstr "TCP/IP 协议栈。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:625
#: htdocs/luci-static/resources/view/homeproxy/client.js:950
#: htdocs/luci-static/resources/view/homeproxy/client.js:1112
#: htdocs/luci-static/resources/view/homeproxy/node.js:1003
#: htdocs/luci-static/resources/view/homeproxy/server.js:532
msgid "TLS"
msgstr "TLS"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1035
#: htdocs/luci-static/resources/view/homeproxy/server.js:565
msgid "TLS ALPN"
msgstr "TLS ALPN"
#: htdocs/luci-static/resources/view/homeproxy/client.js:979
#: htdocs/luci-static/resources/view/homeproxy/node.js:1030
#: htdocs/luci-static/resources/view/homeproxy/server.js:560
msgid "TLS SNI"
msgstr "TLS SNI"
#: htdocs/luci-static/resources/view/homeproxy/client.js:735
msgid "TLS fragment"
msgstr "TLS 分片"
#: htdocs/luci-static/resources/view/homeproxy/node.js:787
#: htdocs/luci-static/resources/view/homeproxy/server.js:413
msgid "TLS is not enforced. If TLS is not configured, plain HTTP 1.1 is used."
msgstr "不强制执行 TLS。如未配置 TLS,将使用纯 HTTP 1.1。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:729
#: htdocs/luci-static/resources/view/homeproxy/client.js:737
msgid "TLS record fragment"
msgstr "TLS 记录分片"
#: htdocs/luci-static/resources/view/homeproxy/client.js:988
msgid ""
"Tag of a another server to resolve the domain name in the address. Required "
"if address contains domain."
msgstr ""
"用于解析本 DNS 服务器的域名的另一个 DNS 服务器的标签。如果服务器地址包括域名"
"则必须。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1027
msgid "Tag of an outbound for connecting to the dns server."
msgstr "用于连接到 DNS 服务器的出站标签。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1375
msgid "Tag of the outbound to download rule set."
msgstr "用于下载规则集的出站标签。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1155
msgid "Tag of the target dns server."
msgstr "目标 DNS 服务器标签。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:679
msgid "Tag of the target outbound."
msgstr "目标出站标签。"
#: htdocs/luci-static/resources/view/homeproxy/server.js:327
msgid ""
"Tell the client to use the BBR flow control algorithm instead of Hysteria CC."
msgstr "让客户端使用 BBR 流控算法,而不是 Hysteria CC。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:187
#: htdocs/luci-static/resources/view/homeproxy/client.js:222
msgid "Tencent Public DNS (119.29.29.29)"
msgstr "腾讯公共 DNS(119.29.29.29)"
#: htdocs/luci-static/resources/view/homeproxy/client.js:510
msgid "Test URL"
msgstr "测试 URL"
#: htdocs/luci-static/resources/view/homeproxy/client.js:138
#: htdocs/luci-static/resources/view/homeproxy/client.js:167
#: htdocs/luci-static/resources/view/homeproxy/client.js:530
msgid "Test interval"
msgstr "测试间隔"
#: htdocs/luci-static/resources/view/homeproxy/client.js:538
msgid "Test interval must be less or equal than idle timeout."
msgstr "测试间隔时间必须小于或等于空闲超时时间。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:144
#: htdocs/luci-static/resources/view/homeproxy/client.js:173
#: htdocs/luci-static/resources/view/homeproxy/client.js:546
msgid "Test tolerance"
msgstr "测试容差"
#: htdocs/luci-static/resources/view/homeproxy/server.js:628
msgid "The ACME CA provider to use."
msgstr "使用的 ACME CA 颁发机构。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:878
msgid "The DNS strategy for resolving the domain name in the address."
msgstr "用于解析地址中域名的 DNS 策略。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:600
#: htdocs/luci-static/resources/view/homeproxy/server.js:308
msgid "The QUIC connection-level flow control window for receiving data."
msgstr "用于接收数据的 QUIC 连接级流控制窗口。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:594
#: htdocs/luci-static/resources/view/homeproxy/server.js:301
msgid "The QUIC stream-level flow control window for receiving data."
msgstr "用于接收数据的 QUIC 流级流控制窗口。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:511
msgid "The URL to test."
msgstr "用于测试的 URL。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:958
msgid "The address of the dns server."
msgstr "DNS 服务器的地址。"
#: htdocs/luci-static/resources/view/homeproxy/server.js:679
msgid ""
"The alternate port to use for the ACME HTTP challenge; if non-empty, this "
"port will be used instead of 80 to spin up a listener for the HTTP challenge."
msgstr ""
"用于 ACME HTTP 质询的备用端口;如果非空,将使用此端口而不是 80 来启动 HTTP 质"
"询的侦听器。"
#: htdocs/luci-static/resources/view/homeproxy/server.js:685
msgid ""
"The alternate port to use for the ACME TLS-ALPN challenge; the system must "
"forward 443 to this port for challenge to succeed."
msgstr ""
"用于 ACME TLS-ALPN 质询的备用端口; 系统必须将 443 转发到此端口以使质询成功。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:596
msgid ""
"The default rule uses the following matching logic:<br/><code>(domain || "
"domain_suffix || domain_keyword || domain_regex || ip_cidr || "
"ip_is_private)</code> &&<br/><code>(port || port_range)</code> &&<br/"
"><code>(source_ip_cidr || source_ip_is_private)</code> &&<br/"
"><code>(source_port || source_port_range)</code> &&<br/><code>other fields</"
"code>.<br/>Additionally, included rule sets can be considered merged rather "
"than as a single rule sub-item."
msgstr ""
"默认规则使用以下匹配逻辑:<br/><code>(domain || domain_suffix || "
"domain_keyword || domain_regex || ip_cidr || ip_is_private)</code> &&<br/"
"><code>(port || port_range)</code> &&<br/><code>(source_ip_cidr || "
"source_ip_is_private)</code> &&<br/><code>(source_port || "
"source_port_range)</code> &&<br/><code>其他字段</code>。此外,包含的所有规则"
"集会被合并而不是独立生效。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1075
msgid ""
"The default rule uses the following matching logic:<br/><code>(domain || "
"domain_suffix || domain_keyword || domain_regex)</code> &&<br/><code>(port "
"|| port_range)</code> &&<br/><code>(source_ip_cidr || source_ip_is_private)</"
"code> &&<br/><code>(source_port || source_port_range)</code> &&<br/"
"><code>other fields</code>.<br/>Additionally, included rule sets can be "
"considered merged rather than as a single rule sub-item."
msgstr ""
"默认规则使用以下匹配逻辑:<br/><code>(domain || domain_suffix || "
"domain_keyword || domain_regex)</code> &&<br/><code>(port || port_range)</"
"code> &&<br/><code>(source_ip_cidr || source_ip_is_private)</code> &&<br/"
"><code>(source_port || source_port_range)</code> &&<br/><code>其他字段</"
"code>。此外,包含的所有规则集会被合并而不是独立生效。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:218
msgid ""
"The dns server for resolving China domains. Support UDP, TCP, DoH, DoQ, DoT."
msgstr "用于解析国内域名的 DNS 服务器。支持 UDP、TCP、DoH、DoQ、DoT。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:447
#: htdocs/luci-static/resources/view/homeproxy/client.js:1020
msgid "The domain strategy for resolving the domain name in the address."
msgstr "用于解析本 DNS 服务器的域名的策略。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1065
#: htdocs/luci-static/resources/view/homeproxy/server.js:587
msgid ""
"The elliptic curves that will be used in an ECDHE handshake, in preference "
"order. If empty, the default will be used."
msgstr "将在 ECDHE 握手中使用的椭圆曲线,按优先顺序排列。留空使用默认值。"
#: htdocs/luci-static/resources/view/homeproxy/server.js:613
msgid ""
"The email address to use when creating or selecting an existing ACME server "
"account."
msgstr "创建或选择现有 ACME 服务器帐户时使用的电子邮件地址。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:743
msgid ""
"The fallback value in milliseconds used when TLS segmentation cannot "
"automatically determine the wait time."
msgstr "当 TLS 分段无法自动确定等待时间时使用的回退值(单位:毫秒)。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:554
msgid "The idle timeout in seconds."
msgstr "空闲超时时间(单位:秒)。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1057
#: htdocs/luci-static/resources/view/homeproxy/server.js:579
msgid "The maximum TLS version that is acceptable."
msgstr "可接受的最高 TLS 版本。"
#: htdocs/luci-static/resources/view/homeproxy/server.js:315
msgid ""
"The maximum number of QUIC concurrent bidirectional streams that a peer is "
"allowed to open."
msgstr "允许对等点打开的 QUIC 并发双向流的最大数量。"
#: htdocs/luci-static/resources/view/homeproxy/server.js:740
msgid "The maximum time difference between the server and the client."
msgstr "服务器和客户端之间的最大时间差。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1049
#: htdocs/luci-static/resources/view/homeproxy/server.js:571
msgid "The minimum TLS version that is acceptable."
msgstr "可接受的最低 TLS 版本。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:102
#: htdocs/luci-static/resources/view/homeproxy/server.js:130
msgid "The modern ImmortalWrt proxy platform for ARM64/AMD64."
msgstr "为 ARM64/AMD64 设计的现代 ImmortalWrt 代理平台。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:454
#: htdocs/luci-static/resources/view/homeproxy/server.js:875
msgid "The network interface to bind to."
msgstr "绑定到的网络接口。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:967
msgid "The path of the DNS server."
msgstr "DNS 服务器的路径。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1099
msgid ""
"The path to the ECH config, in PEM format. If empty, load from DNS will be "
"attempted."
msgstr "PEM 格式的 ECH 配置路径。如果为空,将尝试从 DNS 加载。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1078
msgid "The path to the server certificate, in PEM format."
msgstr "服务端证书路径,需要 PEM 格式。"
#: htdocs/luci-static/resources/view/homeproxy/server.js:199
msgid "The port must be unique."
msgstr "必须是唯一端口。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:963
msgid "The port of the DNS server."
msgstr "DNS 服务器的端口。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1215
msgid "The response code."
msgstr "响应代码。"
#: htdocs/luci-static/resources/view/homeproxy/server.js:776
msgid "The server private key, in PEM format."
msgstr "服务端私钥,需要 PEM 格式。"
#: htdocs/luci-static/resources/view/homeproxy/server.js:757
msgid "The server public key, in PEM format."
msgstr "服务端公钥,需要 PEM 格式。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:461
msgid ""
"The tag of the upstream outbound.<br/>Other dial fields will be ignored when "
"enabled."
msgstr "上游出站的标签。<br/>启用时,其他拨号字段将被忽略。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:139
#: htdocs/luci-static/resources/view/homeproxy/client.js:168
#: htdocs/luci-static/resources/view/homeproxy/client.js:531
msgid "The test interval in seconds."
msgstr "测试间隔时间(单位:秒)。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:145
#: htdocs/luci-static/resources/view/homeproxy/client.js:174
#: htdocs/luci-static/resources/view/homeproxy/client.js:547
msgid "The test tolerance in milliseconds."
msgstr "测试容差时间(单位:毫秒)。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:807
#: htdocs/luci-static/resources/view/homeproxy/server.js:465
msgid ""
"The timeout (in seconds) that after performing a keepalive check, the client "
"will wait for activity. If no activity is detected, the connection will be "
"closed."
msgstr ""
"经过一段时间(单位:秒)之后,客户端将执行 keepalive 检查并等待活动。如果没有"
"检测到任何活动,则会关闭连接。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1043
#: htdocs/luci-static/resources/view/homeproxy/node.js:1385
msgid ""
"This is <strong>DANGEROUS</strong>, your traffic is almost like "
"<strong>PLAIN TEXT</strong>! Use at your own risk!"
msgstr ""
"这是危险行为,您的流量将几乎等同于<strong>明文</strong>!使用风险自负!"
#: htdocs/luci-static/resources/view/homeproxy/node.js:717
msgid ""
"This is the TUIC port of the UDP over TCP protocol, designed to provide a "
"QUIC stream based UDP relay mode that TUIC does not provide."
msgstr ""
"这是 TUIC 的 UDP over TCP 协议移植, 旨在提供 TUIC 不提供的基于 QUIC 流的 "
"UDP 中继模式。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:188
#: htdocs/luci-static/resources/view/homeproxy/client.js:223
msgid "ThreatBook Public DNS (117.50.10.10)"
msgstr "微步在线公共 DNS(117.50.10.10)"
#: htdocs/luci-static/resources/view/homeproxy/client.js:723
msgid ""
"Timeout for UDP connections.<br/>Setting a larger value than the UDP timeout "
"in inbounds will have no effect."
msgstr "UDP 连接的超时时间。<br/>设置比入站的 UDP 超时更大的值将无效。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:918
msgid ""
"Timeout of rejected DNS response cache in seconds. <code>604800 (7d)</code> "
"is used by default."
msgstr ""
"被拒绝的 DNS 响应缓存超时时间(单位:秒)。默认为 <code>604800(7 天)</"
"code>。"
#: htdocs/luci-static/resources/view/homeproxy/server.js:491
msgid ""
"To be compatible with Xray-core, set this to <code>Sec-WebSocket-Protocol</"
"code>."
msgstr ""
"要与 Xray-core 兼容,请将其设置为 <code>Sec-WebSocket-Protocol</code>。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:292
msgid ""
"To enable Tun support, you need to install <code>ip-full</code> and "
"<code>kmod-tun</code>"
msgstr ""
"要启用 Tun 支持,您需要安装 <code>ip-full</code> 和 <code>kmod-tun</code>。"
#: htdocs/luci-static/resources/view/homeproxy/status.js:135
msgid "Trace"
msgstr "跟踪"
#: htdocs/luci-static/resources/view/homeproxy/node.js:773
#: htdocs/luci-static/resources/view/homeproxy/server.js:399
msgid "Transport"
msgstr "传输层"
#: htdocs/luci-static/resources/view/homeproxy/node.js:439
#: htdocs/luci-static/resources/view/homeproxy/server.js:186
msgid "Trojan"
msgstr "Trojan"
#: htdocs/luci-static/resources/view/homeproxy/node.js:441
#: htdocs/luci-static/resources/view/homeproxy/server.js:188
msgid "Tuic"
msgstr "Tuic"
#: htdocs/luci-static/resources/view/homeproxy/client.js:290
msgid "Tun TCP/UDP"
msgstr "Tun TCP/UDP"
#: htdocs/luci-static/resources/view/homeproxy/client.js:947
#: htdocs/luci-static/resources/view/homeproxy/client.js:1333
#: htdocs/luci-static/resources/view/homeproxy/node.js:427
#: htdocs/luci-static/resources/view/homeproxy/server.js:175
msgid "Type"
msgstr "类型"
#: htdocs/luci-static/resources/view/homeproxy/client.js:639
#: htdocs/luci-static/resources/view/homeproxy/client.js:948
#: htdocs/luci-static/resources/view/homeproxy/client.js:1100
#: htdocs/luci-static/resources/view/homeproxy/server.js:868
msgid "UDP"
msgstr "UDP"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1167
#: htdocs/luci-static/resources/view/homeproxy/server.js:854
msgid "UDP Fragment"
msgstr "UDP 分片"
#: htdocs/luci-static/resources/view/homeproxy/client.js:335
#: htdocs/luci-static/resources/view/homeproxy/server.js:859
msgid "UDP NAT expiration time"
msgstr "UDP NAT 过期时间"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1171
msgid "UDP over TCP"
msgstr "UDP over TCP"
#: htdocs/luci-static/resources/view/homeproxy/node.js:716
msgid "UDP over stream"
msgstr "UDP over stream"
#: htdocs/luci-static/resources/view/homeproxy/node.js:709
msgid "UDP packet relay mode."
msgstr "UDP 包中继模式。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:708
msgid "UDP relay mode"
msgstr "UDP 中继模式"
#: htdocs/luci-static/resources/view/homeproxy/client.js:722
msgid "UDP timeout"
msgstr "UDP 超时"
#: htdocs/luci-static/resources/view/homeproxy/client.js:124
#: htdocs/luci-static/resources/view/homeproxy/client.js:153
#: htdocs/luci-static/resources/view/homeproxy/client.js:421
msgid "URLTest"
msgstr "URLTest"
#: htdocs/luci-static/resources/view/homeproxy/client.js:131
#: htdocs/luci-static/resources/view/homeproxy/client.js:160
#: htdocs/luci-static/resources/view/homeproxy/client.js:496
msgid "URLTest nodes"
msgstr "URLTest 节点"
#: htdocs/luci-static/resources/view/homeproxy/node.js:690
#: htdocs/luci-static/resources/view/homeproxy/server.js:346
msgid "UUID"
msgstr "UUID"
#: htdocs/luci-static/resources/view/homeproxy/status.js:98
msgid "Unknown error."
msgstr "未知错误。"
#: htdocs/luci-static/resources/view/homeproxy/status.js:195
msgid "Unknown error: %s"
msgstr "未知错误:%s"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1135
msgid "Unsupported fingerprint!"
msgstr "不支持的指纹!"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1409
msgid "Update %s subscriptions"
msgstr "更新 %s 个订阅"
#: htdocs/luci-static/resources/view/homeproxy/status.js:89
msgid "Update failed."
msgstr "更新失败。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1391
msgid "Update interval"
msgstr "更新间隔"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1392
msgid "Update interval of rule set."
msgstr "规则集更新间隔。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1404
msgid "Update nodes from subscriptions"
msgstr "从订阅更新节点"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1346
msgid "Update subscriptions via proxy."
msgstr "使用代理更新订阅。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1339
msgid "Update time"
msgstr "更新时间"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1345
msgid "Update via proxy"
msgstr "使用代理更新"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1104
msgid "Upload ECH config"
msgstr "上传 ECH 配置"
#: htdocs/luci-static/resources/view/homeproxy/node.js:995
#: htdocs/luci-static/resources/view/homeproxy/server.js:523
msgid "Upload bandwidth"
msgstr "上传带宽"
#: htdocs/luci-static/resources/view/homeproxy/node.js:996
#: htdocs/luci-static/resources/view/homeproxy/server.js:524
msgid "Upload bandwidth in Mbps."
msgstr "上传带宽(单位:Mbps)。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1085
#: htdocs/luci-static/resources/view/homeproxy/server.js:767
msgid "Upload certificate"
msgstr "上传证书"
#: htdocs/luci-static/resources/view/homeproxy/server.js:786
msgid "Upload key"
msgstr "上传密钥"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1088
#: htdocs/luci-static/resources/view/homeproxy/node.js:1107
#: htdocs/luci-static/resources/view/homeproxy/server.js:770
#: htdocs/luci-static/resources/view/homeproxy/server.js:789
msgid "Upload..."
msgstr "上传..."
#: htdocs/luci-static/resources/view/homeproxy/server.js:596
msgid "Use ACME TLS certificate issuer."
msgstr "使用 ACME TLS 证书颁发机构。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1031
#: htdocs/luci-static/resources/view/homeproxy/server.js:561
msgid ""
"Used to verify the hostname on the returned certificates unless insecure is "
"given."
msgstr "用于验证返回证书上的主机名。如允许不安全连接,此配置无效。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:980
msgid "Used to verify the hostname on the returned certificates."
msgstr "用于验证返回证书上的主机名。"
#: htdocs/luci-static/resources/view/homeproxy/client.js:642
#: htdocs/luci-static/resources/view/homeproxy/client.js:1114
msgid "User"
msgstr "用户"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1379
msgid "User-Agent"
msgstr "用户代理"
#: htdocs/luci-static/resources/view/homeproxy/node.js:458
#: htdocs/luci-static/resources/view/homeproxy/server.js:203
msgid "Username"
msgstr "用户名"
#: htdocs/luci-static/resources/view/homeproxy/node.js:444
#: htdocs/luci-static/resources/view/homeproxy/server.js:189
msgid "VLESS"
msgstr "VLESS"
#: htdocs/luci-static/resources/view/homeproxy/node.js:445
#: htdocs/luci-static/resources/view/homeproxy/server.js:190
msgid "VMess"
msgstr "VMess"
#: htdocs/luci-static/resources/view/homeproxy/client.js:181
#: htdocs/luci-static/resources/view/homeproxy/client.js:219
msgid "WAN DNS (read from interface)"
msgstr "WAN DNS(从接口获取)"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1463
msgid "WAN IP Policy"
msgstr "WAN IP 策略"
#: htdocs/luci-static/resources/view/homeproxy/status.js:138
msgid "Warn"
msgstr "警告"
#: htdocs/luci-static/resources/view/homeproxy/node.js:780
#: htdocs/luci-static/resources/view/homeproxy/server.js:406
msgid "WebSocket"
msgstr "WebSocket"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1370
msgid "Whitelist mode"
msgstr "白名单模式"
#: htdocs/luci-static/resources/view/homeproxy/node.js:443
msgid "WireGuard"
msgstr "WireGuard"
#: htdocs/luci-static/resources/view/homeproxy/node.js:912
msgid "WireGuard peer public key."
msgstr "WireGuard 对端公钥。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:919
msgid "WireGuard pre-shared key."
msgstr "WireGuard 预共享密钥。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:904
msgid "WireGuard requires base64-encoded private keys."
msgstr "WireGuard 要求 base64 编码的私钥。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:498
msgid "Write proxy protocol in the connection header."
msgstr "在连接头中写入代理协议。"
#: htdocs/luci-static/resources/view/homeproxy/node.js:889
#: htdocs/luci-static/resources/view/homeproxy/node.js:1392
msgid "Xudp (Xray-core)"
msgstr "Xudp (Xray-core)"
#: htdocs/luci-static/resources/homeproxy.js:259
msgid "Your %s was successfully uploaded. Size: %sB."
msgstr "您的 %s 已成功上传。大小:%sB。"
#: htdocs/luci-static/resources/view/homeproxy/server.js:630
msgid "ZeroSSL"
msgstr "ZeroSSL"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1090
#: htdocs/luci-static/resources/view/homeproxy/server.js:772
msgid "certificate"
msgstr "证书"
#: htdocs/luci-static/resources/view/homeproxy/client.js:716
msgid "connect UDP connections"
msgstr "主动连接 UDP"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1050
#: htdocs/luci-static/resources/view/homeproxy/node.js:1058
#: htdocs/luci-static/resources/view/homeproxy/server.js:572
#: htdocs/luci-static/resources/view/homeproxy/server.js:580
msgid "default"
msgstr "默认"
#: htdocs/luci-static/resources/view/homeproxy/status.js:53
msgid "failed"
msgstr "失败"
#: htdocs/luci-static/resources/view/homeproxy/node.js:776
#: htdocs/luci-static/resources/view/homeproxy/server.js:402
msgid "gRPC"
msgstr "gRPC"
#: htdocs/luci-static/resources/view/homeproxy/node.js:818
msgid "gRPC permit without stream"
msgstr "gRPC 允许无活动连接"
#: htdocs/luci-static/resources/view/homeproxy/node.js:813
#: htdocs/luci-static/resources/view/homeproxy/server.js:430
msgid "gRPC service name"
msgstr "gRPC 服务名称"
#: htdocs/luci-static/resources/view/homeproxy/client.js:311
msgid "gVisor"
msgstr "gVisor"
#: htdocs/luci-static/resources/homeproxy.js:306
#: htdocs/luci-static/resources/homeproxy.js:326
#: htdocs/luci-static/resources/view/homeproxy/client.js:195
#: htdocs/luci-static/resources/view/homeproxy/client.js:230
#: htdocs/luci-static/resources/view/homeproxy/client.js:504
#: htdocs/luci-static/resources/view/homeproxy/client.js:1356
#: htdocs/luci-static/resources/view/homeproxy/node.js:488
#: htdocs/luci-static/resources/view/homeproxy/node.js:1131
#: htdocs/luci-static/resources/view/homeproxy/server.js:235
msgid "non-empty value"
msgstr "非空值"
#: htdocs/luci-static/resources/view/homeproxy/node.js:631
#: htdocs/luci-static/resources/view/homeproxy/node.js:887
#: htdocs/luci-static/resources/view/homeproxy/node.js:1390
msgid "none"
msgstr "无"
#: htdocs/luci-static/resources/view/homeproxy/node.js:888
#: htdocs/luci-static/resources/view/homeproxy/node.js:1391
msgid "packet addr (v2ray-core v5+)"
msgstr "packet addr (v2ray-core v5+)"
#: htdocs/luci-static/resources/view/homeproxy/status.js:50
msgid "passed"
msgstr "通过"
#: htdocs/luci-static/resources/view/homeproxy/server.js:791
msgid "private key"
msgstr "私钥"
#: htdocs/luci-static/resources/view/homeproxy/client.js:631
msgid "quic-go / uquic chrome"
msgstr "quic-go / uquic chrome"
#: htdocs/luci-static/resources/view/homeproxy/status.js:285
msgid "sing-box client"
msgstr "sing-box 客户端"
#: htdocs/luci-static/resources/view/homeproxy/status.js:288
msgid "sing-box server"
msgstr "sing-box 服务端"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1113
msgid "uTLS fingerprint"
msgstr "uTLS 指纹"
#: htdocs/luci-static/resources/view/homeproxy/node.js:1114
msgid ""
"uTLS is a fork of \"crypto/tls\", which provides ClientHello fingerprinting "
"resistance."
msgstr ""
"uTLS 是 \"crypto/tls\" 的一个分支,拥有抵抗 ClientHello 指纹识别的能力。"
#: htdocs/luci-static/resources/view/homeproxy/status.js:59
msgid "unchecked"
msgstr "未检查"
#: htdocs/luci-static/resources/homeproxy.js:237
#: htdocs/luci-static/resources/view/homeproxy/node.js:1302
msgid "unique UCI identifier"
msgstr "独立 UCI 标识"
#: htdocs/luci-static/resources/homeproxy.js:317
msgid "unique value"
msgstr "独立值"
#: htdocs/luci-static/resources/view/homeproxy/node.js:500
#: htdocs/luci-static/resources/view/homeproxy/node.js:645
#: htdocs/luci-static/resources/view/homeproxy/node.js:1178
msgid "v1"
msgstr "v1"
#: htdocs/luci-static/resources/view/homeproxy/node.js:501
#: htdocs/luci-static/resources/view/homeproxy/node.js:646
#: htdocs/luci-static/resources/view/homeproxy/node.js:1179
msgid "v2"
msgstr "v2"
#: htdocs/luci-static/resources/view/homeproxy/node.js:647
msgid "v3"
msgstr "v3"
#: htdocs/luci-static/resources/view/homeproxy/client.js:207
#: htdocs/luci-static/resources/view/homeproxy/client.js:211
#: htdocs/luci-static/resources/view/homeproxy/client.js:241
#: htdocs/luci-static/resources/view/homeproxy/client.js:245
msgid "valid DNS server address"
msgstr "有效 DNS 服务器地址"
#: htdocs/luci-static/resources/view/homeproxy/client.js:518
#: htdocs/luci-static/resources/view/homeproxy/client.js:521
#: htdocs/luci-static/resources/view/homeproxy/client.js:1361
#: htdocs/luci-static/resources/view/homeproxy/client.js:1364
#: htdocs/luci-static/resources/view/homeproxy/node.js:1356
#: htdocs/luci-static/resources/view/homeproxy/node.js:1359
msgid "valid URL"
msgstr "有效网址"
#: htdocs/luci-static/resources/homeproxy.js:271
msgid "valid base64 key with %d characters"
msgstr "包含 %d 个字符的有效 base64 密钥"
#: htdocs/luci-static/resources/view/homeproxy/client.js:1506
#: htdocs/luci-static/resources/view/homeproxy/client.js:1538
msgid "valid hostname"
msgstr "有效主机名"
#: htdocs/luci-static/resources/homeproxy.js:297
msgid "valid port range (port1:port2)"
msgstr "有效端口范围(port1:port2)"
#: htdocs/luci-static/resources/view/homeproxy/client.js:274
msgid "valid port value"
msgstr "有效端口值"
#: htdocs/luci-static/resources/homeproxy.js:328
msgid "valid uuid"
msgstr "有效 uuid"
|
2977094657/BilibiliHistoryFetcher
| 34,999
|
routers/popular_analytics.py
|
import sqlite3
from typing import Optional
from fastapi import APIRouter, Query, HTTPException
from scripts.utils import load_config, get_output_path
# FastAPI router for this module's endpoints (registered by the app elsewhere).
router = APIRouter()
# Project configuration loaded once at import time; 'db_file' names the history DB.
config = load_config()
def get_db():
    """Open and return a SQLite connection to the configured history database."""
    return sqlite3.connect(get_output_path(config['db_file']))
def validate_year_and_get_table(year: Optional[int]) -> tuple:
    """Validate *year* and resolve the matching history table name.

    Returns ``(table_name, target_year, available_years)`` on success.
    On failure the first two items are ``None`` and the third is an error
    dict of the form ``{"status": "error", "message": ...}``.
    """
    from scripts.analyze_bilibili_history import get_available_years

    available_years = get_available_years()

    # No data at all: nothing to resolve.
    if not available_years:
        error = {
            "status": "error",
            "message": "未找到任何历史记录数据"
        }
        return None, None, error

    # An explicitly requested year must exist in the data.
    if year is not None and year not in available_years:
        error = {
            "status": "error",
            "message": f"未找到 {year} 年的历史记录数据。可用的年份有:{', '.join(map(str, available_years))}"
        }
        return None, None, error

    # Default to the most recent year when none was requested.
    target_year = available_years[0] if year is None else year
    return f"bilibili_history_{target_year}", target_year, available_years
def analyze_popular_hit_rate(cursor, table_name: str, target_year: int) -> dict:
    """分析热门视频命中率

    Compares the watch history in ``table_name`` with the popular-video
    databases: how many watched videos were ever trending, and whether they
    were watched shortly after publication or later.

    Args:
        cursor: sqlite3 cursor on the history database.
        table_name: history table to analyze (e.g. ``bilibili_history_2024``).
        target_year: year being analyzed (kept for signature parity with the
            sibling analyzers; not used directly here).

    Returns:
        dict with totals, hit rate, up to 10 sample hit videos, a timing
        breakdown and human-readable insights.
    """
    import datetime

    # 1. 获取用户观看的所有视频
    cursor.execute(f"""
        SELECT DISTINCT bvid, title, author_name, view_at, duration, progress
        FROM {table_name}
        WHERE bvid IS NOT NULL AND bvid != ''
        ORDER BY view_at ASC
    """)
    user_videos = cursor.fetchall()
    total_watched = len(user_videos)

    if total_watched == 0:
        return {
            "total_watched": 0,
            "popular_hit_count": 0,
            "hit_rate": 0,
            "insights": ["本年度没有观看记录"]
        }

    # 2. 获取热门视频数据库连接(失败时降级为仅返回观看统计)
    try:
        from scripts.popular_videos import get_multi_year_connections
        popular_connections = get_multi_year_connections()
    except Exception as e:
        print(f"获取热门视频数据库连接失败: {e}")
        return {
            "total_watched": total_watched,
            "popular_hit_count": 0,
            "hit_rate": 0,
            "insights": [f"观看了 {total_watched} 个视频,但无法获取热门视频数据进行对比"]
        }

    # FIX: 之前版本在出现异常时会泄漏已打开的连接;用 try/finally 保证关闭。
    try:
        # 3. 检查哪些视频曾经是热门,并获取发布时间信息
        popular_bvids = set()
        popular_video_times = {}  # bvid -> {"pubdate": timestamp}

        for year, conn in popular_connections.items():
            try:
                pop_cursor = conn.cursor()
                # 查询所有热门视频的bvid和发布时间
                pop_cursor.execute("SELECT DISTINCT bvid, pubdate FROM popular_videos WHERE bvid IS NOT NULL AND pubdate IS NOT NULL")
                for bvid, pubdate in pop_cursor.fetchall():
                    popular_bvids.add(bvid)
                    if bvid not in popular_video_times:
                        popular_video_times[bvid] = {"pubdate": pubdate}
            except Exception as e:
                print(f"查询 {year} 年热门视频数据失败: {e}")
                continue

        # 4. 统计命中的热门视频并分析观看时机
        time_patterns = {
            "immediate_watch": 0,  # 发布后立即观看(7天内)
            "trending_watch": 0,   # 热门期观看(7天后)
            "unknown_timing": 0    # 无法确定时机
        }
        popular_hits = []

        for video in user_videos:
            bvid = video[0]
            view_timestamp = video[3]
            if bvid not in popular_bvids:
                continue
            popular_hits.append({
                "bvid": bvid,
                "title": video[1],
                "author": video[2],
                "view_at": video[3],
                "duration": video[4],
                "progress": video[5]
            })
            # 观看时机:对比观看时间与发布时间
            pub_info = popular_video_times.get(bvid)
            if pub_info and pub_info["pubdate"]:
                try:
                    view_date = datetime.datetime.fromtimestamp(view_timestamp)
                    pub_date = datetime.datetime.fromtimestamp(pub_info["pubdate"])
                    if (view_date - pub_date).days <= 7:
                        time_patterns["immediate_watch"] += 1
                    else:
                        time_patterns["trending_watch"] += 1
                except Exception as e:
                    print(f"处理视频 {bvid} 时间数据失败: {e}")
                    time_patterns["unknown_timing"] += 1
            else:
                time_patterns["unknown_timing"] += 1

        hit_count = len(popular_hits)
        hit_rate = (hit_count / total_watched) * 100 if total_watched > 0 else 0

        # 5. 生成洞察
        insights = [
            f"今年观看了 {total_watched} 个视频",
            f"其中 {hit_count} 个曾经是热门视频",
            f"热门视频命中率为 {hit_rate:.1f}%"
        ]

        # 观看时机洞察(仅当存在可判定时机的视频时)
        total_timed_videos = time_patterns["immediate_watch"] + time_patterns["trending_watch"]
        if total_timed_videos > 0:
            immediate_rate = (time_patterns["immediate_watch"] / total_timed_videos) * 100
            if immediate_rate >= 50:
                insights.append("你是热门视频的早期发现者")
            else:
                insights.append("你喜欢在视频热门期观看")

        if hit_rate >= 50:
            insights.append("你很喜欢追热门内容!")
        elif hit_rate >= 30:
            insights.append("你对热门内容有一定关注")
        elif hit_rate >= 10:
            insights.append("你更偏爱小众内容")
        else:
            insights.append("你是真正的小众爱好者!")

        return {
            "total_watched": total_watched,
            "popular_hit_count": hit_count,
            "hit_rate": round(hit_rate, 2),
            "popular_videos": popular_hits[:10],  # 只返回前10个热门视频
            "time_pattern_analysis": time_patterns,
            "insights": insights
        }
    finally:
        # 6. 关闭热门视频数据库连接(即使上方发生异常)
        for conn in popular_connections.values():
            if conn:
                conn.close()
def analyze_popular_prediction_ability(cursor, table_name: str, target_year: int) -> dict:
    """分析热门预测能力

    Finds watched videos whose first appearance in the popular tracking table
    is *later* than the user's watch time, i.e. videos the user "predicted"
    before they trended.

    Args:
        cursor: sqlite3 cursor on the history database.
        table_name: history table to analyze.
        target_year: year being analyzed (signature parity; not used directly).

    Returns:
        dict with totals, prediction rate, top 10 predicted videos (by how far
        in advance they were watched) and human-readable insights.
    """
    # 1. 获取用户观看的所有视频(按观看时间排序)
    cursor.execute(f"""
        SELECT DISTINCT bvid, title, author_name, view_at, duration, progress
        FROM {table_name}
        WHERE bvid IS NOT NULL AND bvid != ''
        ORDER BY view_at ASC
    """)
    user_videos = cursor.fetchall()
    total_watched = len(user_videos)

    if total_watched == 0:
        return {
            "total_watched": 0,
            "predicted_count": 0,
            "prediction_rate": 0,
            "insights": ["本年度没有观看记录"]
        }

    # 2. 获取热门视频数据库连接(失败时降级)
    try:
        from scripts.popular_videos import get_multi_year_connections
        popular_connections = get_multi_year_connections()
    except Exception as e:
        print(f"获取热门视频数据库连接失败: {e}")
        return {
            "total_watched": total_watched,
            "predicted_count": 0,
            "prediction_rate": 0,
            "insights": [f"观看了 {total_watched} 个视频,但无法获取热门视频数据进行预测分析"]
        }

    # FIX: 之前版本在出现异常时会泄漏已打开的连接;用 try/finally 保证关闭。
    try:
        # 3. 分析每个观看的视频是否后来成为热门
        predicted_videos = []
        for video in user_videos:
            bvid = video[0]
            view_timestamp = video[3]  # 用户观看时间戳

            # 在所有年份的热门视频数据库中查找该视频
            for year, conn in popular_connections.items():
                try:
                    pop_cursor = conn.cursor()
                    # 查询该视频在热门列表中的首次出现时间
                    pop_cursor.execute("""
                        SELECT first_seen, title, highest_rank, appearances
                        FROM popular_video_tracking
                        WHERE bvid = ?
                    """, (bvid,))
                    tracking_result = pop_cursor.fetchone()
                    if tracking_result:
                        first_seen_timestamp, video_title, highest_rank, appearances = tracking_result
                        # 观看时间早于首次成为热门的时间 => 预测成功
                        if view_timestamp < first_seen_timestamp:
                            advance_days = (first_seen_timestamp - view_timestamp) / (24 * 3600)
                            predicted_videos.append({
                                "bvid": bvid,
                                "title": video_title or video[1],
                                "author": video[2],
                                "view_at": video[3],
                                "became_popular_at": first_seen_timestamp,
                                "advance_days": round(advance_days, 1),
                                "highest_rank": highest_rank,
                                "appearances": appearances
                            })
                        break  # 找到就跳出年份循环
                except Exception as e:
                    print(f"查询 {year} 年热门视频跟踪数据失败: {e}")
                    continue

        predicted_count = len(predicted_videos)
        prediction_rate = (predicted_count / total_watched) * 100 if total_watched > 0 else 0

        # 4. 生成洞察
        insights = [
            f"今年观看了 {total_watched} 个视频",
            f"其中 {predicted_count} 个后来成为了热门视频",
            f"热门预测成功率为 {prediction_rate:.1f}%"
        ]
        if prediction_rate >= 10:
            insights.append("你有超强的慧眼识珠能力!")
        elif prediction_rate >= 5:
            insights.append("你对优质内容很有嗅觉")
        elif prediction_rate >= 2:
            insights.append("你偶尔能发现潜力视频")
        else:
            insights.append("你更专注于自己的兴趣领域")

        # 5. 计算平均提前天数
        if predicted_videos:
            avg_advance_days = sum(v["advance_days"] for v in predicted_videos) / len(predicted_videos)
            insights.append(f"平均提前 {avg_advance_days:.1f} 天发现热门视频")

        return {
            "total_watched": total_watched,
            "predicted_count": predicted_count,
            "prediction_rate": round(prediction_rate, 2),
            "predicted_videos": sorted(predicted_videos, key=lambda x: x["advance_days"], reverse=True)[:10],
            "insights": insights
        }
    finally:
        # 6. 关闭热门视频数据库连接(即使上方发生异常)
        for conn in popular_connections.values():
            if conn:
                conn.close()
def analyze_author_popular_association(cursor, table_name: str, target_year: int) -> dict:
    """分析UP主热门关联

    Groups the watch history by uploader and measures how often each
    uploader's watched videos were trending.

    Args:
        cursor: sqlite3 cursor on the history database.
        table_name: history table to analyze.
        target_year: year being analyzed (signature parity; not used directly).

    Returns:
        dict with per-uploader stats, the top "hit makers" and insights.
    """
    # 1. 获取用户观看的所有UP主及其视频
    cursor.execute(f"""
        SELECT author_name, bvid, title, view_at, duration, progress
        FROM {table_name}
        WHERE bvid IS NOT NULL AND bvid != '' AND author_name IS NOT NULL AND author_name != ''
        ORDER BY author_name, view_at ASC
    """)
    user_videos = cursor.fetchall()

    if not user_videos:
        return {
            "total_authors": 0,
            "popular_authors": [],
            "author_stats": [],
            "insights": ["本年度没有观看记录"]
        }

    # 2. 按UP主分组统计
    author_videos = {}
    for video in user_videos:
        author_videos.setdefault(video[0], []).append({
            "bvid": video[1],
            "title": video[2],
            "view_at": video[3],
            "duration": video[4],
            "progress": video[5]
        })

    # 3. 获取热门视频数据库连接(失败时降级)
    try:
        from scripts.popular_videos import get_multi_year_connections
        popular_connections = get_multi_year_connections()
    except Exception as e:
        print(f"获取热门视频数据库连接失败: {e}")
        return {
            "total_authors": len(author_videos),
            "popular_authors": [],
            "author_stats": [],
            "insights": [f"观看了 {len(author_videos)} 个UP主的视频,但无法获取热门视频数据进行分析"]
        }

    # FIX: 之前版本在出现异常时会泄漏已打开的连接;用 try/finally 保证关闭。
    try:
        # 4. 获取所有热门视频的bvid和作者信息
        popular_bvids = set()
        popular_video_authors = {}  # bvid -> author_name
        for year, conn in popular_connections.items():
            try:
                pop_cursor = conn.cursor()
                # 查询所有热门视频的bvid和作者
                pop_cursor.execute("SELECT DISTINCT bvid, owner_name FROM popular_videos WHERE bvid IS NOT NULL AND owner_name IS NOT NULL")
                for bvid, owner_name in pop_cursor.fetchall():
                    popular_bvids.add(bvid)
                    popular_video_authors[bvid] = owner_name
            except Exception as e:
                print(f"查询 {year} 年热门视频数据失败: {e}")
                continue

        # FIX: 预先统计每个UP主的热门视频总数,避免对每个UP主全量扫描
        # popular_video_authors(原实现为 O(UP主数 × 热门视频数))。
        author_popular_totals = {}
        for owner_name in popular_video_authors.values():
            author_popular_totals[owner_name] = author_popular_totals.get(owner_name, 0) + 1

        # 5. 分析每个UP主的热门视频产出能力
        author_stats = []
        for author_name, videos in author_videos.items():
            total_videos = len(videos)
            # 该UP主被观看的视频中曾成为热门的部分
            popular_videos = [
                {"bvid": v["bvid"], "title": v["title"], "view_at": v["view_at"]}
                for v in videos
                if v["bvid"] in popular_bvids
            ]
            popular_count = len(popular_videos)
            popular_rate = (popular_count / total_videos) * 100 if total_videos > 0 else 0
            author_stats.append({
                "author_name": author_name,
                "total_videos_watched": total_videos,
                "popular_videos_watched": popular_count,
                "popular_rate": round(popular_rate, 2),
                "total_popular_videos": author_popular_totals.get(author_name, 0),
                "popular_videos": popular_videos[:5],  # 只返回前5个热门视频
                "efficiency_score": round(popular_rate * (popular_count + 1), 2)  # 综合评分
            })

        # 6. 按热门视频数量和热门率排序
        author_stats.sort(key=lambda x: (x["popular_videos_watched"], x["popular_rate"]), reverse=True)

        # 7. 筛选出热门制造机UP主(至少有1个热门视频)
        popular_authors = [author for author in author_stats if author["popular_videos_watched"] > 0]

        # 8. 生成洞察
        total_authors = len(author_videos)
        popular_author_count = len(popular_authors)
        insights = [
            f"观看了 {total_authors} 个UP主的视频",
            f"其中 {popular_author_count} 个UP主制作过热门视频"
        ]
        if popular_author_count > 0:
            avg_popular_rate = sum(author["popular_rate"] for author in popular_authors) / popular_author_count
            insights.append(f"热门UP主平均热门率为 {avg_popular_rate:.1f}%")
            top_author = popular_authors[0]
            insights.append(f"最强热门制造机:{top_author['author_name']}({top_author['popular_videos_watched']}个热门视频)")
        else:
            insights.append("你关注的UP主都很小众哦")

        return {
            "total_authors": total_authors,
            "popular_author_count": popular_author_count,
            "popular_authors": popular_authors[:20],  # 返回前20个热门UP主
            "author_stats": author_stats[:10],  # 返回前10个UP主的详细统计
            "insights": insights
        }
    finally:
        # 9. 关闭热门视频数据库连接(即使上方发生异常)
        for conn in popular_connections.values():
            if conn:
                conn.close()
def analyze_category_popular_distribution(cursor, table_name: str, target_year: int) -> dict:
    """分析热门视频分区分布

    Buckets the user's watched trending videos by the category (tid/tname)
    recorded in the popular-video databases.

    Args:
        cursor: sqlite3 cursor on the history database.
        table_name: history table to analyze.
        target_year: year being analyzed (signature parity; not used directly).

    Returns:
        dict with per-category counts, the top categories and insights.
    """
    # 1. 获取用户观看的所有视频
    cursor.execute(f"""
        SELECT DISTINCT bvid, title, author_name, view_at, duration, progress
        FROM {table_name}
        WHERE bvid IS NOT NULL AND bvid != ''
        ORDER BY view_at ASC
    """)
    user_videos = cursor.fetchall()
    total_watched = len(user_videos)

    if total_watched == 0:
        return {
            "total_watched": 0,
            "category_stats": [],
            "popular_categories": [],
            "insights": ["本年度没有观看记录"]
        }

    # 2. 获取热门视频数据库连接(失败时降级)
    try:
        from scripts.popular_videos import get_multi_year_connections
        popular_connections = get_multi_year_connections()
    except Exception as e:
        print(f"获取热门视频数据库连接失败: {e}")
        return {
            "total_watched": total_watched,
            "category_stats": [],
            "popular_categories": [],
            "insights": [f"观看了 {total_watched} 个视频,但无法获取热门视频数据进行分区分析"]
        }

    # FIX: 之前版本在出现异常时会泄漏已打开的连接;用 try/finally 保证关闭。
    try:
        # 3. 获取所有热门视频的bvid和分区信息
        popular_bvids = set()
        popular_video_categories = {}  # bvid -> {"tid": tid, "tname": tname}
        for year, conn in popular_connections.items():
            try:
                pop_cursor = conn.cursor()
                # 查询所有热门视频的bvid、分区ID和分区名称
                pop_cursor.execute("SELECT DISTINCT bvid, tid, tname FROM popular_videos WHERE bvid IS NOT NULL AND tid IS NOT NULL AND tname IS NOT NULL")
                for bvid, tid, tname in pop_cursor.fetchall():
                    popular_bvids.add(bvid)
                    popular_video_categories[bvid] = {"tid": tid, "tname": tname}
            except Exception as e:
                print(f"查询 {year} 年热门视频数据失败: {e}")
                continue

        # 4. 统计用户观看的热门视频按分区分布
        category_stats = {}  # tname -> 聚合信息
        for video in user_videos:
            bvid = video[0]
            if bvid not in popular_bvids or bvid not in popular_video_categories:
                continue
            category_info = popular_video_categories[bvid]
            tname = category_info["tname"]
            entry = category_stats.setdefault(tname, {
                "category_name": tname,
                "tid": category_info["tid"],
                "total_popular": 0,
                "videos": []
            })
            entry["total_popular"] += 1
            entry["videos"].append({
                "bvid": bvid,
                "title": video[1],
                "author": video[2],
                "view_at": video[3]
            })

        # 5. 转换为列表并排序
        category_list = sorted(category_stats.values(), key=lambda x: x["total_popular"], reverse=True)

        # 6. 计算统计数据
        total_popular_watched = sum(cat["total_popular"] for cat in category_list)
        popular_rate = (total_popular_watched / total_watched) * 100 if total_watched > 0 else 0

        # 7. 生成洞察
        insights = [
            f"今年观看了 {total_watched} 个视频",
            f"其中 {total_popular_watched} 个曾经是热门视频",
            f"热门视频分布在 {len(category_list)} 个分区"
        ]
        if category_list:
            top_category = category_list[0]
            insights.append(f"最爱的热门分区:{top_category['category_name']}({top_category['total_popular']}个热门视频)")
            # 分析分区偏好
            if len(category_list) >= 3:
                top_3_count = sum(cat["total_popular"] for cat in category_list[:3])
                top_3_rate = (top_3_count / total_popular_watched) * 100 if total_popular_watched > 0 else 0
                if top_3_rate >= 70:
                    insights.append("你的热门视频偏好很集中")
                else:
                    insights.append("你的热门视频偏好很多样化")

        # 分析热门敏感度
        if popular_rate >= 50:
            insights.append("你对各分区的热门内容都很敏感")
        elif popular_rate >= 30:
            insights.append("你在某些分区有不错的热门嗅觉")
        else:
            insights.append("你更专注于特定分区的小众内容")

        return {
            "total_watched": total_watched,
            "total_popular_watched": total_popular_watched,
            "popular_rate": round(popular_rate, 2),
            "category_count": len(category_list),
            "category_stats": category_list,
            "popular_categories": category_list[:10],  # 返回前10个热门分区
            "insights": insights
        }
    finally:
        # 8. 关闭热门视频数据库连接(即使上方发生异常)
        for conn in popular_connections.values():
            if conn:
                conn.close()
def analyze_duration_popular_distribution(cursor, table_name: str, target_year: int) -> dict:
    """Analyze how the user's once-popular watched videos distribute across duration buckets.

    Args:
        cursor: sqlite3 cursor over the viewing-history database.
        table_name: yearly history table to query (already validated by the caller).
        target_year: year under analysis (kept for signature parity with sibling analyzers).

    Returns:
        dict: totals, per-bucket stats sorted by count, and human-readable insights.
    """
    # 1. Every watched video that carries a usable duration.
    cursor.execute(f"""
        SELECT DISTINCT bvid, title, author_name, view_at, duration, progress
        FROM {table_name}
        WHERE bvid IS NOT NULL AND bvid != '' AND duration IS NOT NULL AND duration > 0
        ORDER BY view_at ASC
    """)
    user_videos = cursor.fetchall()
    total_watched = len(user_videos)
    if total_watched == 0:
        return {
            "total_watched": 0,
            "duration_stats": [],
            "popular_duration_videos": [],
            "insights": ["本年度没有观看记录"]
        }

    # 2. Per-year connections to the popular-video databases.
    popular_connections = {}
    try:
        from scripts.popular_videos import get_multi_year_connections
        popular_connections = get_multi_year_connections()
    except Exception as e:
        print(f"获取热门视频数据库连接失败: {e}")
        return {
            "total_watched": total_watched,
            "duration_stats": [],
            "popular_duration_videos": [],
            "insights": [f"观看了 {total_watched} 个视频,但无法获取热门视频数据进行时长分析"]
        }

    # BUGFIX: the original only closed these connections on the success path, so
    # any exception raised below leaked them. try/finally guarantees cleanup.
    try:
        # 3. Collect bvid -> duration for every known popular video.
        popular_video_durations = {}  # bvid -> duration from the popular DBs
        popular_bvids = set()
        for year, conn in popular_connections.items():
            try:
                pop_cursor = conn.cursor()
                pop_cursor.execute("SELECT DISTINCT bvid, duration FROM popular_videos WHERE bvid IS NOT NULL AND duration IS NOT NULL AND duration > 0")
                for row in pop_cursor.fetchall():
                    bvid, duration = row
                    popular_bvids.add(bvid)
                    popular_video_durations[bvid] = duration
            except Exception as e:
                print(f"查询 {year} 年热门视频数据失败: {e}")
                continue

        # 4. Duration buckets in seconds (lower bound inclusive, upper exclusive).
        duration_ranges = {
            "短视频": {"min": 0, "max": 300, "videos": [], "count": 0},  # <= 5 min
            "中等视频": {"min": 300, "max": 1200, "videos": [], "count": 0},  # 5-20 min
            "长视频": {"min": 1200, "max": 3600, "videos": [], "count": 0},  # 20-60 min
            "超长视频": {"min": 3600, "max": float('inf'), "videos": [], "count": 0}  # > 60 min
        }

        # 5. Bucket each watched video that was ever popular.
        total_popular_watched = 0
        for video in user_videos:
            bvid = video[0]
            user_duration = video[4]  # duration recorded in the user's history row
            if bvid in popular_bvids:
                total_popular_watched += 1
                # Prefer the duration stored in the popular-video DB, fall back to history.
                duration = popular_video_durations.get(bvid, user_duration)
                for range_name, range_info in duration_ranges.items():
                    if range_info["min"] <= duration < range_info["max"]:
                        range_info["count"] += 1
                        range_info["videos"].append({
                            "bvid": bvid,
                            "title": video[1],
                            "author": video[2],
                            "view_at": video[3],
                            "duration": duration,
                            "formatted_duration": format_duration(duration)
                        })
                        break

        # 6. Overall popular hit rate.
        popular_rate = (total_popular_watched / total_watched) * 100 if total_watched > 0 else 0

        # 7. Human-readable insights.
        insights = []
        insights.append(f"今年观看了 {total_watched} 个视频")
        insights.append(f"其中 {total_popular_watched} 个曾经是热门视频")
        if total_popular_watched > 0:
            # Favorite duration bucket (first max wins on ties, like the original dict order).
            max_count = 0
            favorite_duration_type = ""
            for range_name, range_info in duration_ranges.items():
                if range_info["count"] > max_count:
                    max_count = range_info["count"]
                    favorite_duration_type = range_name
            if favorite_duration_type:
                favorite_rate = (max_count / total_popular_watched) * 100
                insights.append(f"最偏爱{favorite_duration_type}热门内容({max_count}个,占{favorite_rate:.1f}%)")
            # Characterize the duration preference.
            short_count = duration_ranges["短视频"]["count"]
            medium_count = duration_ranges["中等视频"]["count"]
            long_count = duration_ranges["长视频"]["count"]
            super_long_count = duration_ranges["超长视频"]["count"]
            if short_count >= total_popular_watched * 0.5:
                insights.append("你偏爱快节奏的短视频内容")
            elif long_count + super_long_count >= total_popular_watched * 0.5:
                insights.append("你喜欢深度的长视频内容")
            elif medium_count >= total_popular_watched * 0.4:
                insights.append("你偏爱适中时长的视频内容")
            else:
                insights.append("你对各种时长的视频都有涉猎")

        # 8. Shape the response payload (only non-empty buckets, largest first).
        duration_stats = []
        for range_name, range_info in duration_ranges.items():
            if range_info["count"] > 0:
                duration_stats.append({
                    "duration_type": range_name,
                    "count": range_info["count"],
                    "percentage": round((range_info["count"] / total_popular_watched) * 100, 1) if total_popular_watched > 0 else 0,
                    "videos": sorted(range_info["videos"], key=lambda x: x["view_at"], reverse=True)[:5]  # 5 most recently watched
                })
        duration_stats.sort(key=lambda x: x["count"], reverse=True)

        return {
            "total_watched": total_watched,
            "total_popular_watched": total_popular_watched,
            "popular_rate": round(popular_rate, 2),
            "duration_stats": duration_stats,
            "popular_duration_videos": duration_stats[:4],  # top 4 bucket types
            "insights": insights
        }
    finally:
        # 9. Always release the popular-video DB connections.
        for conn in popular_connections.values():
            if conn:
                conn.close()
def format_duration(seconds):
    """Render a duration in seconds as a compact Chinese string (秒/分钟/小时)."""
    total = int(seconds)
    if seconds < 60:
        return f"{total}秒"
    if seconds < 3600:
        minutes, secs = divmod(total, 60)
        return f"{minutes}分{secs}秒" if secs > 0 else f"{minutes}分钟"
    hours, remainder = divmod(total, 3600)
    minutes = remainder // 60
    return f"{hours}小时{minutes}分钟" if minutes > 0 else f"{hours}小时"
@router.get("/popular-hit-rate", summary="获取热门视频命中率分析")
async def get_popular_hit_rate(
year: Optional[int] = Query(None, description="要分析的年份,不传则使用当前年份"),
use_cache: bool = Query(True, description="是否使用缓存,默认为True。如果为False则重新分析数据")
):
"""获取热门视频命中率分析
分析用户观看的视频中有多少曾经是热门视频
Args:
year: 要分析的年份,不传则使用当前年份
use_cache: 是否使用缓存,默认为True。如果为False则重新分析数据
Returns:
dict: 包含热门视频命中率分析的数据
"""
# 验证年份并获取表名
table_name, target_year, available_years = validate_year_and_get_table(year)
if table_name is None:
return available_years # 这里是错误响应
# 检查缓存
if use_cache:
try:
from .title_pattern_discovery import pattern_cache
cached_data = pattern_cache.get_cached_patterns(table_name, 'popular_hit_rate')
if cached_data:
print(f"使用 {target_year} 年的热门命中率分析缓存数据")
return cached_data
except Exception as e:
print(f"获取缓存失败: {e}")
conn = None
try:
# 连接数据库
conn = get_db()
cursor = conn.cursor()
# 分析热门视频命中率
hit_rate_analysis = analyze_popular_hit_rate(cursor, table_name, target_year)
# 构建响应
response = {
"status": "success",
"data": {
"hit_rate_analysis": hit_rate_analysis,
"year": target_year,
"available_years": available_years
}
}
# 更新缓存
if use_cache:
try:
from .title_pattern_discovery import pattern_cache
print(f"更新 {target_year} 年的热门命中率分析数据缓存")
pattern_cache.cache_patterns(table_name, 'popular_hit_rate', response)
except Exception as e:
print(f"更新缓存失败: {e}")
return response
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
finally:
if conn:
conn.close()
@router.get("/popular-prediction-ability", summary="获取热门预测能力分析")
async def get_popular_prediction_ability(
year: Optional[int] = Query(None, description="要分析的年份,不传则使用当前年份"),
use_cache: bool = Query(True, description="是否使用缓存,默认为True。如果为False则重新分析数据")
):
"""获取热门预测能力分析
分析用户观看的视频中,有多少后来成为了热门视频,评估用户的"慧眼识珠"能力
Args:
year: 要分析的年份,不传则使用当前年份
use_cache: 是否使用缓存,默认为True。如果为False则重新分析数据
Returns:
dict: 包含热门预测能力分析的数据
"""
# 验证年份并获取表名
table_name, target_year, available_years = validate_year_and_get_table(year)
if table_name is None:
return available_years # 这里是错误响应
# 检查缓存
if use_cache:
try:
from .title_pattern_discovery import pattern_cache
cached_data = pattern_cache.get_cached_patterns(table_name, 'popular_prediction_ability')
if cached_data:
print(f"使用 {target_year} 年的热门预测能力分析缓存数据")
return cached_data
except Exception as e:
print(f"获取缓存失败: {e}")
conn = None
try:
# 连接数据库
conn = get_db()
cursor = conn.cursor()
# 分析热门预测能力
prediction_analysis = analyze_popular_prediction_ability(cursor, table_name, target_year)
# 构建响应
response = {
"status": "success",
"data": {
"prediction_analysis": prediction_analysis,
"year": target_year,
"available_years": available_years
}
}
# 更新缓存
if use_cache:
try:
from .title_pattern_discovery import pattern_cache
print(f"更新 {target_year} 年的热门预测能力分析数据缓存")
pattern_cache.cache_patterns(table_name, 'popular_prediction_ability', response)
except Exception as e:
print(f"更新缓存失败: {e}")
return response
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
finally:
if conn:
conn.close()
@router.get("/author-popular-association", summary="获取UP主热门关联分析")
async def get_author_popular_association(
year: Optional[int] = Query(None, description="要分析的年份,不传则使用当前年份"),
use_cache: bool = Query(True, description="是否使用缓存,默认为True。如果为False则重新分析数据")
):
"""获取UP主热门关联分析
分析关注UP主的热门视频产出能力,统计"热门制造机"UP主
Args:
year: 要分析的年份,不传则使用当前年份
use_cache: 是否使用缓存,默认为True。如果为False则重新分析数据
Returns:
dict: 包含UP主热门关联分析的数据
"""
# 验证年份并获取表名
table_name, target_year, available_years = validate_year_and_get_table(year)
if table_name is None:
return available_years # 这里是错误响应
# 检查缓存
if use_cache:
try:
from .title_pattern_discovery import pattern_cache
cached_data = pattern_cache.get_cached_patterns(table_name, 'author_popular_association')
if cached_data:
print(f"使用 {target_year} 年的UP主热门关联分析缓存数据")
return cached_data
except Exception as e:
print(f"获取缓存失败: {e}")
conn = None
try:
# 连接数据库
conn = get_db()
cursor = conn.cursor()
# 分析UP主热门关联
association_analysis = analyze_author_popular_association(cursor, table_name, target_year)
# 构建响应
response = {
"status": "success",
"data": {
"association_analysis": association_analysis,
"year": target_year,
"available_years": available_years
}
}
# 更新缓存
if use_cache:
try:
from .title_pattern_discovery import pattern_cache
print(f"更新 {target_year} 年的UP主热门关联分析数据缓存")
pattern_cache.cache_patterns(table_name, 'author_popular_association', response)
except Exception as e:
print(f"更新缓存失败: {e}")
return response
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
finally:
if conn:
conn.close()
@router.get("/category-popular-distribution", summary="获取热门视频分区分布分析")
async def get_category_popular_distribution(
year: Optional[int] = Query(None, description="要分析的年份,不传则使用当前年份"),
use_cache: bool = Query(True, description="是否使用缓存,默认为True。如果为False则重新分析数据")
):
"""获取热门视频分区分布分析
分析用户观看的热门视频在各个分区的分布情况
Args:
year: 要分析的年份,不传则使用当前年份
use_cache: 是否使用缓存,默认为True。如果为False则重新分析数据
Returns:
dict: 包含热门视频分区分布分析的数据
"""
# 验证年份并获取表名
table_name, target_year, available_years = validate_year_and_get_table(year)
if table_name is None:
return available_years # 这里是错误响应
# 检查缓存
if use_cache:
try:
from .title_pattern_discovery import pattern_cache
cached_data = pattern_cache.get_cached_patterns(table_name, 'category_popular_distribution')
if cached_data:
print(f"使用 {target_year} 年的热门视频分区分布分析缓存数据")
return cached_data
except Exception as e:
print(f"获取缓存失败: {e}")
conn = None
try:
# 连接数据库
conn = get_db()
cursor = conn.cursor()
# 分析热门视频分区分布
distribution_analysis = analyze_category_popular_distribution(cursor, table_name, target_year)
# 构建响应
response = {
"status": "success",
"data": {
"distribution_analysis": distribution_analysis,
"year": target_year,
"available_years": available_years
}
}
# 更新缓存
if use_cache:
try:
from .title_pattern_discovery import pattern_cache
print(f"更新 {target_year} 年的热门视频分区分布分析数据缓存")
pattern_cache.cache_patterns(table_name, 'category_popular_distribution', response)
except Exception as e:
print(f"更新缓存失败: {e}")
return response
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
finally:
if conn:
conn.close()
@router.get("/duration-popular-distribution", summary="获取热门视频时长分布分析")
async def get_duration_popular_distribution(
year: Optional[int] = Query(None, description="要分析的年份,不传则使用当前年份"),
use_cache: bool = Query(True, description="是否使用缓存,默认为True。如果为False则重新分析数据")
):
"""获取热门视频时长分布分析
分析用户观看的热门视频在不同时长区间的分布情况
Args:
year: 要分析的年份,不传则使用当前年份
use_cache: 是否使用缓存,默认为True。如果为False则重新分析数据
Returns:
dict: 包含热门视频时长分布分析的数据
"""
# 验证年份并获取表名
table_name, target_year, available_years = validate_year_and_get_table(year)
if table_name is None:
return available_years # 这里是错误响应
# 检查缓存
if use_cache:
try:
from .title_pattern_discovery import pattern_cache
cached_data = pattern_cache.get_cached_patterns(table_name, 'duration_popular_distribution')
if cached_data:
print(f"使用 {target_year} 年的热门视频时长分布分析缓存数据")
return cached_data
except Exception as e:
print(f"获取缓存失败: {e}")
conn = None
try:
# 连接数据库
conn = get_db()
cursor = conn.cursor()
# 分析热门视频时长分布
duration_analysis = analyze_duration_popular_distribution(cursor, table_name, target_year)
# 构建响应
response = {
"status": "success",
"data": {
"duration_analysis": duration_analysis,
"year": target_year,
"available_years": available_years
}
}
# 更新缓存
if use_cache:
try:
from .title_pattern_discovery import pattern_cache
print(f"更新 {target_year} 年的热门视频时长分布分析数据缓存")
pattern_cache.cache_patterns(table_name, 'duration_popular_distribution', response)
except Exception as e:
print(f"更新缓存失败: {e}")
return response
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
finally:
if conn:
conn.close()
|
2929004360/ruoyi-sign
| 1,640
|
ruoyi-common/src/main/java/com/ruoyi/common/utils/file/MimeTypeUtils.java
|
package com.ruoyi.common.utils.file;
/**
 * Media (MIME) type helpers: common image MIME constants, upload extension
 * whitelists, and a MIME-type to file-extension mapping.
 *
 * @author ruoyi
 */
public class MimeTypeUtils
{
    public static final String IMAGE_PNG = "image/png";

    public static final String IMAGE_JPG = "image/jpg";

    public static final String IMAGE_JPEG = "image/jpeg";

    public static final String IMAGE_BMP = "image/bmp";

    public static final String IMAGE_GIF = "image/gif";

    public static final String[] IMAGE_EXTENSION = { "bmp", "gif", "jpg", "jpeg", "png" };

    public static final String[] FLASH_EXTENSION = { "swf", "flv" };

    public static final String[] MEDIA_EXTENSION = { "swf", "flv", "mp3", "wav", "wma", "wmv", "mid", "avi", "mpg",
            "asf", "rm", "rmvb" };

    public static final String[] VIDEO_EXTENSION = { "mp4", "avi", "rmvb" };

    public static final String[] DEFAULT_ALLOWED_EXTENSION = {
            // images
            "bmp", "gif", "jpg", "jpeg", "png",
            // word excel powerpoint
            "doc", "docx", "xls", "xlsx", "ppt", "pptx", "html", "htm", "txt",
            // archives
            "rar", "zip", "gz", "bz2",
            // video
            "mp4", "avi", "rmvb",
            // pdf
            "pdf" };

    /**
     * Map a known image MIME type to its file extension.
     *
     * @param prefix MIME type string such as "image/png"
     * @return the matching extension, or an empty string for unknown types
     */
    public static String getExtension(String prefix)
    {
        if (prefix.equals(IMAGE_PNG))
        {
            return "png";
        }
        if (prefix.equals(IMAGE_JPG))
        {
            return "jpg";
        }
        if (prefix.equals(IMAGE_JPEG))
        {
            return "jpeg";
        }
        if (prefix.equals(IMAGE_BMP))
        {
            return "bmp";
        }
        if (prefix.equals(IMAGE_GIF))
        {
            return "gif";
        }
        return "";
    }
}
|
2929004360/ruoyi-sign
| 7,877
|
ruoyi-common/src/main/java/com/ruoyi/common/utils/file/FileUtils.java
|
package com.ruoyi.common.utils.file;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.ArrayUtils;
import com.ruoyi.common.config.RuoYiConfig;
import com.ruoyi.common.utils.DateUtils;
import com.ruoyi.common.utils.StringUtils;
import com.ruoyi.common.utils.uuid.IdUtils;
import org.apache.commons.io.FilenameUtils;
/**
 * File handling utilities: streaming file contents, writing uploads,
 * deletion, file-name validation and download-header encoding.
 *
 * @author ruoyi
 */
public class FileUtils
{
    /** Characters permitted in user-supplied file names (letters, digits, _ - | . and CJK). */
    public static String FILENAME_PATTERN = "[a-zA-Z0-9_\\-\\|\\.\\u4e00-\\u9fa5]+";

    /**
     * Stream the bytes of the file at filePath into os.
     * NOTE: closes both the source stream and os when finished — callers rely on this.
     *
     * @param filePath path of the file to read
     * @param os destination stream
     * @throws IOException if the file is missing or a read/write fails
     */
    public static void writeBytes(String filePath, OutputStream os) throws IOException
    {
        FileInputStream fis = null;
        try
        {
            File file = new File(filePath);
            if (!file.exists())
            {
                throw new FileNotFoundException(filePath);
            }
            fis = new FileInputStream(file);
            byte[] b = new byte[1024];
            int length;
            while ((length = fis.read(b)) > 0)
            {
                os.write(b, 0, length);
            }
        }
        finally
        {
            // Cleanup: the original caught IOException only to rethrow it unchanged.
            IOUtils.close(os);
            IOUtils.close(fis);
        }
    }

    /**
     * Write data into the configured import directory.
     *
     * @param data file content
     * @return web-accessible path of the written file
     * @throws IOException on write failure
     */
    public static String writeImportBytes(byte[] data) throws IOException
    {
        return writeBytes(data, RuoYiConfig.getImportPath());
    }

    /**
     * Write data under uploadDir using a date path plus a random UUID file name.
     *
     * @param data file content
     * @param uploadDir target directory
     * @return web-accessible path of the written file
     * @throws IOException on write failure
     */
    public static String writeBytes(byte[] data, String uploadDir) throws IOException
    {
        FileOutputStream fos = null;
        String pathName = "";
        try
        {
            String extension = getFileExtendName(data);
            pathName = DateUtils.datePath() + "/" + IdUtils.fastUUID() + "." + extension;
            File file = FileUploadUtils.getAbsoluteFile(uploadDir, pathName);
            fos = new FileOutputStream(file);
            fos.write(data);
        }
        finally
        {
            IOUtils.close(fos);
        }
        return FileUploadUtils.getPathFileName(uploadDir, pathName);
    }

    /**
     * Delete a single file.
     *
     * @param filePath file to delete
     * @return true if the path was an existing regular file and was deleted
     */
    public static boolean deleteFile(String filePath)
    {
        boolean flag = false;
        File file = new File(filePath);
        // Only delete existing regular files.
        if (file.isFile() && file.exists())
        {
            flag = file.delete();
        }
        return flag;
    }

    /**
     * Validate a file name against FILENAME_PATTERN.
     *
     * @param filename name to check
     * @return true if legal, false otherwise
     */
    public static boolean isValidFilename(String filename)
    {
        return filename.matches(FILENAME_PATTERN);
    }

    /**
     * Check whether a resource path may be downloaded.
     *
     * @param resource resource path requested for download
     * @return true if allowed, false otherwise
     */
    public static boolean checkAllowDownload(String resource)
    {
        // Reject directory traversal.
        if (StringUtils.contains(resource, ".."))
        {
            return false;
        }
        // Only whitelisted extensions may be downloaded.
        if (ArrayUtils.contains(MimeTypeUtils.DEFAULT_ALLOWED_EXTENSION, FileTypeUtils.getFileType(resource)))
        {
            return true;
        }
        return false;
    }

    /**
     * Encode a download file name according to the requesting browser's quirks.
     * BUGFIX: the USER-AGENT header may be absent, which previously caused a
     * NullPointerException; missing agents now take the generic branch.
     *
     * @param request request object
     * @param fileName original file name
     * @return encoded file name
     */
    public static String setFileDownloadHeader(HttpServletRequest request, String fileName) throws UnsupportedEncodingException
    {
        final String agent = request.getHeader("USER-AGENT");
        String filename = fileName;
        if (agent == null)
        {
            // No agent header: use the generic encoding.
            filename = URLEncoder.encode(filename, "utf-8");
        }
        else if (agent.contains("MSIE"))
        {
            // Internet Explorer
            filename = URLEncoder.encode(filename, "utf-8");
            filename = filename.replace("+", " ");
        }
        else if (agent.contains("Firefox"))
        {
            // Firefox
            filename = new String(fileName.getBytes(), "ISO8859-1");
        }
        else if (agent.contains("Chrome"))
        {
            // Chrome
            filename = URLEncoder.encode(filename, "utf-8");
        }
        else
        {
            // Other browsers
            filename = URLEncoder.encode(filename, "utf-8");
        }
        return filename;
    }

    /**
     * Set Content-Disposition (and companion) headers for an attachment download.
     *
     * @param response response object
     * @param realFileName real file name to advertise
     */
    public static void setAttachmentResponseHeader(HttpServletResponse response, String realFileName) throws UnsupportedEncodingException
    {
        String percentEncodedFileName = percentEncode(realFileName);
        StringBuilder contentDispositionValue = new StringBuilder();
        contentDispositionValue.append("attachment; filename=")
                .append(percentEncodedFileName)
                .append(";")
                .append("filename*=")
                .append("utf-8''")
                .append(percentEncodedFileName);
        response.addHeader("Access-Control-Expose-Headers", "Content-Disposition,download-filename");
        response.setHeader("Content-disposition", contentDispositionValue.toString());
        response.setHeader("download-filename", percentEncodedFileName);
    }

    /**
     * Percent-encode a string (UTF-8), emitting %20 for spaces instead of '+'.
     *
     * @param s string to encode
     * @return percent-encoded string
     */
    public static String percentEncode(String s) throws UnsupportedEncodingException
    {
        String encode = URLEncoder.encode(s, StandardCharsets.UTF_8.toString());
        return encode.replaceAll("\\+", "%20");
    }

    /**
     * Sniff an image extension (gif/jpg/bmp/png) from magic bytes; defaults to "jpg".
     * BUGFIX: buffers shorter than 10 bytes previously triggered
     * ArrayIndexOutOfBoundsException — the signature checks read up to index 9.
     *
     * @param photoByte image data
     * @return detected extension
     */
    public static String getFileExtendName(byte[] photoByte)
    {
        String strFileExtendName = "jpg";
        if (photoByte == null || photoByte.length < 10)
        {
            return strFileExtendName;
        }
        if ((photoByte[0] == 71) && (photoByte[1] == 73) && (photoByte[2] == 70) && (photoByte[3] == 56)
                && ((photoByte[4] == 55) || (photoByte[4] == 57)) && (photoByte[5] == 97))
        {
            strFileExtendName = "gif";
        }
        else if ((photoByte[6] == 74) && (photoByte[7] == 70) && (photoByte[8] == 73) && (photoByte[9] == 70))
        {
            strFileExtendName = "jpg";
        }
        else if ((photoByte[0] == 66) && (photoByte[1] == 77))
        {
            strFileExtendName = "bmp";
        }
        else if ((photoByte[1] == 80) && (photoByte[2] == 78) && (photoByte[3] == 71))
        {
            strFileExtendName = "png";
        }
        return strFileExtendName;
    }

    /**
     * File name without its directory part, e.g. /profile/upload/2022/04/16/ruoyi.png -> ruoyi.png.
     *
     * @param fileName path name
     * @return name without any directory components, or null for null input
     */
    public static String getName(String fileName)
    {
        if (fileName == null)
        {
            return null;
        }
        int lastUnixPos = fileName.lastIndexOf('/');
        int lastWindowsPos = fileName.lastIndexOf('\\');
        int index = Math.max(lastUnixPos, lastWindowsPos);
        return fileName.substring(index + 1);
    }

    /**
     * Base file name without directory or extension, e.g. .../ruoyi.png -> ruoyi.
     *
     * @param fileName path name
     * @return base name without path or suffix, or null for null input
     */
    public static String getNameNotSuffix(String fileName)
    {
        if (fileName == null)
        {
            return null;
        }
        return FilenameUtils.getBaseName(fileName);
    }
}
|
2977094657/BilibiliHistoryFetcher
| 4,244
|
routers/delete_history.py
|
import json
import sqlite3
from datetime import datetime
from typing import List
from fastapi import APIRouter, HTTPException
from pydantic import BaseModel
from scripts.utils import load_config, get_output_path
# FastAPI router for the history-deletion endpoints; mounted by the main app.
router = APIRouter()
# Project configuration; provides the 'db_file' path used by get_db() below.
config = load_config()
class DeleteHistoryItem(BaseModel):
    """One history record to delete; (bvid, view_at) identifies a single row."""
    bvid: str  # video BV identifier
    view_at: int  # watch timestamp (Unix seconds)
def get_db():
    """Open a new SQLite connection to the configured history database."""
    return sqlite3.connect(get_output_path(config['db_file']))
def update_last_import_time(timestamp: int):
    """Persist *timestamp* as the last-import marker so the next sync resumes from it."""
    payload = {
        "last_import_file": "",
        "last_import_time": timestamp,
        "last_import_date": datetime.fromtimestamp(timestamp).strftime("%Y-%m-%d"),
    }
    with open(get_output_path('last_import.json'), 'w', encoding='utf-8') as f:
        json.dump(payload, f, ensure_ascii=False, indent=4)
@router.delete("/batch-delete", summary="批量删除历史记录")
async def batch_delete_history(items: List[DeleteHistoryItem]):
"""批量删除历史记录
Args:
items: 要删除的视频记录列表,每个记录包含BV号和观看时间戳
Returns:
dict: 删除操作的结果
"""
if not items:
raise HTTPException(status_code=400, detail="请提供要删除的视频记录列表")
try:
conn = get_db()
cursor = conn.cursor()
# 获取当前所有年份的表
cursor.execute("""
SELECT name FROM sqlite_master
WHERE type='table' AND name LIKE 'bilibili_history_%'
""")
tables = [table[0] for table in cursor.fetchall()]
total_deleted = 0
deleted_details = []
min_timestamp = float('inf') # 记录最早的删除时间
# 确保删除记录表存在
cursor.execute("""
CREATE TABLE IF NOT EXISTS deleted_history (
id INTEGER PRIMARY KEY,
bvid TEXT NOT NULL,
view_at INTEGER NOT NULL,
delete_time INTEGER NOT NULL,
UNIQUE(bvid, view_at)
)
""")
# 获取当前时间戳作为删除时间
current_time = int(datetime.now().timestamp())
for item in items:
# 从时间戳获取年份
year = datetime.fromtimestamp(item.view_at).year
table_name = f"bilibili_history_{year}"
if table_name in tables:
# 在对应年份的表中删除指定的记录
query = f"""
DELETE FROM {table_name}
WHERE bvid = ? AND view_at = ?
"""
cursor.execute(query, (item.bvid, item.view_at))
if cursor.rowcount > 0:
total_deleted += cursor.rowcount
deleted_details.append({
"bvid": item.bvid,
"view_at": item.view_at,
"view_time": datetime.fromtimestamp(item.view_at).strftime("%Y-%m-%d %H:%M:%S")
})
# 更新最早的删除时间
min_timestamp = min(min_timestamp, item.view_at)
# 将删除的记录添加到删除记录表中
try:
cursor.execute("""
INSERT INTO deleted_history (bvid, view_at, delete_time)
VALUES (?, ?, ?)
""", (item.bvid, item.view_at, current_time))
except sqlite3.IntegrityError:
# 如果记录已存在,则更新删除时间
cursor.execute("""
UPDATE deleted_history
SET delete_time = ?
WHERE bvid = ? AND view_at = ?
""", (current_time, item.bvid, item.view_at))
conn.commit()
# 如果有记录被删除,更新last_import.json
if total_deleted > 0 and min_timestamp != float('inf'):
update_last_import_time(min_timestamp - 1) # 减1秒以确保能获取到被删除时间点的记录
return {
"status": "success",
"message": f"成功删除 {total_deleted} 条历史记录",
"data": {
"deleted_count": total_deleted,
"deleted_records": deleted_details
}
}
except sqlite3.Error as e:
raise HTTPException(
status_code=500,
detail=f"数据库操作失败: {str(e)}"
)
finally:
if 'conn' in locals() and conn:
conn.close()
|
281677160/openwrt-package
| 7,178
|
luci-app-homeproxy/root/usr/share/rpcd/ucode/luci.homeproxy
|
#!/usr/bin/ucode
/*
* SPDX-License-Identifier: GPL-2.0-only
*
* Copyright (C) 2023-2024 ImmortalWrt.org
*/
'use strict';
import { access, error, lstat, popen, readfile, writefile } from 'fs';
/* Kanged from ucode/luci */
/* Single-quote s for safe shell interpolation (kanged from ucode/luci). */
function shellquote(s) {
	const escaped = replace(s, "'", "'\\''");
	return `'${escaped}'`;
}
/* True when the given module file exists under the running kernel's module dir. */
function hasKernelModule(kmod) {
	const probe = sprintf('[ -e "/lib/modules/$(uname -r)"/%s ]', shellquote(kmod));
	return system(probe) === 0;
}
/* Persistent homeproxy configuration/resources directory. */
const HP_DIR = '/etc/homeproxy';
/* Runtime state directory (logs). */
const RUN_DIR = '/var/run/homeproxy';
/* RPC methods exposed as the 'luci.homeproxy' ubus object. */
const methods = {
	acllist_read: {
		args: { type: 'type' },
		call: function(req) {
			if (index(['direct_list', 'proxy_list'], req.args?.type) === -1)
				return { content: null, error: 'illegal type' };

			const filecontent = readfile(`${HP_DIR}/resources/${req.args?.type}.txt`);
			return { content: filecontent };
		}
	},

	acllist_write: {
		args: { type: 'type', content: 'content' },
		call: function(req) {
			if (index(['direct_list', 'proxy_list'], req.args?.type) === -1)
				return { result: false, error: 'illegal type' };

			const file = `${HP_DIR}/resources/${req.args?.type}.txt`;
			let content = req.args?.content;

			/* Sanitize content: normalize EOLs and guarantee a trailing newline */
			if (content) {
				content = trim(content);
				content = replace(content, /\r\n?/g, '\n');
				if (!match(content, /\n$/))
					content += '\n';
			}

			system(`mkdir -p ${HP_DIR}/resources`);
			writefile(file, content);
			return { result: true };
		}
	},

	certificate_write: {
		args: { filename: 'filename' },
		call: function(req) {
			const writeCertificate = (filename, priv) => {
				const tmpcert = '/tmp/homeproxy_certificate.tmp';
				const filestat = lstat(tmpcert);
				if (!filestat || filestat.type !== 'file' || filestat.size <= 0) {
					system(`rm -f ${tmpcert}`);
					return { result: false, error: 'empty certificate file' };
				}

				let filecontent = readfile(tmpcert);
				if (is_binary(filecontent)) {
					system(`rm -f ${tmpcert}`);
					return { result: false, error: 'illegal file type: binary' };
				}

				/* PEM structure check, kanged from luci-proto-openconnect */
				const beg = priv ? /^-----BEGIN (RSA|EC) PRIVATE KEY-----$/ : /^-----BEGIN CERTIFICATE-----$/,
				      end = priv ? /^-----END (RSA|EC) PRIVATE KEY-----$/ : /^-----END CERTIFICATE-----$/,
				      lines = split(trim(filecontent), /[\r\n]/);
				let start = false, i;
				for (i = 0; i < length(lines); i++) {
					if (match(lines[i], beg))
						start = true;
					else if (start && !b64dec(lines[i]) && length(lines[i]) !== 64)
						break;
				}
				if (!start || i < length(lines) - 1 || !match(lines[i], end)) {
					system(`rm -f ${tmpcert}`);
					return { result: false, error: 'this does not look like a correct PEM file' };
				}

				/* Sanitize certificate */
				filecontent = trim(filecontent);
				filecontent = replace(filecontent, /\r\n?/g, '\n');
				if (!match(filecontent, /\n$/))
					filecontent += '\n';

				system(`mkdir -p ${HP_DIR}/certs`);
				/* BUGFIX: interpolate the certificate name; the path previously
				 * contained the literal text "$(unknown)" instead of the
				 * filename parameter, so every upload landed in the same file. */
				writefile(`${HP_DIR}/certs/${filename}.pem`, filecontent);
				system(`rm -f ${tmpcert}`);
				return { result: true };
			};

			const filename = req.args?.filename;
			switch (filename) {
			case 'client_ca':
			case 'server_publickey':
				return writeCertificate(filename, false);
			case 'server_privatekey':
				return writeCertificate(filename, true);
			default:
				/* BUGFIX: typo "cerificate" in the user-visible error message */
				return { result: false, error: 'illegal certificate filename' };
			}
		}
	},

	connection_check: {
		args: { site: 'site' },
		call: function(req) {
			let url;
			switch (req.args?.site) {
			case 'baidu':
				url = 'https://www.baidu.com';
				break;
			case 'google':
				url = 'https://www.google.com';
				break;
			default:
				return { result: false, error: 'illegal site' };
			}

			return { result: (system(`/usr/bin/wget --spider -qT3 ${url} 2>"/dev/null"`, 3100) === 0) };
		}
	},

	log_clean: {
		args: { type: 'type' },
		call: function(req) {
			if (!(req.args?.type in ['homeproxy', 'sing-box-c', 'sing-box-s']))
				return { result: false, error: 'illegal type' };

			const filestat = lstat(`${RUN_DIR}/${req.args?.type}.log`);
			if (filestat)
				writefile(`${RUN_DIR}/${req.args?.type}.log`, '');
			return { result: true };
		}
	},

	singbox_generator: {
		args: { type: 'type', params: 'params' },
		call: function(req) {
			if (!(req.args?.type in ['ech-keypair', 'uuid', 'reality-keypair', 'vapid-keypair', 'wg-keypair']))
				return { result: false, error: 'illegal type' };

			const type = req.args?.type;
			let result = {};
			const fd = popen('/usr/bin/sing-box generate ' + type + ` ${req.args?.params || ''}`);
			if (fd) {
				let ech_cfg_set = false;
				let ech_key_set = false;
				for (let line = fd.read('line'); length(line); line = fd.read('line')) {
					if (type === 'uuid')
						result.uuid = trim(line);
					else if (type in ['reality-keypair', 'vapid-keypair', 'wg-keypair']) {
						let priv = match(trim(line), /PrivateKey: (.*)/);
						if (priv)
							result.private_key = priv[1];
						let pub = match(trim(line), /PublicKey: (.*)/);
						if (pub)
							result.public_key = pub[1];
					} else if (type in ['ech-keypair']) {
						/* ECH output is two PEM-style sections; accumulate each. */
						if (trim(line) === '-----BEGIN ECH CONFIGS-----')
							ech_cfg_set = true;
						else if (trim(line) === '-----BEGIN ECH KEYS-----')
							ech_key_set = true;

						if (ech_cfg_set)
							result.ech_cfg = result.ech_cfg ? result.ech_cfg + '\n' + trim(line) : trim(line);
						if (ech_key_set)
							result.ech_key = result.ech_key ? result.ech_key + '\n' + trim(line) : trim(line);

						if (trim(line) === '-----END ECH CONFIGS-----')
							ech_cfg_set = false;
						else if (trim(line) === '-----END ECH KEYS-----')
							ech_key_set = false;
					}
				}

				fd.close();
			}

			return { result };
		}
	},

	singbox_get_features: {
		call: function() {
			let features = {};

			const fd = popen('/usr/bin/sing-box version');
			if (fd) {
				for (let line = fd.read('line'); length(line); line = fd.read('line')) {
					if (match(trim(line), /^sing-box version (.*)/))
						features.version = match(trim(line), /^sing-box version (.*)/)[1];

					let tags = match(trim(line), /^Tags: (.*)/);
					if (tags)
						for (let i in split(tags[1], ','))
							features[i] = true;
				}

				fd.close();
			}

			features.hp_has_ip_full = access('/usr/libexec/ip-full');
			features.hp_has_tcp_brutal = hasKernelModule('brutal.ko');
			features.hp_has_tproxy = hasKernelModule('nft_tproxy.ko') || access('/etc/modules.d/nft-tproxy');
			features.hp_has_tun = hasKernelModule('tun.ko') || access('/etc/modules.d/30-tun');

			return features;
		}
	},

	resources_get_version: {
		args: { type: 'type' },
		call: function(req) {
			const version = trim(readfile(`${HP_DIR}/resources/${req.args?.type}.ver`));
			return { version: version, error: error() };
		}
	},

	resources_update: {
		args: { type: 'type' },
		call: function(req) {
			if (req.args?.type) {
				const type = shellquote(req.args?.type);
				const exit_code = system(`${HP_DIR}/scripts/update_resources.sh ${type}`);
				return { status: exit_code };
			} else
				return { status: 255, error: 'illegal type' };
		}
	}
};

return { 'luci.homeproxy': methods };
|
2929004360/ruoyi-sign
| 1,558
|
ruoyi-common/src/main/java/com/ruoyi/common/utils/sign/Md5Utils.java
|
package com.ruoyi.common.utils.sign;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * MD5 hashing helpers.
 *
 * @author ruoyi
 */
public class Md5Utils
{
    private static final Logger log = LoggerFactory.getLogger(Md5Utils.class);

    /**
     * Digest the UTF-8 bytes of s with MD5.
     *
     * @param s input string
     * @return 16-byte digest, or null if the MD5 algorithm is unavailable
     */
    private static byte[] md5(String s)
    {
        try
        {
            MessageDigest algorithm = MessageDigest.getInstance("MD5");
            // Cleanup: use the charset constant (already imported) instead of the
            // checked-exception String form; a fresh digest needs no reset().
            return algorithm.digest(s.getBytes(StandardCharsets.UTF_8));
        }
        catch (Exception e)
        {
            log.error("MD5 Error...", e);
        }
        return null;
    }

    /**
     * Hex-encode a digest, two lowercase hex characters per byte.
     *
     * @param hash digest bytes (may be null)
     * @return hex string, or null for null input
     */
    private static String toHex(byte[] hash)
    {
        if (hash == null)
        {
            return null;
        }
        StringBuilder buf = new StringBuilder(hash.length * 2);
        for (byte b : hash)
        {
            int v = b & 0xff;
            if (v < 0x10)
            {
                buf.append("0");
            }
            buf.append(Long.toString(v, 16));
        }
        return buf.toString();
    }

    /**
     * Return the lowercase hex MD5 of s, or s itself if hashing failed.
     * BUGFIX/cleanup: the original round-tripped the hex string through
     * getBytes(UTF-8)/new String (a no-op) and relied on a caught
     * NullPointerException for the md5-failure path.
     *
     * @param s input string
     * @return hex digest, or the input on failure
     */
    public static String hash(String s)
    {
        String hex = toHex(md5(s));
        return hex != null ? hex : s;
    }
}
|
2929004360/ruoyi-sign
| 9,170
|
ruoyi-common/src/main/java/com/ruoyi/common/utils/sign/Base64.java
|
package com.ruoyi.common.utils.sign;
/**
* Base64工具类
*
* @author ruoyi
*/
public final class Base64
{
static private final int BASELENGTH = 128;
static private final int LOOKUPLENGTH = 64;
static private final int TWENTYFOURBITGROUP = 24;
static private final int EIGHTBIT = 8;
static private final int SIXTEENBIT = 16;
static private final int FOURBYTE = 4;
static private final int SIGN = -128;
static private final char PAD = '=';
static final private byte[] base64Alphabet = new byte[BASELENGTH];
static final private char[] lookUpBase64Alphabet = new char[LOOKUPLENGTH];
    // Build the decode table (ASCII char -> 6-bit value, -1 for non-alphabet
    // characters) and the encode table (6-bit value -> base64 character).
    static
    {
        // Mark every code point as "not part of the alphabet" first.
        for (int i = 0; i < BASELENGTH; ++i)
        {
            base64Alphabet[i] = -1;
        }
        // 'A'-'Z' -> 0..25
        for (int i = 'Z'; i >= 'A'; i--)
        {
            base64Alphabet[i] = (byte) (i - 'A');
        }
        // 'a'-'z' -> 26..51
        for (int i = 'z'; i >= 'a'; i--)
        {
            base64Alphabet[i] = (byte) (i - 'a' + 26);
        }
        // '0'-'9' -> 52..61
        for (int i = '9'; i >= '0'; i--)
        {
            base64Alphabet[i] = (byte) (i - '0' + 52);
        }
        base64Alphabet['+'] = 62;
        base64Alphabet['/'] = 63;
        // Inverse mapping for encoding.
        for (int i = 0; i <= 25; i++)
        {
            lookUpBase64Alphabet[i] = (char) ('A' + i);
        }
        for (int i = 26, j = 0; i <= 51; i++, j++)
        {
            lookUpBase64Alphabet[i] = (char) ('a' + j);
        }
        for (int i = 52, j = 0; i <= 61; i++, j++)
        {
            lookUpBase64Alphabet[i] = (char) ('0' + j);
        }
        lookUpBase64Alphabet[62] = (char) '+';
        lookUpBase64Alphabet[63] = (char) '/';
    }
private static boolean isWhiteSpace(char octect)
{
return (octect == 0x20 || octect == 0xd || octect == 0xa || octect == 0x9);
}
private static boolean isPad(char octect)
{
return (octect == PAD);
}
private static boolean isData(char octect)
{
return (octect < BASELENGTH && base64Alphabet[octect] != -1);
}
/**
* Encodes hex octects into Base64
*
* @param binaryData Array containing binaryData
* @return Encoded Base64 array
*/
public static String encode(byte[] binaryData)
{
if (binaryData == null)
{
return null;
}
int lengthDataBits = binaryData.length * EIGHTBIT;
if (lengthDataBits == 0)
{
return "";
}
int fewerThan24bits = lengthDataBits % TWENTYFOURBITGROUP;
int numberTriplets = lengthDataBits / TWENTYFOURBITGROUP;
int numberQuartet = fewerThan24bits != 0 ? numberTriplets + 1 : numberTriplets;
char encodedData[] = null;
encodedData = new char[numberQuartet * 4];
byte k = 0, l = 0, b1 = 0, b2 = 0, b3 = 0;
int encodedIndex = 0;
int dataIndex = 0;
for (int i = 0; i < numberTriplets; i++)
{
b1 = binaryData[dataIndex++];
b2 = binaryData[dataIndex++];
b3 = binaryData[dataIndex++];
l = (byte) (b2 & 0x0f);
k = (byte) (b1 & 0x03);
byte val1 = ((b1 & SIGN) == 0) ? (byte) (b1 >> 2) : (byte) ((b1) >> 2 ^ 0xc0);
byte val2 = ((b2 & SIGN) == 0) ? (byte) (b2 >> 4) : (byte) ((b2) >> 4 ^ 0xf0);
byte val3 = ((b3 & SIGN) == 0) ? (byte) (b3 >> 6) : (byte) ((b3) >> 6 ^ 0xfc);
encodedData[encodedIndex++] = lookUpBase64Alphabet[val1];
encodedData[encodedIndex++] = lookUpBase64Alphabet[val2 | (k << 4)];
encodedData[encodedIndex++] = lookUpBase64Alphabet[(l << 2) | val3];
encodedData[encodedIndex++] = lookUpBase64Alphabet[b3 & 0x3f];
}
// form integral number of 6-bit groups
if (fewerThan24bits == EIGHTBIT)
{
b1 = binaryData[dataIndex];
k = (byte) (b1 & 0x03);
byte val1 = ((b1 & SIGN) == 0) ? (byte) (b1 >> 2) : (byte) ((b1) >> 2 ^ 0xc0);
encodedData[encodedIndex++] = lookUpBase64Alphabet[val1];
encodedData[encodedIndex++] = lookUpBase64Alphabet[k << 4];
encodedData[encodedIndex++] = PAD;
encodedData[encodedIndex++] = PAD;
}
else if (fewerThan24bits == SIXTEENBIT)
{
b1 = binaryData[dataIndex];
b2 = binaryData[dataIndex + 1];
l = (byte) (b2 & 0x0f);
k = (byte) (b1 & 0x03);
byte val1 = ((b1 & SIGN) == 0) ? (byte) (b1 >> 2) : (byte) ((b1) >> 2 ^ 0xc0);
byte val2 = ((b2 & SIGN) == 0) ? (byte) (b2 >> 4) : (byte) ((b2) >> 4 ^ 0xf0);
encodedData[encodedIndex++] = lookUpBase64Alphabet[val1];
encodedData[encodedIndex++] = lookUpBase64Alphabet[val2 | (k << 4)];
encodedData[encodedIndex++] = lookUpBase64Alphabet[l << 2];
encodedData[encodedIndex++] = PAD;
}
return new String(encodedData);
}
/**
* Decodes Base64 data into octects
*
* @param encoded string containing Base64 data
* @return Array containind decoded data.
*/
public static byte[] decode(String encoded)
{
if (encoded == null)
{
return null;
}
char[] base64Data = encoded.toCharArray();
// remove white spaces
int len = removeWhiteSpace(base64Data);
if (len % FOURBYTE != 0)
{
return null;// should be divisible by four
}
int numberQuadruple = (len / FOURBYTE);
if (numberQuadruple == 0)
{
return new byte[0];
}
byte decodedData[] = null;
byte b1 = 0, b2 = 0, b3 = 0, b4 = 0;
char d1 = 0, d2 = 0, d3 = 0, d4 = 0;
int i = 0;
int encodedIndex = 0;
int dataIndex = 0;
decodedData = new byte[(numberQuadruple) * 3];
for (; i < numberQuadruple - 1; i++)
{
if (!isData((d1 = base64Data[dataIndex++])) || !isData((d2 = base64Data[dataIndex++]))
|| !isData((d3 = base64Data[dataIndex++])) || !isData((d4 = base64Data[dataIndex++])))
{
return null;
} // if found "no data" just return null
b1 = base64Alphabet[d1];
b2 = base64Alphabet[d2];
b3 = base64Alphabet[d3];
b4 = base64Alphabet[d4];
decodedData[encodedIndex++] = (byte) (b1 << 2 | b2 >> 4);
decodedData[encodedIndex++] = (byte) (((b2 & 0xf) << 4) | ((b3 >> 2) & 0xf));
decodedData[encodedIndex++] = (byte) (b3 << 6 | b4);
}
if (!isData((d1 = base64Data[dataIndex++])) || !isData((d2 = base64Data[dataIndex++])))
{
return null;// if found "no data" just return null
}
b1 = base64Alphabet[d1];
b2 = base64Alphabet[d2];
d3 = base64Data[dataIndex++];
d4 = base64Data[dataIndex++];
if (!isData((d3)) || !isData((d4)))
{// Check if they are PAD characters
if (isPad(d3) && isPad(d4))
{
if ((b2 & 0xf) != 0)// last 4 bits should be zero
{
return null;
}
byte[] tmp = new byte[i * 3 + 1];
System.arraycopy(decodedData, 0, tmp, 0, i * 3);
tmp[encodedIndex] = (byte) (b1 << 2 | b2 >> 4);
return tmp;
}
else if (!isPad(d3) && isPad(d4))
{
b3 = base64Alphabet[d3];
if ((b3 & 0x3) != 0)// last 2 bits should be zero
{
return null;
}
byte[] tmp = new byte[i * 3 + 2];
System.arraycopy(decodedData, 0, tmp, 0, i * 3);
tmp[encodedIndex++] = (byte) (b1 << 2 | b2 >> 4);
tmp[encodedIndex] = (byte) (((b2 & 0xf) << 4) | ((b3 >> 2) & 0xf));
return tmp;
}
else
{
return null;
}
}
else
{ // No PAD e.g 3cQl
b3 = base64Alphabet[d3];
b4 = base64Alphabet[d4];
decodedData[encodedIndex++] = (byte) (b1 << 2 | b2 >> 4);
decodedData[encodedIndex++] = (byte) (((b2 & 0xf) << 4) | ((b3 >> 2) & 0xf));
decodedData[encodedIndex++] = (byte) (b3 << 6 | b4);
}
return decodedData;
}
/**
* remove WhiteSpace from MIME containing encoded Base64 data.
*
* @param data the byte array of base64 data (with WS)
* @return the new length
*/
private static int removeWhiteSpace(char[] data)
{
if (data == null)
{
return 0;
}
// count characters that's not whitespace
int newSize = 0;
int len = data.length;
for (int i = 0; i < len; i++)
{
if (!isWhiteSpace(data[i]))
{
data[newSize++] = data[i];
}
}
return newSize;
}
}
|
2977094657/BilibiliHistoryFetcher
| 32,655
|
routers/history.py
|
import json
import os
import hashlib
try:
import pysqlite3 as sqlite3
except ImportError:
import sqlite3
from datetime import datetime
from typing import Optional
from fastapi import APIRouter, Query, HTTPException
from pydantic import BaseModel
from scripts.utils import get_output_path, load_config
from scripts.image_downloader import ImageDownloader
# Module-level singletons shared by every endpoint in this router.
router = APIRouter()
# Application configuration (provides e.g. config['db_file']).
config = load_config()
# Image downloader instance; NOTE(review): not referenced in this chunk — confirm it is used elsewhere in the file.
downloader = ImageDownloader()
def get_db():
    """Open a connection to the history database and apply compatibility PRAGMAs.

    Returns:
        sqlite3.Connection: an open connection with the PRAGMAs applied.

    Raises:
        HTTPException: 500 when the connection or PRAGMA setup fails.
    """
    db_path = get_output_path(config['db_file'])
    # Remember whether the file existed beforehand, only to log creation below.
    db_exists = os.path.exists(db_path)
    try:
        # Connecting implicitly creates the file when it does not exist.
        conn = sqlite3.connect(db_path)
        cursor = conn.cursor()
        # Compatibility settings; user_version is pinned to a fixed marker value.
        pragmas = [
            ('legacy_file_format', 1),
            ('journal_mode', 'DELETE'),
            ('synchronous', 'NORMAL'),
            ('user_version', 317)  # fixed application version marker
        ]
        for pragma, value in pragmas:
            cursor.execute(f'PRAGMA {pragma}={value}')
        conn.commit()
        if not db_exists:
            print("数据库文件不存在,将创建新数据库")
        print("已配置数据库兼容性设置")
        return conn
    except sqlite3.Error as e:
        print(f"数据库连接错误: {str(e)}")
        # conn may not exist yet if connect() itself failed, hence the locals() check.
        if 'conn' in locals() and conn:
            conn.close()
        raise HTTPException(
            status_code=500,
            detail=f"数据库连接失败: {str(e)}"
        )
def get_available_years():
    """Return every year that has a bilibili_history_<year> table, newest first.

    Falls back to a single-element list containing the current calendar year
    when the database is unreachable, contains no year tables, or any error
    occurs while scanning.
    """
    print("开始获取可用年份列表")
    connection = None
    try:
        connection = get_db()
        if connection is None:
            print("无法连接到数据库")
            return [datetime.now().year]
        cur = connection.cursor()
        # Scan sqlite_master for per-year history tables.
        cur.execute("""
            SELECT name FROM sqlite_master
            WHERE type='table' AND name LIKE 'bilibili_history_%'
            ORDER BY name DESC
        """)
        tables = cur.fetchall()
        print(f"找到的表: {tables}")
        years = []
        for (table_name,) in tables:
            # The trailing underscore-separated token is the year.
            try:
                parsed_year = int(table_name.split('_')[-1])
            except (ValueError, IndexError) as e:
                print(f"处理表名 {table_name} 时出错: {e}")
                continue
            years.append(parsed_year)
            print(f"添加年份: {parsed_year} (来自表 {table_name})")
        years.sort(reverse=True)
        print(f"最终年份列表: {years}")
        if not years:
            current_year = datetime.now().year
            print(f"未找到年份,返回当前年份: {current_year}")
            return [current_year]
        return years
    except sqlite3.Error as e:
        print(f"获取年份列表时发生数据库错误: {e}")
        return [datetime.now().year]
    except Exception as e:
        print(f"获取年份列表时发生未知错误: {e}")
        return [datetime.now().year]
    finally:
        if connection:
            try:
                connection.close()
                print("数据库连接已关闭")
            except Exception as e:
                print(f"关闭数据库连接时出错: {e}")
@router.get("/available-years", summary="获取可用的年份列表")
async def get_years():
"""获取所有可用的年份列表"""
years = get_available_years()
if not years:
return {
"status": "error",
"message": "未找到任何历史记录数据"
}
return {
"status": "success",
"data": years
}
def _process_image_url(url: str, image_type: str, use_local: bool, use_sessdata: bool = True) -> str:
    """Map an image URL to a local-cache path, or pass it through.

    Args:
        url: original image URL (may be None/empty).
        image_type: image category, 'covers' or 'avatars'.
        use_local: when True, return a local relative path derived from the URL hash.
        use_sessdata: when False, strip query parameters (e.g. SESSDATA) from
            pass-through URLs; public assets do not need them.

    Returns:
        str: the rewritten URL (or the input unchanged on any failure).
    """
    # Pass-through path: not using local images, or no URL to rewrite.
    if not use_local or not url:
        # Bug fix: guard on `url` before the substring test — the old code
        # raised TypeError (`'?' in None`) when url was None and
        # use_sessdata was False.
        if url and not use_sessdata and '?' in url:
            # Strip all query parameters, keeping the bare image URL.
            url = url.split('?')[0]
        return url
    try:
        # Hash only the base URL so the cache key ignores query parameters.
        base_url = url.split('?')[0] if '?' in url else url
        file_hash = hashlib.md5(base_url.encode()).hexdigest()
        # Unknown categories are returned unchanged.
        if image_type not in ('covers', 'avatars'):
            print(f"无效的图片类型: {image_type}")
            return url
        # Relative path; the frontend prepends its own origin.
        return f"/images/local/{image_type}/{file_hash}"
    except Exception as e:
        print(f"处理图片URL时出错: {str(e)}")
        return url
def _process_record(record: dict, use_local: bool, use_sessdata: bool = True) -> dict:
    """Rewrite the image fields of one history record in place.

    Runs the cover and avatar URLs through _process_image_url, and decodes the
    JSON-encoded ``covers`` field into a list of processed URLs. ``covers`` is
    always normalised to a plain list (empty on missing/invalid data).

    Returns:
        dict: the same record object, mutated.
    """
    # Single-URL fields and the image category each maps to.
    for field, category in (('cover', 'covers'), ('author_face', 'avatars')):
        if record.get(field):
            record[field] = _process_image_url(record[field], category, use_local, use_sessdata)
    processed_covers = []
    raw_covers = record.get('covers')
    if raw_covers and isinstance(raw_covers, str):
        try:
            decoded = json.loads(raw_covers)
        except json.JSONDecodeError:
            print(f"解析 covers JSON 失败: {record['covers']}")
            decoded = None
        if isinstance(decoded, list):
            processed_covers = [
                _process_image_url(cover_url, 'covers', use_local, use_sessdata)
                for cover_url in decoded
            ]
    record['covers'] = processed_covers
    return record
@router.get("/all", summary="分页查询历史记录")
async def get_history_page(
page: int = Query(1, description="当前页码"),
size: int = Query(10, description="每页记录数"),
sort_order: int = Query(0, description="排序顺序,0为降序,1为升序"),
tag_name: Optional[str] = Query(None, description="视频子分区名称"),
main_category: Optional[str] = Query(None, description="主分区名称"),
date_range: Optional[str] = Query(None, description="日期范围,格式为yyyyMMdd-yyyyMMdd"),
use_local_images: bool = Query(False, description="是否使用本地图片"),
use_sessdata: bool = Query(True, description="是否在图片URL中使用SESSDATA"),
business: Optional[str] = Query(None, description="业务类型,如archive(普通视频)、pgc(番剧)、live(直播)、article-list(文集)、article(文章)")
):
"""分页查询历史记录,支持跨年份查询"""
print("\n=== 接收到的请求参数 ===")
print(f"页码(page): {page}")
print(f"每页记录数(size): {size}")
print(f"排序顺序(sort_order): {'升序' if sort_order == 1 else '降序'}")
print(f"子分区名称(tag_name): {tag_name if tag_name else '无'}")
print(f"主分区名称(main_category): {main_category if main_category else '无'}")
print(f"日期范围(date_range): {date_range if date_range else '无'}")
print(f"是否使用本地图片(use_local_images): {use_local_images}")
print(f"是否使用SESSDATA(use_sessdata): {use_sessdata}")
print(f"业务类型(business): {business if business else '全部'}")
print("=====================\n")
try:
conn = get_db()
cursor = conn.cursor()
# 获取可用年份列表
available_years = get_available_years()
if not available_years:
return {
"status": "error",
"message": "未找到任何历史记录数据"
}
# 构建UNION ALL查询
queries = []
params = []
# 处理日期范围
start_timestamp = None
end_timestamp = None
if date_range:
try:
start_date, end_date = date_range.split('-')
start_timestamp = int(datetime.strptime(start_date, '%Y%m%d').timestamp())
end_timestamp = int(datetime.strptime(end_date, '%Y%m%d').timestamp()) + 86400
except ValueError:
return {"status": "error", "message": "日期格式无效,应为yyyyMMdd-yyyyMMdd"}
# 为每个年份构建查询
for year in available_years:
table_name = f"bilibili_history_{year}"
query = f"SELECT * FROM {table_name} WHERE 1=1"
# 添加日期范围条件
if start_timestamp is not None and end_timestamp is not None:
query += " AND view_at >= ? AND view_at < ?"
params.extend([start_timestamp, end_timestamp])
# 添加分类筛选
if main_category:
query += " AND main_category = ?"
params.append(main_category)
elif tag_name:
query += " AND tag_name = ?"
params.append(tag_name)
# 添加业务类型筛选
if business:
query += " AND business = ?"
params.append(business)
queries.append(query)
# 组合所有查询
base_query = " UNION ALL ".join(queries)
# 获取总记录数
count_query = f"SELECT COUNT(*) FROM ({base_query})"
cursor.execute(count_query, params)
total = cursor.fetchone()[0]
# 添加排序和分页
final_query = f"""
SELECT * FROM ({base_query})
ORDER BY view_at {('ASC' if sort_order == 1 else 'DESC')}
LIMIT ? OFFSET ?
"""
params.extend([size, (page - 1) * size])
print("=== SQL查询构建 ===")
print(f"最终SQL: {final_query}")
print(f"参数: {params}")
print("==================\n")
# 执行查询
cursor.execute(final_query, params)
columns = [description[0] for description in cursor.description]
records = []
for row in cursor.fetchall():
record = dict(zip(columns, row))
record = _process_record(record, use_local_images, use_sessdata)
records.append(record)
print("=== 响应结果 ===")
print(f"返回记录数: {len(records)}")
print(f"第一条记录: {records[0] if records else '无记录'}")
print("================\n")
return {
"status": "success",
"data": {
"records": records,
"total": total,
"size": size,
"current": page,
"available_years": available_years
}
}
except sqlite3.Error as e:
error_msg = f"数据库错误: {str(e)}"
print(f"=== 错误 ===\n{error_msg}\n===========")
return {"status": "error", "message": error_msg}
finally:
if conn:
conn.close()
def process_search_keyword(keyword: str) -> str:
    """Collapse runs of whitespace in *keyword* into single spaces.

    Args:
        keyword: raw search keyword, possibly None or empty.

    Returns:
        str: the normalised keyword, or "" for falsy input.
    """
    # str.split() with no arguments drops leading/trailing and repeated whitespace.
    return ' '.join(keyword.split()) if keyword else ""
def create_fts_table(conn, table_name: str):
    """Create the FTS5 shadow table and sync triggers for one year table.

    Args:
        conn: open sqlite3 connection.
        table_name: year suffix (e.g. "2024") of bilibili_history_<year>.

    Returns:
        bool: True when the FTS table exists/was created, False when the base
        table is missing or any SQLite error occurred (transaction rolled back).
    """
    cursor = conn.cursor()
    try:
        # Bail out when the base year table does not exist.
        cursor.execute(f"""
            SELECT name FROM sqlite_master
            WHERE type='table' AND name='bilibili_history_{table_name}'
        """)
        if not cursor.fetchone():
            return False
        # External-content FTS5 table mirroring the base table's text columns.
        # NOTE(review): title_pinyin is populated with the raw title below —
        # no pinyin conversion is visible in this file; confirm intent.
        fts_table = f"bilibili_history_{table_name}_fts"
        cursor.execute(f"""
            CREATE VIRTUAL TABLE IF NOT EXISTS {fts_table} USING fts5(
                title,
                author_name,
                tag_name,
                main_category,
                remark,
                title_pinyin,
                content='bilibili_history_{table_name}',
                content_rowid='id'
            )
        """)
        # Insert trigger keeps the FTS index in step with new rows.
        cursor.execute(f"""
            CREATE TRIGGER IF NOT EXISTS history_{table_name}_ai AFTER INSERT ON bilibili_history_{table_name} BEGIN
                INSERT INTO {fts_table}(
                    rowid, title, author_name, tag_name, main_category, remark, title_pinyin
                )
                VALUES (
                    new.id, new.title, new.author_name, new.tag_name, new.main_category,
                    new.remark, new.title
                );
            END;
        """)
        # Delete trigger uses the FTS5 'delete' command form.
        # NOTE(review): the delete/update triggers omit title_pinyin while the
        # insert trigger writes it; FTS5 'delete' entries must match the values
        # that were indexed — confirm this does not corrupt the index.
        cursor.execute(f"""
            CREATE TRIGGER IF NOT EXISTS history_{table_name}_ad AFTER DELETE ON bilibili_history_{table_name} BEGIN
                INSERT INTO {fts_table}({fts_table}, rowid, title, author_name, tag_name, main_category, remark)
                VALUES('delete', old.id, old.title, old.author_name, old.tag_name, old.main_category, old.remark);
            END;
        """)
        # Update trigger = delete of the old values + insert of the new ones.
        cursor.execute(f"""
            CREATE TRIGGER IF NOT EXISTS history_{table_name}_au AFTER UPDATE ON bilibili_history_{table_name} BEGIN
                INSERT INTO {fts_table}({fts_table}, rowid, title, author_name, tag_name, main_category, remark)
                VALUES('delete', old.id, old.title, old.author_name, old.tag_name, old.main_category, old.remark);
                INSERT INTO {fts_table}(rowid, title, author_name, tag_name, main_category, remark)
                VALUES (new.id, new.title, new.author_name, new.tag_name, new.main_category, new.remark);
            END;
        """)
        # Backfill the FTS index from existing rows.
        cursor.execute(f"""
            INSERT OR REPLACE INTO {fts_table}(
                rowid, title, author_name, tag_name, main_category, remark, title_pinyin
            )
            SELECT
                id, title, author_name, tag_name, main_category, remark, title
            FROM bilibili_history_{table_name}
        """)
        conn.commit()
        return True
    except sqlite3.Error as e:
        print(f"创建FTS表时出错: {str(e)}")
        conn.rollback()
        return False
def build_field_search_conditions(field: str, search: str, exact_match: bool) -> tuple:
    """Build a parameterised WHERE fragment for a single column.

    Args:
        field: column name to match against.
        search: user-supplied search term.
        exact_match: True for equality, False for a LIKE substring match.

    Returns:
        tuple: (condition, params) where condition looks like "(title LIKE ?)".
    """
    if exact_match:
        clause, bound_value = f"{field} = ?", search
    else:
        clause, bound_value = f"{field} LIKE ?", f"%{search}%"
    condition = f"({clause})"
    params = [bound_value]
    print(f"\n=== 字段条件构建 [{field}] ===")
    print(f"条件: {condition}")
    print(f"参数: {params}")
    print("===================")
    return condition, params
@router.get("/search", summary="搜索历史记录")
async def search_history(
page: int = Query(1, description="当前页码"),
size: int = Query(30, description="每页记录数"),
sortOrder: int = Query(0, description="排序顺序,0为降序,1为升序"),
search: Optional[str] = Query(None, description="搜索关键词"),
search_type: Optional[str] = Query("all", description="搜索类型:all-全部, title-标题, author-作者, tag-分区, remark-备注"),
use_sessdata: bool = Query(True, description="是否在图片URL中使用SESSDATA"),
use_local_images: bool = Query(False, description="是否使用本地图片")
):
"""高级搜索历史记录,按观看时间排序,使用模糊匹配"""
try:
print("\n=== 搜索开始 ===")
print(f"关键词: {search}")
print(f"类型: {search_type}")
print(f"匹配方式: 模糊匹配")
print(f"排序顺序: {'升序' if sortOrder == 1 else '降序'}")
print(f"是否使用SESSDATA: {use_sessdata}")
print(f"是否使用本地图片: {use_local_images}")
print("==============\n")
conn = get_db()
cursor = conn.cursor()
# 获取可用年份列表
available_years = get_available_years()
if not available_years:
return {
"status": "error",
"message": "未找到任何历史记录数据"
}
# 构建每个年份的子查询
sub_queries = []
base_params = []
# 处理搜索关键词
field_map = {
"title": "title",
"author": "author_name",
"tag": "tag_name",
"remark": "remark"
}
where_clause = ""
search_params = []
if search:
search = process_search_keyword(search)
print(f"\n处理后的搜索关键词: {search}\n")
print("\n=== 开始构建查询条件 ===")
# 构建WHERE子句(强制使用模糊匹配)
exact_match = False
if search_type == "all":
field_conditions = []
for field_name, field in field_map.items():
print(f"\n处理字段: {field_name}")
condition, params = build_field_search_conditions(field, search, exact_match)
field_conditions.append(condition)
search_params.extend(params)
print(f"当前参数数量: {len(search_params)}")
if field_conditions:
where_clause = f"WHERE ({' OR '.join(field_conditions)})"
else:
field = field_map.get(search_type)
if field:
condition, params = build_field_search_conditions(field, search, exact_match)
where_clause = f"WHERE {condition}"
search_params.extend(params)
# 构建基础查询
for year in available_years:
table_name = f"bilibili_history_{year}"
sub_query = f"SELECT * FROM {table_name} {where_clause}"
sub_queries.append(sub_query)
# 为每个子查询添加一组参数
base_params.extend(search_params)
base_query = f"{' UNION ALL '.join(sub_queries)}"
print("\n=== 基础查询 ===")
print(f"SQL: {base_query}")
print(f"参数: {base_params}")
print(f"参数数量: {len(base_params)}")
print("================\n")
# 获取总记录数
count_query = f"SELECT COUNT(*) FROM ({base_query})"
print("\n=== 计数查询 ===")
print(f"SQL: {count_query}")
print(f"参数: {base_params}")
print("================\n")
cursor.execute(count_query, base_params)
total = cursor.fetchone()[0]
# 构建最终查询,按观看时间排序和分页
params = base_params.copy()
query = f"""
SELECT * FROM ({base_query})
ORDER BY view_at {('ASC' if sortOrder == 1 else 'DESC')}
"""
# 添加分页
query += " LIMIT ? OFFSET ?"
params.extend([size, (page - 1) * size])
print("\n=== 最终查询 ===")
print(f"SQL: {query}")
print(f"参数: {params}")
print(f"参数数量: {len(params)}")
print("================\n")
# 执行查询
cursor.execute(query, params)
columns = [description[0] for description in cursor.description]
records = []
for row in cursor.fetchall():
record = dict(zip(columns, row))
record = _process_record(record, use_local_images, use_sessdata)
records.append(record)
return {
"status": "success",
"data": {
"records": records,
"total": total,
"size": size,
"current": page,
"available_years": available_years,
"search_info": {
"keyword": search,
"type": search_type,
"exact_match": False,
"sort_by": "view_at"
}
}
}
except sqlite3.Error as e:
error_msg = f"数据库错误: {str(e)}"
print(f"\n=== 数据库错误 ===\n{error_msg}\n=================\n")
return {"status": "error", "message": error_msg}
finally:
if conn:
conn.close()
@router.get("/remarks", summary="获取所有备注")
async def get_all_remarks(
page: int = Query(1, description="当前页码"),
size: int = Query(10, description="每页记录数"),
sort_order: int = Query(0, description="排序顺序,0为降序,1为升序")
):
"""获取所有带有备注的视频记录
Args:
page: 当前页码
size: 每页记录数
sort_order: 排序顺序,0为降序,1为升序
Returns:
dict: 包含分页的备注记录列表
"""
try:
conn = get_db()
cursor = conn.cursor()
# 获取所有年份的表
years = get_available_years()
if not years:
return {
"status": "error",
"message": "未找到任何历史记录数据"
}
# 构建UNION ALL查询,只查询有备注的记录
queries = []
for year in years:
table_name = f"bilibili_history_{year}"
queries.append(f"""
SELECT *
FROM {table_name}
WHERE remark != ''
""")
# 组合所有查询,添加排序和分页
base_query = " UNION ALL ".join(queries)
count_query = f"SELECT COUNT(*) FROM ({base_query})"
# 添加排序和分页(按备注时间排序)
final_query = f"""
SELECT * FROM ({base_query})
ORDER BY remark_time {('ASC' if sort_order == 1 else 'DESC')}
LIMIT ? OFFSET ?
"""
# 获取总记录数
cursor.execute(count_query)
total = cursor.fetchone()[0]
# 执行分页查询
cursor.execute(final_query, [size, (page - 1) * size])
# 获取列名
columns = [description[0] for description in cursor.description]
records = []
# 构建记录
for row in cursor.fetchall():
record = dict(zip(columns, row))
# 解析JSON字符串
if 'covers' in record and record['covers']:
try:
record['covers'] = json.loads(record['covers'])
except json.JSONDecodeError:
record['covers'] = []
records.append(record)
return {
"status": "success",
"data": {
"records": records,
"total": total,
"size": size,
"current": page
}
}
except sqlite3.Error as e:
raise HTTPException(
status_code=500,
detail=f"数据库操作失败: {str(e)}"
)
finally:
if conn:
conn.close()
class UpdateRemarkRequest(BaseModel):
    # Request body for POST /update-remark.
    bvid: str       # BV id identifying the video
    view_at: int    # view timestamp (epoch seconds); also selects the year table
    remark: str     # new remark text to store
@router.post("/update-remark", summary="更新视频备注")
async def update_video_remark(request: UpdateRemarkRequest):
"""更新视频备注
Args:
request: 包含bvid、view_at和remark的请求体
Returns:
dict: 更新操作的结果
"""
try:
conn = get_db()
cursor = conn.cursor()
# 从时间戳获取年份
year = datetime.fromtimestamp(request.view_at).year
table_name = f"bilibili_history_{year}"
# 检查表是否存在
cursor.execute("""
SELECT name FROM sqlite_master
WHERE type='table' AND name=?
""", (table_name,))
if not cursor.fetchone():
raise HTTPException(
status_code=404,
detail=f"未找到 {year} 年的历史记录数据"
)
# 更新备注和备注时间
current_time = int(datetime.now().timestamp())
query = f"""
UPDATE {table_name}
SET remark = ?, remark_time = ?
WHERE bvid = ? AND view_at = ?
"""
cursor.execute(query, (request.remark, current_time, request.bvid, request.view_at))
conn.commit()
if cursor.rowcount == 0:
raise HTTPException(
status_code=404,
detail="未找到指定的视频记录"
)
return {
"status": "success",
"message": "备注更新成功",
"data": {
"bvid": request.bvid,
"view_at": request.view_at,
"remark": request.remark,
"remark_time": current_time
}
}
except sqlite3.Error as e:
raise HTTPException(
status_code=500,
detail=f"数据库操作失败: {str(e)}"
)
finally:
if conn:
conn.close()
@router.post("/reset-database", summary="重置数据库")
async def reset_database():
"""重置数据库
删除现有的数据库文件和last_import.json文件,用于重新导入数据
Returns:
dict: 操作结果
"""
try:
# 获取文件路径
db_path = get_output_path(config['db_file'])
last_import_path = get_output_path('last_import.json')
# 删除数据库文件
if os.path.exists(db_path):
try:
os.remove(db_path)
except Exception as e:
raise HTTPException(
status_code=500,
detail=f"删除数据库文件失败: {str(e)}"
)
# 删除last_import.json文件
if os.path.exists(last_import_path):
try:
os.remove(last_import_path)
except Exception as e:
raise HTTPException(
status_code=500,
detail=f"删除last_import.json文件失败: {str(e)}"
)
return {
"status": "success",
"message": "数据库已重置",
"data": {
"deleted_files": [
os.path.basename(db_path),
os.path.basename(last_import_path)
]
}
}
except Exception as e:
raise HTTPException(
status_code=500,
detail=f"重置数据库失败: {str(e)}"
)
@router.get("/sqlite-version", summary="获取SQLite版本")
async def get_sqlite_version():
"""获取 SQLite 版本信息"""
try:
conn = get_db()
cursor = conn.cursor()
version_info = {
"sqlite_version": None,
"user_version": None,
"database_settings": {
"journal_mode": None,
"synchronous": None,
"legacy_format": None,
"page_size": None,
"cache_size": None,
"encoding": None
}
}
# 获取 SQLite 版本信息
try:
cursor.execute('SELECT sqlite_version()')
result = cursor.fetchone()
version_info["sqlite_version"] = result[0] if result else "未知"
except sqlite3.Error as e:
print(f"获取 SQLite 版本失败: {e}")
# 获取所有 PRAGMA 设置
pragmas = {
"user_version": None,
"journal_mode": "journal_mode",
"synchronous": "synchronous",
"legacy_file_format": "legacy_format",
"page_size": "page_size",
"cache_size": "cache_size",
"encoding": "encoding"
}
# 获取用户版本
try:
cursor.execute('PRAGMA user_version')
result = cursor.fetchone()
version_info["user_version"] = result[0] if result else 0
except sqlite3.Error as e:
print(f"获取用户版本失败: {e}")
# 获取其他 PRAGMA 设置
for pragma_name, setting_name in pragmas.items():
if setting_name: # 跳过已经处理的 user_version
try:
cursor.execute(f'PRAGMA {pragma_name}')
result = cursor.fetchone()
if result is not None:
value = result[0]
# 特殊处理某些值
if pragma_name == "legacy_file_format":
value = bool(int(value)) if value is not None else False
elif pragma_name == "synchronous":
value = {0: "OFF", 1: "NORMAL", 2: "FULL"}.get(value, value)
version_info["database_settings"][setting_name] = value
except sqlite3.Error as e:
print(f"获取 {pragma_name} 设置失败: {e}")
# 获取数据库文件信息
db_path = get_output_path(config['db_file'])
db_exists = os.path.exists(db_path)
db_size = os.path.getsize(db_path) if db_exists else 0
# 添加数据库文件信息
version_info["database_file"] = {
"exists": db_exists,
"size_bytes": db_size,
"size_mb": round(db_size / (1024 * 1024), 2) if db_exists else 0,
"path": db_path
}
return {
"status": "success",
"data": version_info
}
except Exception as e:
return {
"status": "error",
"message": f"获取版本信息失败: {str(e)}"
}
finally:
if conn:
conn.close()
class BatchRemarksRequest(BaseModel):
    # Request body for POST /batch-remarks: each item is a dict carrying
    # the keys 'bvid' and 'view_at' that identify one history record.
    items: list[dict]
@router.post("/batch-remarks", summary="批量获取视频备注")
async def get_video_remarks(request: BatchRemarksRequest):
"""批量获取视频备注
Args:
request: 包含 items 列表的请求体,每个 item 包含 bvid 和 view_at
Returns:
dict: 包含所有视频备注信息的响应
"""
try:
conn = get_db()
cursor = conn.cursor()
# 按年份分组记录
records_by_year = {}
for record in request.items:
year = datetime.fromtimestamp(record['view_at']).year
if year not in records_by_year:
records_by_year[year] = []
records_by_year[year].append((record['bvid'], record['view_at']))
# 存储所有查询结果
results = {}
# 处理每个年份的数据
for year, year_records in records_by_year.items():
table_name = f"bilibili_history_{year}"
# 检查表是否存在
cursor.execute("""
SELECT name FROM sqlite_master
WHERE type='table' AND name=?
""", (table_name,))
if not cursor.fetchone():
print(f"未找到 {year} 年的历史记录数据")
continue
# 为每个(bvid, view_at)对执行单独的查询
for bvid, view_at in year_records:
query = f"""
SELECT bvid, view_at, title, remark, remark_time
FROM {table_name}
WHERE bvid = ? AND view_at = ?
"""
cursor.execute(query, (bvid, view_at))
row = cursor.fetchone()
if row:
bvid, view_at, title, remark, remark_time = row
results[f"{bvid}_{view_at}"] = {
"bvid": bvid,
"view_at": view_at,
"title": title,
"remark": remark,
"remark_time": remark_time
}
return {
"status": "success",
"data": results
}
except sqlite3.Error as e:
raise HTTPException(
status_code=500,
detail=f"数据库操作失败: {str(e)}"
)
finally:
if conn:
conn.close()
@router.get("/by_cid/{cid}", summary="根据CID查询视频详情")
async def get_video_by_cid(
cid: int,
use_local_images: bool = Query(False, description="是否使用本地图片"),
use_sessdata: bool = Query(True, description="是否在图片URL中使用SESSDATA")
):
"""
根据视频CID查询详细信息
Args:
cid: 视频的CID
use_local_images: 是否使用本地图片
use_sessdata: 是否在图片URL中使用SESSDATA
Returns:
视频的详细信息,包括标题、封面和作者信息
"""
print(f"【调试】开始根据CID={cid}查询视频信息")
try:
conn = get_db()
cursor = conn.cursor()
# 获取所有年份表
years = get_available_years()
if not years:
print(f"【调试】未找到任何历史记录数据表")
return {
"status": "error",
"message": "未找到任何历史记录数据"
}
print(f"【调试】找到以下年份表: {years}")
# 构建UNION ALL查询
queries = []
for year in years:
table_name = f"bilibili_history_{year}"
# 选择与视频相关的所有字段
queries.append(f"""
SELECT
id, title, long_title, cover, covers, uri, oid, epid, bvid, page,
cid, part, business, dt, videos, author_name, author_face, author_mid,
view_at, progress, badge, show_title, duration, current, total,
new_desc, is_finish, is_fav, kid, tag_name, live_status, main_category
FROM {table_name}
WHERE cid = {cid}
""")
# 组合所有查询,LIMIT放在最后,不使用括号
union_query = f"{' UNION ALL '.join(queries)} LIMIT 1"
print(f"【调试】CID查询SQL: {union_query}")
# 执行查询
cursor.execute(union_query)
columns = [description[0] for description in cursor.description]
record = cursor.fetchone()
if not record:
print(f"【调试】未找到CID为{cid}的视频记录")
return {
"status": "error",
"message": f"未找到CID为{cid}的视频记录"
}
print(f"【调试】找到CID={cid}的记录,开始处理")
# 构建记录字典
record_dict = dict(zip(columns, record))
# 打印记录内容以便调试
print(f"【调试】原始记录: {record_dict}")
# 处理图片URL
record_dict = _process_record(record_dict, use_local_images, use_sessdata)
# 添加视频的播放时间(人类可读格式)
if 'view_at' in record_dict and record_dict['view_at']:
record_dict['view_time'] = datetime.fromtimestamp(record_dict['view_at']).strftime("%Y-%m-%d %H:%M:%S")
print(f"【调试】处理后的记录: {record_dict}")
conn.close()
return {
"status": "success",
"data": record_dict
}
except Exception as e:
print(f"查询CID时出错: {str(e)}")
if 'conn' in locals() and conn:
conn.close()
return {
"status": "error",
"message": f"查询视频详情时出错: {str(e)}"
}
|
2929004360/ruoyi-sign
| 4,203
|
ruoyi-common/src/main/java/com/ruoyi/common/utils/html/EscapeUtil.java
|
package com.ruoyi.common.utils.html;
import com.ruoyi.common.utils.StringUtils;
/**
* 转义和反转义工具类
*
* @author ruoyi
*/
public class EscapeUtil
{
    // Regex matching any HTML tag (open, close, or self-closing).
    public static final String RE_HTML_MARK = "(<[^<]*?>)|(<[\\s]*?/[^<]*?>)|(<[^<]*?/[\\s]*?>)";

    // Per-character HTML-entity replacement table for code points 0..63.
    // NOTE(review): this table is initialized below but never referenced by
    // escape()/encode() in this class — encode() performs %XX/%uXXXX escaping
    // (JavaScript escape() style), not HTML entity replacement. Confirm
    // whether the table is dead code.
    private static final char[][] TEXT = new char[64][];
    static
    {
        for (int i = 0; i < 64; i++)
        {
            TEXT[i] = new char[] { (char) i };
        }
        // special HTML characters
        TEXT['\''] = "&#039;".toCharArray(); // single quote
        TEXT['"'] = "&#34;".toCharArray(); // double quote
        TEXT['&'] = "&#38;".toCharArray(); // ampersand
        TEXT['<'] = "&#60;".toCharArray(); // less-than
        TEXT['>'] = "&#62;".toCharArray(); // greater-than
    }

    /**
     * 转义文本中的HTML字符为安全的字符
     *
     * @param text 被转义的文本
     * @return 转义后的文本
     */
    public static String escape(String text)
    {
        return encode(text);
    }

    /**
     * 还原被转义的HTML特殊字符
     *
     * @param content 包含转义符的HTML内容
     * @return 转换后的字符串
     */
    public static String unescape(String content)
    {
        return decode(content);
    }

    /**
     * 清除所有HTML标签,但是不删除标签内的内容
     *
     * @param content 文本
     * @return 清除标签后的文本
     */
    public static String clean(String content)
    {
        return new HTMLFilter().filter(content);
    }

    /**
     * Escape-encodes text: characters below 256 become %XX, others %uXXXX
     * (mirrors the legacy JavaScript escape() function).
     *
     * @param text the text to encode
     * @return the encoded string, or "" for empty/null input
     */
    private static String encode(String text)
    {
        if (StringUtils.isEmpty(text))
        {
            return StringUtils.EMPTY;
        }
        // Worst case each char expands to 6 characters ("%uXXXX").
        final StringBuilder tmp = new StringBuilder(text.length() * 6);
        char c;
        for (int i = 0; i < text.length(); i++)
        {
            c = text.charAt(i);
            if (c < 256)
            {
                tmp.append("%");
                if (c < 16)
                {
                    // zero-pad to two hex digits
                    tmp.append("0");
                }
                tmp.append(Integer.toString(c, 16));
            }
            else
            {
                tmp.append("%u");
                if (c <= 0xfff)
                {
                    // issue#I49JU8@Gitee — zero-pad to four hex digits
                    tmp.append("0");
                }
                tmp.append(Integer.toString(c, 16));
            }
        }
        return tmp.toString();
    }

    /**
     * Escape-decodes content produced by encode(): %XX and %uXXXX sequences
     * are converted back into characters; everything else is copied verbatim.
     *
     * NOTE(review): the parsing below assumes every '%' is followed by a
     * complete escape sequence — a trailing or truncated '%' would throw
     * StringIndexOutOfBoundsException. Confirm inputs are always well-formed.
     *
     * @param content the escaped content
     * @return the decoded string (null/empty input returned as-is)
     */
    public static String decode(String content)
    {
        if (StringUtils.isEmpty(content))
        {
            return content;
        }
        StringBuilder tmp = new StringBuilder(content.length());
        int lastPos = 0, pos = 0;
        char ch;
        while (lastPos < content.length())
        {
            pos = content.indexOf("%", lastPos);
            if (pos == lastPos)
            {
                if (content.charAt(pos + 1) == 'u')
                {
                    // %uXXXX -> one UTF-16 code unit
                    ch = (char) Integer.parseInt(content.substring(pos + 2, pos + 6), 16);
                    tmp.append(ch);
                    lastPos = pos + 6;
                }
                else
                {
                    // %XX -> one character below 256
                    ch = (char) Integer.parseInt(content.substring(pos + 1, pos + 3), 16);
                    tmp.append(ch);
                    lastPos = pos + 3;
                }
            }
            else
            {
                if (pos == -1)
                {
                    // No more escapes; copy the rest and stop.
                    tmp.append(content.substring(lastPos));
                    lastPos = content.length();
                }
                else
                {
                    // Copy the literal run up to the next '%'.
                    tmp.append(content.substring(lastPos, pos));
                    lastPos = pos;
                }
            }
        }
        return tmp.toString();
    }

    public static void main(String[] args)
    {
        String html = "<script>alert(1);</script>";
        String escape = EscapeUtil.escape(html);
        // String html = "<scr<script>ipt>alert(\"XSS\")</scr<script>ipt>";
        // String html = "<123";
        // String html = "123>";
        System.out.println("clean: " + EscapeUtil.clean(html));
        System.out.println("escape: " + escape);
        System.out.println("unescape: " + EscapeUtil.unescape(escape));
    }
}
|
2929004360/ruoyi-sign
| 19,748
|
ruoyi-common/src/main/java/com/ruoyi/common/utils/html/HTMLFilter.java
|
package com.ruoyi.common.utils.html;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * HTML whitelist filter used to strip XSS attack vectors from user-submitted
 * input. Only whitelisted elements/attributes survive; everything else is
 * removed or escaped. Derived from the widely copied sesat HTML filter.
 *
 * Fixes in this revision: the literal HTML entities in htmlSpecialChars(),
 * balanceHTML() and checkEntity() had been corrupted to their decoded
 * characters (htmlSpecialChars even contained the invalid literal """),
 * which made those escapes no-ops; they are restored to &amp;, &quot;,
 * &lt;, &gt; per the upstream implementation.
 *
 * NOTE: instances are stateful (vTagCounts) and therefore not thread-safe;
 * filter() resets the state on each call.
 *
 * @author ruoyi
 */
public final class HTMLFilter
{
    /**
     * regex flag union representing /si modifiers in php
     **/
    private static final int REGEX_FLAGS_SI = Pattern.CASE_INSENSITIVE | Pattern.DOTALL;
    private static final Pattern P_COMMENTS = Pattern.compile("<!--(.*?)-->", Pattern.DOTALL);
    private static final Pattern P_COMMENT = Pattern.compile("^!--(.*)--$", REGEX_FLAGS_SI);
    private static final Pattern P_TAGS = Pattern.compile("<(.*?)>", Pattern.DOTALL);
    private static final Pattern P_END_TAG = Pattern.compile("^/([a-z0-9]+)", REGEX_FLAGS_SI);
    private static final Pattern P_START_TAG = Pattern.compile("^([a-z0-9]+)(.*?)(/?)$", REGEX_FLAGS_SI);
    private static final Pattern P_QUOTED_ATTRIBUTES = Pattern.compile("([a-z0-9]+)=([\"'])(.*?)\\2", REGEX_FLAGS_SI);
    private static final Pattern P_UNQUOTED_ATTRIBUTES = Pattern.compile("([a-z0-9]+)(=)([^\"\\s']+)", REGEX_FLAGS_SI);
    private static final Pattern P_PROTOCOL = Pattern.compile("^([^:]+):", REGEX_FLAGS_SI);
    private static final Pattern P_ENTITY = Pattern.compile("&#(\\d+);?");
    private static final Pattern P_ENTITY_UNICODE = Pattern.compile("&#x([0-9a-f]+);?");
    private static final Pattern P_ENCODE = Pattern.compile("%([0-9a-f]{2});?");
    private static final Pattern P_VALID_ENTITIES = Pattern.compile("&([^&;]*)(?=(;|&|$))");
    private static final Pattern P_VALID_QUOTES = Pattern.compile("(>|^)([^<]+?)(<|$)", Pattern.DOTALL);
    private static final Pattern P_END_ARROW = Pattern.compile("^>");
    private static final Pattern P_BODY_TO_END = Pattern.compile("<([^>]*?)(?=<|$)");
    private static final Pattern P_XML_CONTENT = Pattern.compile("(^|>)([^<]*?)(?=>)");
    private static final Pattern P_STRAY_LEFT_ARROW = Pattern.compile("<([^>]*?)(?=<|$)");
    private static final Pattern P_STRAY_RIGHT_ARROW = Pattern.compile("(^|>)([^<]*?)(?=>)");
    private static final Pattern P_AMP = Pattern.compile("&");
    private static final Pattern P_QUOTE = Pattern.compile("\"");
    private static final Pattern P_LEFT_ARROW = Pattern.compile("<");
    private static final Pattern P_RIGHT_ARROW = Pattern.compile(">");
    private static final Pattern P_BOTH_ARROWS = Pattern.compile("<>");
    // @xxx could grow large... maybe use sesat's ReferenceMap
    private static final ConcurrentMap<String, Pattern> P_REMOVE_PAIR_BLANKS = new ConcurrentHashMap<>();
    private static final ConcurrentMap<String, Pattern> P_REMOVE_SELF_BLANKS = new ConcurrentHashMap<>();
    /**
     * set of allowed html elements, along with allowed attributes for each element
     **/
    private final Map<String, List<String>> vAllowed;
    /**
     * counts of open tags for each (allowable) html element
     **/
    private final Map<String, Integer> vTagCounts = new HashMap<>();
    /**
     * html elements which must always be self-closing (e.g. "<img />")
     **/
    private final String[] vSelfClosingTags;
    /**
     * html elements which must always have separate opening and closing tags (e.g. "<b></b>")
     **/
    private final String[] vNeedClosingTags;
    /**
     * set of disallowed html elements
     **/
    private final String[] vDisallowed;
    /**
     * attributes which should be checked for valid protocols
     **/
    private final String[] vProtocolAtts;
    /**
     * allowed protocols
     **/
    private final String[] vAllowedProtocols;
    /**
     * tags which should be removed if they contain no content (e.g. "<b></b>" or "<b />")
     **/
    private final String[] vRemoveBlanks;
    /**
     * entities allowed within html markup
     **/
    private final String[] vAllowedEntities;
    /**
     * flag determining whether comments are allowed in input String.
     */
    private final boolean stripComment;
    private final boolean encodeQuotes;
    /**
     * flag determining whether to try to make tags when presented with "unbalanced" angle brackets (e.g. "<b text </b>"
     * becomes "<b> text </b>"). If set to false, unbalanced angle brackets will be html escaped.
     */
    private final boolean alwaysMakeTags;
    /**
     * Default constructor: a conservative whitelist of a/img/b/strong/i/em
     * with http/https/mailto protocols only.
     */
    public HTMLFilter()
    {
        vAllowed = new HashMap<>();
        final ArrayList<String> a_atts = new ArrayList<>();
        a_atts.add("href");
        a_atts.add("target");
        vAllowed.put("a", a_atts);
        final ArrayList<String> img_atts = new ArrayList<>();
        img_atts.add("src");
        img_atts.add("width");
        img_atts.add("height");
        img_atts.add("alt");
        vAllowed.put("img", img_atts);
        final ArrayList<String> no_atts = new ArrayList<>();
        vAllowed.put("b", no_atts);
        vAllowed.put("strong", no_atts);
        vAllowed.put("i", no_atts);
        vAllowed.put("em", no_atts);
        vSelfClosingTags = new String[] { "img" };
        vNeedClosingTags = new String[] { "a", "b", "strong", "i", "em" };
        vDisallowed = new String[] {};
        vAllowedProtocols = new String[] { "http", "mailto", "https" }; // no ftp.
        vProtocolAtts = new String[] { "src", "href" };
        vRemoveBlanks = new String[] { "a", "b", "strong", "i", "em" };
        vAllowedEntities = new String[] { "amp", "gt", "lt", "quot" };
        stripComment = true;
        encodeQuotes = true;
        alwaysMakeTags = false;
    }
    /**
     * Map-parameter configurable constructor.
     *
     * NOTE: the configuration checks below use {@code assert}, which only
     * runs when the JVM is started with -ea; a missing key otherwise
     * surfaces later as a NullPointerException/ClassCastException.
     *
     * @param conf map containing configuration. keys match field names.
     */
    @SuppressWarnings("unchecked")
    public HTMLFilter(final Map<String, Object> conf)
    {
        assert conf.containsKey("vAllowed") : "configuration requires vAllowed";
        assert conf.containsKey("vSelfClosingTags") : "configuration requires vSelfClosingTags";
        assert conf.containsKey("vNeedClosingTags") : "configuration requires vNeedClosingTags";
        assert conf.containsKey("vDisallowed") : "configuration requires vDisallowed";
        assert conf.containsKey("vAllowedProtocols") : "configuration requires vAllowedProtocols";
        assert conf.containsKey("vProtocolAtts") : "configuration requires vProtocolAtts";
        assert conf.containsKey("vRemoveBlanks") : "configuration requires vRemoveBlanks";
        assert conf.containsKey("vAllowedEntities") : "configuration requires vAllowedEntities";
        vAllowed = Collections.unmodifiableMap((HashMap<String, List<String>>) conf.get("vAllowed"));
        vSelfClosingTags = (String[]) conf.get("vSelfClosingTags");
        vNeedClosingTags = (String[]) conf.get("vNeedClosingTags");
        vDisallowed = (String[]) conf.get("vDisallowed");
        vAllowedProtocols = (String[]) conf.get("vAllowedProtocols");
        vProtocolAtts = (String[]) conf.get("vProtocolAtts");
        vRemoveBlanks = (String[]) conf.get("vRemoveBlanks");
        vAllowedEntities = (String[]) conf.get("vAllowedEntities");
        stripComment = conf.containsKey("stripComment") ? (Boolean) conf.get("stripComment") : true;
        encodeQuotes = conf.containsKey("encodeQuotes") ? (Boolean) conf.get("encodeQuotes") : true;
        alwaysMakeTags = conf.containsKey("alwaysMakeTags") ? (Boolean) conf.get("alwaysMakeTags") : true;
    }
    /** Clears the per-call open-tag counters. */
    private void reset()
    {
        vTagCounts.clear();
    }
    // ---------------------------------------------------------------
    // my versions of some PHP library functions
    public static String chr(final int decimal)
    {
        return String.valueOf((char) decimal);
    }
    /**
     * Equivalent of PHP htmlspecialchars(): escapes &, ", < and > as
     * HTML entities. The & pass runs first so it does not re-escape the
     * entities produced by the later passes.
     */
    public static String htmlSpecialChars(final String s)
    {
        String result = s;
        result = regexReplace(P_AMP, "&amp;", result);
        result = regexReplace(P_QUOTE, "&quot;", result);
        result = regexReplace(P_LEFT_ARROW, "&lt;", result);
        result = regexReplace(P_RIGHT_ARROW, "&gt;", result);
        return result;
    }
    // ---------------------------------------------------------------
    /**
     * given a user submitted input String, filter out any invalid or restricted html.
     *
     * @param input text (i.e. submitted by a user) than may contain html
     * @return "clean" version of input, with only valid, whitelisted html elements allowed
     */
    public String filter(final String input)
    {
        reset();
        String s = input;
        s = escapeComments(s);
        s = balanceHTML(s);
        s = checkTags(s);
        s = processRemoveBlanks(s);
        // entity validation deliberately disabled (RuoYi modification)
        // s = validateEntities(s);
        return s;
    }
    public boolean isAlwaysMakeTags()
    {
        return alwaysMakeTags;
    }
    public boolean isStripComments()
    {
        return stripComment;
    }
    /**
     * HTML-escapes the body of a comment so it cannot smuggle markup.
     * NOTE: uses if, not while — only the FIRST comment in the input is
     * escaped; later comments pass through and are handled by checkTags.
     */
    private String escapeComments(final String s)
    {
        final Matcher m = P_COMMENTS.matcher(s);
        final StringBuffer buf = new StringBuffer();
        if (m.find())
        {
            final String match = m.group(1); // (.*?)
            m.appendReplacement(buf, Matcher.quoteReplacement("<!--" + htmlSpecialChars(match) + "-->"));
        }
        m.appendTail(buf);
        return buf.toString();
    }
    private String balanceHTML(String s)
    {
        if (alwaysMakeTags)
        {
            //
            // try and form html
            //
            s = regexReplace(P_END_ARROW, "", s);
            // do not append a closing tag (RuoYi modification)
            s = regexReplace(P_BODY_TO_END, "<$1>", s);
            s = regexReplace(P_XML_CONTENT, "$1<$2", s);
        }
        else
        {
            //
            // escape stray brackets
            //
            s = regexReplace(P_STRAY_LEFT_ARROW, "&lt;$1", s);
            s = regexReplace(P_STRAY_RIGHT_ARROW, "$1$2&gt;<", s);
            //
            // the last regexp causes '<>' entities to appear
            // (we need to do a lookahead assertion so that the last bracket can
            // be used in the next pass of the regexp)
            //
            s = regexReplace(P_BOTH_ARROWS, "", s);
        }
        return s;
    }
    /**
     * Rewrites each &lt;...&gt; region through processTag(), then appends
     * closing tags for any elements left open.
     */
    private String checkTags(String s)
    {
        Matcher m = P_TAGS.matcher(s);
        final StringBuffer buf = new StringBuffer();
        while (m.find())
        {
            String replaceStr = m.group(1);
            replaceStr = processTag(replaceStr);
            m.appendReplacement(buf, Matcher.quoteReplacement(replaceStr));
        }
        m.appendTail(buf);
        // these get tallied in processTag
        // (remember to reset before subsequent calls to filter method)
        final StringBuilder sBuilder = new StringBuilder(buf.toString());
        for (String key : vTagCounts.keySet())
        {
            for (int ii = 0; ii < vTagCounts.get(key); ii++)
            {
                sBuilder.append("</").append(key).append(">");
            }
        }
        s = sBuilder.toString();
        return s;
    }
    /** Removes empty whitelisted elements, e.g. "<b></b>" and "<b />". */
    private String processRemoveBlanks(final String s)
    {
        String result = s;
        for (String tag : vRemoveBlanks)
        {
            if (!P_REMOVE_PAIR_BLANKS.containsKey(tag))
            {
                P_REMOVE_PAIR_BLANKS.putIfAbsent(tag, Pattern.compile("<" + tag + "(\\s[^>]*)?></" + tag + ">"));
            }
            result = regexReplace(P_REMOVE_PAIR_BLANKS.get(tag), "", result);
            if (!P_REMOVE_SELF_BLANKS.containsKey(tag))
            {
                P_REMOVE_SELF_BLANKS.putIfAbsent(tag, Pattern.compile("<" + tag + "(\\s[^>]*)?/>"));
            }
            result = regexReplace(P_REMOVE_SELF_BLANKS.get(tag), "", result);
        }
        return result;
    }
    private static String regexReplace(final Pattern regex_pattern, final String replacement, final String s)
    {
        Matcher m = regex_pattern.matcher(s);
        return m.replaceAll(replacement);
    }
    /**
     * Given the inside of one tag (without the angle brackets), returns its
     * sanitized replacement, or "" when the tag is not whitelisted.
     */
    private String processTag(final String s)
    {
        // ending tags
        Matcher m = P_END_TAG.matcher(s);
        if (m.find())
        {
            final String name = m.group(1).toLowerCase();
            if (allowed(name))
            {
                if (!inArray(name, vSelfClosingTags))
                {
                    if (vTagCounts.containsKey(name))
                    {
                        vTagCounts.put(name, vTagCounts.get(name) - 1);
                        return "</" + name + ">";
                    }
                }
            }
        }
        // starting tags
        m = P_START_TAG.matcher(s);
        if (m.find())
        {
            final String name = m.group(1).toLowerCase();
            final String body = m.group(2);
            String ending = m.group(3);
            // debug( "in a starting tag, name='" + name + "'; body='" + body + "'; ending='" + ending + "'" );
            if (allowed(name))
            {
                final StringBuilder params = new StringBuilder();
                final Matcher m2 = P_QUOTED_ATTRIBUTES.matcher(body);
                final Matcher m3 = P_UNQUOTED_ATTRIBUTES.matcher(body);
                final List<String> paramNames = new ArrayList<>();
                final List<String> paramValues = new ArrayList<>();
                while (m2.find())
                {
                    paramNames.add(m2.group(1)); // ([a-z0-9]+)
                    paramValues.add(m2.group(3)); // (.*?)
                }
                while (m3.find())
                {
                    paramNames.add(m3.group(1)); // ([a-z0-9]+)
                    paramValues.add(m3.group(3)); // ([^\"\\s']+)
                }
                String paramName, paramValue;
                for (int ii = 0; ii < paramNames.size(); ii++)
                {
                    paramName = paramNames.get(ii).toLowerCase();
                    paramValue = paramValues.get(ii);
                    if (allowedAttribute(name, paramName))
                    {
                        if (inArray(paramName, vProtocolAtts))
                        {
                            paramValue = processParamProtocol(paramValue);
                        }
                        // NOTE(review): emits backslash-escaped quotes (attr=\"value\")
                        // rather than plain quotes — looks like a deliberate local
                        // modification (cf. the JSON note in encodeQuotes); confirm
                        // before "fixing" to append("=\"") / append("\"").
                        params.append(' ').append(paramName).append("=\\\"").append(paramValue).append("\\\"");
                    }
                }
                if (inArray(name, vSelfClosingTags))
                {
                    ending = " /";
                }
                if (inArray(name, vNeedClosingTags))
                {
                    ending = "";
                }
                if (ending == null || ending.length() < 1)
                {
                    if (vTagCounts.containsKey(name))
                    {
                        vTagCounts.put(name, vTagCounts.get(name) + 1);
                    }
                    else
                    {
                        vTagCounts.put(name, 1);
                    }
                }
                else
                {
                    ending = " /";
                }
                return "<" + name + params + ending + ">";
            }
            else
            {
                return "";
            }
        }
        // comments
        m = P_COMMENT.matcher(s);
        if (!stripComment && m.find())
        {
            return "<" + m.group() + ">";
        }
        return "";
    }
    /**
     * Rewrites a protocol-bearing attribute value (href/src): values whose
     * protocol is not whitelisted are turned into local anchor links.
     */
    private String processParamProtocol(String s)
    {
        s = decodeEntities(s);
        final Matcher m = P_PROTOCOL.matcher(s);
        if (m.find())
        {
            final String protocol = m.group(1);
            if (!inArray(protocol, vAllowedProtocols))
            {
                // bad protocol, turn into local anchor link instead
                s = "#" + s.substring(protocol.length() + 1);
                if (s.startsWith("#//"))
                {
                    s = "#" + s.substring(3);
                }
            }
        }
        return s;
    }
    /**
     * Decodes numeric entities (&#NN; and &#xNN;) and %XX encodings so a
     * disguised protocol (e.g. "jav&#97;script:") cannot slip past
     * processParamProtocol().
     */
    private String decodeEntities(String s)
    {
        StringBuffer buf = new StringBuffer();
        Matcher m = P_ENTITY.matcher(s);
        while (m.find())
        {
            final String match = m.group(1);
            final int decimal = Integer.decode(match).intValue();
            m.appendReplacement(buf, Matcher.quoteReplacement(chr(decimal)));
        }
        m.appendTail(buf);
        s = buf.toString();
        buf = new StringBuffer();
        m = P_ENTITY_UNICODE.matcher(s);
        while (m.find())
        {
            final String match = m.group(1);
            final int decimal = Integer.valueOf(match, 16).intValue();
            m.appendReplacement(buf, Matcher.quoteReplacement(chr(decimal)));
        }
        m.appendTail(buf);
        s = buf.toString();
        buf = new StringBuffer();
        m = P_ENCODE.matcher(s);
        while (m.find())
        {
            final String match = m.group(1);
            final int decimal = Integer.valueOf(match, 16).intValue();
            m.appendReplacement(buf, Matcher.quoteReplacement(chr(decimal)));
        }
        m.appendTail(buf);
        s = buf.toString();
        s = validateEntities(s);
        return s;
    }
    private String validateEntities(final String s)
    {
        StringBuffer buf = new StringBuffer();
        // validate entities throughout the string
        Matcher m = P_VALID_ENTITIES.matcher(s);
        while (m.find())
        {
            final String one = m.group(1); // ([^&;]*)
            final String two = m.group(2); // (?=(;|&|$))
            m.appendReplacement(buf, Matcher.quoteReplacement(checkEntity(one, two)));
        }
        m.appendTail(buf);
        return encodeQuotes(buf.toString());
    }
    private String encodeQuotes(final String s)
    {
        if (encodeQuotes)
        {
            StringBuffer buf = new StringBuffer();
            Matcher m = P_VALID_QUOTES.matcher(s);
            while (m.find())
            {
                final String one = m.group(1); // (>|^)
                final String two = m.group(2); // ([^<]+?)
                final String three = m.group(3); // (<|$)
                // Intentionally does NOT replace double quotes with &quot;
                // so embedded JSON stays valid (RuoYi modification);
                // upstream used: regexReplace(P_QUOTE, "&quot;", two)
                m.appendReplacement(buf, Matcher.quoteReplacement(one + two + three));
            }
            m.appendTail(buf);
            return buf.toString();
        }
        else
        {
            return s;
        }
    }
    /**
     * Keeps a whitelisted, properly terminated entity as-is; otherwise
     * escapes the leading ampersand so the text cannot form an entity.
     */
    private String checkEntity(final String preamble, final String term)
    {
        return ";".equals(term) && isValidEntity(preamble) ? '&' + preamble : "&amp;" + preamble;
    }
    private boolean isValidEntity(final String entity)
    {
        return inArray(entity, vAllowedEntities);
    }
    private static boolean inArray(final String s, final String[] array)
    {
        for (String item : array)
        {
            if (item != null && item.equals(s))
            {
                return true;
            }
        }
        return false;
    }
    private boolean allowed(final String name)
    {
        return (vAllowed.isEmpty() || vAllowed.containsKey(name)) && !inArray(name, vDisallowed);
    }
    private boolean allowedAttribute(final String name, final String paramName)
    {
        return allowed(name) && (vAllowed.isEmpty() || vAllowed.get(name).contains(paramName));
    }
}
|
Subsets and Splits
HTML Files in Train Set
Retrieves all records from the dataset where the file path ends with .html or .htm, providing a basic filter for HTML files.
SQL Console for nick007x/github-code-2025
Retrieves 200 file paths that end with '.html' or '.htm', providing a basic overview of HTML files in the dataset.
Top HTML Files
The query retrieves a sample of HTML file paths, providing basic filtering but limited analytical value.
CSharp Repositories Excluding Unity
Retrieves all records for repositories that contain C# files but are not related to Unity, providing a basic filter of the dataset.
C# File Count per Repository
Counts the total number of C# files across distinct repositories, providing a basic measure of C# file presence.
SQL Console for nick007x/github-code-2025
Lists unique repository IDs containing C# files, providing basic filtering to understand which repositories have C# code.
Select Groovy Files: Train Set
Retrieves the first 1000 entries from the 'train' dataset where the file path ends with '.groovy', providing a basic sample of Groovy files.
GitHub Repos with WiFiClientSecure
Finds specific file paths in repositories that contain particular code snippets related to WiFiClientSecure and ChatGPT, providing basic filtering of relevant files.