🎨 Improve the code

✨ Introducing new features.
  - qb manual file upload: support uploading to a specified group number from a private chat
🐛 Fixing a bug.
  - Fix the misspelled config options LIMIT and DOWN_STATUS_MSG_GROUP

Note:
This commit has not been thoroughly tested! It may have introduced new bugs.

See the commits for more changes.
Quan666 committed May 7, 2021
1 parent 8b42109 commit 04a89a2
Showing 19 changed files with 337 additions and 337 deletions.
4 changes: 2 additions & 2 deletions .env.prod
@@ -11,7 +11,7 @@ RSS_PROXY = '127.0.0.1:7890' # proxy address
RSSHUB = 'https://rsshub.app' # rsshub subscription address
RSSHUB_backup = [] # backup rsshub addresses, example: ["https://rsshub.app","https://rsshub.app"], be sure to use double quotes!!!
DB_CACHE_EXPIRE = 30 # days to keep records in the deduplication database before cleanup
LIMT = 50 # number of cached rss entries
LIMIT = 50 # number of cached rss entries


# maximum width/height of non-GIF images after compression
@@ -43,7 +43,7 @@ qb_web_url='http://127.0.0.1:8081' # the qbittorrent client is disabled by default
down_status_msg_group=[] # groups that receive download progress messages, example: [12345678]; note: it is best to mute this group
down_status_msg_date=10 # interval between download progress checks and notifications, in seconds; values below 10s are not recommended

VERSION = 'v2.2.1'
VERSION = 'v2.2.2'

# MYELF blog: https://myelf.club
# if you run into problems, please open an issue on GitHub
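
For context, a NoneBot2 plugin typically maps these .env entries onto a pydantic settings object; the sketch below shows that general pattern using the keys above (the class name, the defaults, and the pydantic-v1-style loading are assumptions, not the plugin's actual config module).

import nonebot
from pydantic import BaseSettings


class ELFConfig(BaseSettings):
    # field names mirror the .env.prod keys above (NoneBot lower-cases them);
    # the defaults are placeholders copied from the file, not guaranteed to match the plugin
    limit: int = 50
    db_cache_expire: int = 30
    qb_web_url: str = "http://127.0.0.1:8081"
    down_status_msg_group: list = []
    down_status_msg_date: int = 10

    class Config:
        extra = "ignore"  # ignore the many unrelated keys in .env.prod


config = ELFConfig(**nonebot.get_driver().config.dict())
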
12 changes: 6 additions & 6 deletions src/plugins/ELF_RSS2/RSS/my_trigger.py
@@ -11,27 +11,27 @@

# check a single rss feed for updates  # task body
@util.time_out(time=300) # task timeout, in seconds
async def check_update(rss: rss_class.rss):
async def check_update(rss: rss_class.Rss):
logger.info('{} 检查更新'.format(rss.name))
await rss_parsing.start(rss)


async def delJob(rss: rss_class.rss):
async def delete_job(rss: rss_class.Rss):
scheduler = require("nonebot_plugin_apscheduler").scheduler
try:
scheduler.remove_job(rss.name)
except Exception as e:
logger.debug(e)


async def addJob(rss: rss_class.rss):
await delJob(rss)
async def add_job(rss: rss_class.Rss):
await delete_job(rss)
# add to the subscription job queue; before adding, check that a group or user exists (the two cannot both be empty)
if len(rss.user_id) > 0 or len(rss.group_id) > 0:
rss_trigger(rss)


def rss_trigger(rss: rss_class.rss):
def rss_trigger(rss: rss_class.Rss):
if re.search(r'[_*/,-]', rss.time):
my_trigger_cron(rss)
return
@@ -61,7 +61,7 @@ def rss_trigger(rss: rss_class.rss):
# see https://www.runoob.com/linux/linux-comm-crontab.html


def my_trigger_cron(rss: rss_class.rss):
def my_trigger_cron(rss: rss_class.Rss):
# parse the parameters
tmp_list = rss.time.split('_')
times_list = ['*/5', '*', '*', '*', '*']
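
The rest of my_trigger_cron is collapsed in the diff, but the visible defaults suggest the '_'-separated rss.time string is merged into a five-field cron spec. A rough sketch of that idea with APScheduler's CronTrigger (the field order and the merge rule are assumptions, not the plugin's exact logic):

from apscheduler.triggers.cron import CronTrigger


def cron_trigger_from_time(time_str: str) -> CronTrigger:
    # minute, hour, day, month, day_of_week -- same defaults as times_list above
    defaults = ['*/5', '*', '*', '*', '*']
    fields = time_str.split('_')
    fields += defaults[len(fields):]  # pad any missing trailing fields
    minute, hour, day, month, day_of_week = [
        f if f else d for f, d in zip(fields[:5], defaults)
    ]
    return CronTrigger(minute=minute, hour=hour, day=day,
                       month=month, day_of_week=day_of_week)


# e.g. scheduler.add_job(check_update, cron_trigger_from_time(rss.time),
#                        args=(rss,), id=rss.name, misfire_grace_time=30)
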
71 changes: 34 additions & 37 deletions src/plugins/ELF_RSS2/RSS/qbittorrent_download.py
@@ -2,14 +2,12 @@
import base64
import datetime
import re
import time

import httpx
import nonebot
from apscheduler.triggers.interval import IntervalTrigger
from nonebot import logger, require
from qbittorrent import Client
# from starlette.responses import FileResponse
from ..config import config

# schedule
@@ -23,7 +21,7 @@

DOWN_STATUS_DOWNING = 1 # downloading
DOWN_STATUS_UPLOADING = 2 # uploading
DOWN_STATUS_UPLOADOK = 3 # upload finished
DOWN_STATUS_UPLOAD_OK = 3 # upload finished
down_info = {}


@@ -37,7 +35,7 @@
# }

# send a notification
async def send_Msg(msg: str) -> list:
async def send_msg(msg: str) -> list:
logger.info(msg)
bot, = nonebot.get_bots().values()
msg_id = []
@@ -46,7 +44,7 @@ async def send_Msg(msg: str) -> list:
return msg_id


async def get_qb():
async def get_qb_client():
try:
qb = Client(config.qb_web_url)
qb.login()
@@ -68,7 +66,7 @@ async def get_qb():
return qb


def getSize(size: int) -> str:
def get_size(size: int) -> str:
kb = 1024
mb = kb * 1024
gb = mb * 1024
@@ -101,44 +99,44 @@ def get_torrent_b16Hash(content: bytes) -> str:
return b16Hash
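
The body of get_torrent_b16Hash is collapsed above; a torrent's base16 (hex) infohash is conventionally the SHA-1 of the bencoded "info" dictionary. A minimal sketch of that computation, assuming the third-party bencodepy package (the plugin's own implementation may differ):

import hashlib

import bencodepy  # bencode parser; an assumption, not necessarily what the plugin uses


def torrent_infohash_b16(content: bytes) -> str:
    """Return the lowercase hex infohash of raw .torrent file bytes."""
    metadata = bencodepy.decode(content)        # decode the whole .torrent structure
    info = bencodepy.encode(metadata[b"info"])  # re-encode just the info dictionary
    return hashlib.sha1(info).hexdigest()       # qBittorrent reports hashes as lowercase hex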


async def get_Hash_Name(url: str, proxy=None) -> dict:
async def get_torrent_info_from_hash(url: str, proxy=None) -> dict:
if not proxy:
proxy = {}
qb = await get_qb()
qb = await get_qb_client()
info = None
if re.search(r"magnet:\?xt=urn:btih:", url):
qb.download_from_link(link=url)
hash = re.search('[a-f0-9]{40}', url)[0]
hash_str = re.search('[a-f0-9]{40}', url)[0]
else:
async with httpx.AsyncClient(proxies=proxy) as client:
try:
res = await client.get(url, timeout=100)
qb.download_from_file(res.content)
hash = get_torrent_b16Hash(res.content)
hash_str = get_torrent_b16Hash(res.content)
except Exception as e:
await send_Msg('下载种子失败,可能需要代理:{}'.format(e))
await send_msg('下载种子失败,可能需要代理:{}'.format(e))
return None

while not info:
for tmp_torrent in qb.torrents():
if tmp_torrent['hash'] == hash and tmp_torrent['size']:
if tmp_torrent['hash'] == hash_str and tmp_torrent['size']:
info = {
'hash': tmp_torrent['hash'],
'filename': tmp_torrent['name'],
'size': getSize(tmp_torrent['size'])
'size': get_size(tmp_torrent['size'])
}
await asyncio.sleep(1)
return info


# torrent URL, torrent download path, group list for group file upload, subscription name
async def start_down(url: str, path: str, group_ids: list, name: str, proxy=None) -> str:
qb = await get_qb()
qb = await get_qb_client()
if not qb:
return
# get the torrent hash
info = await get_Hash_Name(url=url, proxy=proxy)
await rss_trigger(hash=info['hash'], group_ids=group_ids,
info = await get_torrent_info_from_hash(url=url, proxy=proxy)
await rss_trigger(hash_str=info['hash'], group_ids=group_ids,
name='订阅:{}\n{}\n文件大小:{}'.format(name, info['filename'], info['size']))
down_info[info['hash']] = {
"status": DOWN_STATUS_DOWNING,
@@ -149,17 +147,17 @@


# check the download status
async def check_down_status(hash: str, group_ids: list, name: str):
qb = await get_qb()
async def check_down_status(hash_str: str, group_ids: list, name: str):
qb = await get_qb_client()
if not qb:
return
info = qb.get_torrent(hash)
files = qb.get_torrent_files(hash)
info = qb.get_torrent(hash_str)
files = qb.get_torrent_files(hash_str)
bot, = nonebot.get_bots().values()
if info['total_downloaded'] - info['total_size'] >= 0.000000:
all_time = (datetime.datetime.now() - down_info[hash]['start_time']).seconds
await send_Msg(str('👏 {}\nHash: {} \n下载完成!耗时:{} s'.format(name, hash, str(all_time))))
down_info[hash]['status'] = DOWN_STATUS_UPLOADING
all_time = (datetime.datetime.now() - down_info[hash_str]['start_time']).seconds
await send_msg(str('👏 {}\nHash: {} \n下载完成!耗时:{} s'.format(name, hash_str, str(all_time))))
down_info[hash_str]['status'] = DOWN_STATUS_UPLOADING
for group_id in group_ids:
for tmp in files:
# wrap in try/except so a timeout error does not stop the remaining uploads
@@ -168,18 +166,19 @@ async def check_down_status(hash: str, group_ids: list, name: str):
path = config.qb_down_path + tmp['name']
else:
path = info['save_path'] + tmp['name']
await send_Msg(str('{}\nHash: {} \n开始上传到群:{}'.format(name, hash, group_id)))
await send_msg(str('{}\nHash: {} \n开始上传到群:{}'.format(name, hash_str, group_id)))
await bot.call_api('upload_group_file', group_id=group_id, file=path, name=tmp['name'])
except Exception:
except TimeoutError as e:
logger.warning(e)
continue
scheduler = require("nonebot_plugin_apscheduler").scheduler
scheduler.remove_job(hash)
down_info[hash]['status'] = DOWN_STATUS_UPLOADOK
scheduler.remove_job(hash_str)
down_info[hash_str]['status'] = DOWN_STATUS_UPLOAD_OK
else:
await delete_msg(down_info[hash]['downing_tips_msg_id'])
msg_id = await send_Msg(str('{}\nHash: {} \n下载了 {}%\n平均下载速度:{} KB/s'.format(name, hash, round(
await delete_msg(down_info[hash_str]['downing_tips_msg_id'])
msg_id = await send_msg(str('{}\nHash: {} \n下载了 {}%\n平均下载速度:{} KB/s'.format(name, hash_str, round(
info['total_downloaded'] / info['total_size'] * 100, 2), round(info['dl_speed_avg'] / 1024, 2))))
down_info[hash]['downing_tips_msg_id'] = msg_id
down_info[hash_str]['downing_tips_msg_id'] = msg_id


# recall (delete) messages
@@ -189,24 +188,22 @@ async def delete_msg(msg_ids: list):
await bot.call_api('delete_msg', message_id=msg_id['message_id'])


async def rss_trigger(hash: str, group_ids: list, name: str):
async def rss_trigger(hash_str: str, group_ids: list, name: str):
scheduler = require("nonebot_plugin_apscheduler").scheduler
# build a trigger that fires once every `time` interval
trigger = IntervalTrigger(
# minutes=1,
seconds=int(config.down_status_msg_date),
jitter=10
)
job_defaults = {'max_instances': 10}
job_defaults = {'max_instances': 1}
# add the job
scheduler.add_job(
func=check_down_status, # the function to schedule; do not pass it with arguments
trigger=trigger, # trigger
args=(hash, group_ids, name), # the function's argument list; note: a single-value tuple must keep its trailing comma
id=hash,
# kwargs=None,
args=(hash_str, group_ids, name), # the function's argument list; note: a single-value tuple must keep its trailing comma
id=hash_str,
misfire_grace_time=60, # allowed misfire grace time; it is recommended not to omit it
# jobstore='default', # job store, explained in the next section
job_defaults=job_defaults,
)
await send_Msg(str('👏 {}\nHash: {} \n下载任务添加成功!'.format(name, hash)))
await send_msg(str('👏 {}\nHash: {} \n下载任务添加成功!'.format(name, hash_str)))