
Commit

Merge pull request #3580 from jxxghp/v2
Sync
DDSRem authored Dec 20, 2024
2 parents a3c048b + 8f05ea5 commit 7288dd2
Showing 12 changed files with 142 additions and 124 deletions.
4 changes: 2 additions & 2 deletions app/api/endpoints/subscribe.py
@@ -294,7 +294,7 @@ def delete_subscribe_by_mediaid(
# 发送事件
eventmanager.send_event(EventType.SubscribeDeleted, {
"subscribe_id": subscribe.id,
"subscribe": subscribe.to_dict()
"subscribe_info": subscribe.to_dict()
})
return schemas.Response(success=True)

@@ -521,7 +521,7 @@ def delete_subscribe(
# 发送事件
eventmanager.send_event(EventType.SubscribeDeleted, {
"subscribe_id": subscribe_id,
"subscribe": subscribe.to_dict()
"subscribe_info": subscribe.to_dict()
})
# 统计订阅
SubscribeHelper().sub_done_async({
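Both hunks in this endpoint rename the SubscribeDeleted payload key from "subscribe" to "subscribe_info", so any listener still reading the old key will now get None. Below is a minimal consumer sketch under that assumption; the handler signature and how it gets registered are illustrative, only the payload keys come from this diff.

from typing import Any, Dict


def on_subscribe_deleted(event_data: Dict[str, Any]) -> None:
    # Handler shape is hypothetical; the keys mirror the payload built above.
    subscribe_id = event_data.get("subscribe_id")
    subscribe_info = event_data.get("subscribe_info") or {}  # renamed from "subscribe"
    print(f"subscribe {subscribe_id} deleted: {subscribe_info.get('name')}")


on_subscribe_deleted({"subscribe_id": 1, "subscribe_info": {"name": "Example Show"}})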
61 changes: 35 additions & 26 deletions app/chain/subscribe.py
@@ -395,15 +395,17 @@ def update_subscribe_priority(self, subscribe: Subscribe, meta: MetaInfo,
return
# 当前下载资源的优先级
priority = max([item.torrent_info.pri_order for item in downloads])
# 订阅存在待定策略,不管是否已完成,均需更新订阅信息
self.subscribeoper.update(subscribe.id, {
"current_priority": priority,
"last_update": datetime.now().strftime('%Y-%m-%d %H:%M:%S')
})
if priority == 100:
# 洗版完成
self.__finish_subscribe(subscribe=subscribe, meta=meta, mediainfo=mediainfo, bestversion=True)
self.__finish_subscribe(subscribe=subscribe, meta=meta, mediainfo=mediainfo)
else:
# 正在洗版,更新资源优先级
logger.info(f'{mediainfo.title_year} 正在洗版,更新资源优先级为 {priority}')
self.subscribeoper.update(subscribe.id, {
"current_priority": priority
})

def finish_subscribe_or_not(self, subscribe: Subscribe, meta: MetaInfo, mediainfo: MediaInfo,
downloads: List[Context] = None,
@@ -432,9 +434,12 @@ def finish_subscribe_or_not(subscribe: Subscribe, meta: MetaInfo, mediainf
# 未下载到内容且不完整
logger.info(f'{mediainfo.title_year} 未下载完整,继续订阅 ...')
elif downloads:
# 洗板,下载到了内容,更新资源优先级
# 洗版下载到了内容,更新资源优先级
self.update_subscribe_priority(subscribe=subscribe, meta=meta,
mediainfo=mediainfo, downloads=downloads)
elif subscribe.current_priority == 100:
# 洗版完成
self.__finish_subscribe(subscribe=subscribe, meta=meta, mediainfo=mediainfo)
else:
# 洗版,未下载到内容
logger.info(f'{mediainfo.title_year} 继续洗版 ...')
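The new elif on subscribe.current_priority == 100 lets a best-version ("洗版") subscription finish on a run where nothing new was downloaded, provided an earlier run already stored the top priority. A simplified standalone sketch of that decision order follows; the names are illustrative, and the real method also updates metadata and sends notifications that are not shown here.

from typing import List, Optional

BEST_PRIORITY = 100  # value this commit treats as "no better release exists"


def decide_best_version(downloads: Optional[List], current_priority: int) -> str:
    if downloads:
        # Something was grabbed: recompute and persist the highest priority,
        # which may itself reach 100 and finish the subscription.
        return "update_priority"
    if current_priority == BEST_PRIORITY:
        # Nothing new this run, but the stored priority already hit 100 earlier.
        return "finish"
    return "keep_upgrading"


print(decide_best_version(downloads=None, current_priority=100))  # finish
print(decide_best_version(downloads=None, current_priority=80))   # keep_upgrading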
@@ -818,6 +823,8 @@ def __get_downloaded(subscribe: Subscribe) -> List[int]:
"""
获取已下载过的集数或电影
"""
if subscribe.best_version:
return []
note = subscribe.note or []
if not note:
return []
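Guarding __get_downloaded with subscribe.best_version means upgrade subscriptions ignore their download history, so previously grabbed episodes stay eligible for a better release. A tiny illustrative sketch of that behaviour; the real method parses subscribe.note rather than taking a list directly.

from typing import List


def downloaded_items(best_version: bool, note: List[int]) -> List[int]:
    # Best-version subscriptions report nothing as downloaded, keeping every
    # episode open for re-download at a higher priority.
    if best_version:
        return []
    return note or []


print(downloaded_items(True, [1, 2, 3]))   # []
print(downloaded_items(False, [1, 2, 3]))  # [1, 2, 3]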
@@ -861,23 +868,20 @@ def __update_lack_episodes(self, lefts: Dict[Union[int, str], Dict[int, NotExist
lack_episode = len(left_episodes)
logger.info(f"{mediainfo.title_year}{season} 更新缺失集数为{lack_episode} ...")
break
update_data = {"lack_episode": lack_episode}
update_data["lack_episode"] = lack_episode
# 更新数据库
if update_data:
self.subscribeoper.update(subscribe.id, update_data)

def __finish_subscribe(self, subscribe: Subscribe, mediainfo: MediaInfo,
meta: MetaBase, bestversion: bool = False):
def __finish_subscribe(self, subscribe: Subscribe, mediainfo: MediaInfo, meta: MetaBase):
"""
完成订阅
"""
# 如果订阅状态为待定(P),说明订阅信息尚未完全更新,无法完成订阅
if subscribe.state == "P":
return
# 完成订阅
msgstr = "订阅"
if bestversion:
msgstr = "洗版"
msgstr = "订阅" if not subscribe.best_version else "洗版"
logger.info(f'{mediainfo.title_year} 完成{msgstr}')
# 新增订阅历史
self.subscribeoper.add_history(**subscribe.to_dict())
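__finish_subscribe now derives the 订阅/洗版 label from subscribe.best_version instead of a separate bestversion parameter, so callers can no longer pass a flag that disagrees with the record. A one-line sketch of that choice; the helper name is illustrative.

def completion_label(best_version: bool) -> str:
    # Derived from the Subscribe record itself rather than a caller-supplied flag.
    return "洗版" if best_version else "订阅"


print(completion_label(True))   # 洗版, best-version upgrade finished
print(completion_label(False))  # 订阅, ordinary subscription finished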
@@ -1291,25 +1295,30 @@ def check_and_handle_existing_media(self, subscribe: Subscribe, meta: MetaInfo,
totals=totals
)
else:
# 洗版
exist_flag = False
if meta.type == MediaType.TV:
# 对于电视剧,构造缺失的媒体信息
no_exists = {
mediakey: {
subscribe.season: NotExistMediaInfo(
season=subscribe.season,
episodes=[],
total_episode=subscribe.total_episode,
start_episode=subscribe.start_episode or 1)
}
}
else:
# 洗版,如果已经满足了优先级,则认为已经洗版完成
if subscribe.current_priority == 100:
exist_flag = True
no_exists = {}
else:
exist_flag = False
if meta.type == MediaType.TV:
# 对于电视剧,构造缺失的媒体信息
no_exists = {
mediakey: {
subscribe.season: NotExistMediaInfo(
season=subscribe.season,
episodes=[],
total_episode=subscribe.total_episode,
start_episode=subscribe.start_episode or 1)
}
}
else:
no_exists = {}

# 如果媒体已存在,执行订阅完成操作
if exist_flag:
logger.info(f'{mediainfo.title_year} 媒体库中已存在')
if not subscribe.best_version:
logger.info(f'{mediainfo.title_year} 媒体库中已存在')
self.finish_subscribe_or_not(subscribe=subscribe, meta=meta, mediainfo=mediainfo, force=True)
return True, no_exists

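For best-version subscriptions, check_and_handle_existing_media now treats a stored priority of 100 as already upgraded and otherwise fabricates a missing-season entry so the whole season is searched again. A self-contained sketch of that branch for TV follows; NotExistMediaInfo here is a stand-in dataclass, and mediakey and subscribe are duck-typed placeholders.

from dataclasses import dataclass, field
from typing import Dict, List, Tuple


@dataclass
class NotExistMediaInfo:  # stand-in for the project's schema of the same name
    season: int
    episodes: List[int] = field(default_factory=list)  # empty list = whole season
    total_episode: int = 0
    start_episode: int = 1


def best_version_no_exists(mediakey, subscribe) -> Tuple[bool, Dict]:
    """TV-only sketch of the branch added in check_and_handle_existing_media."""
    if subscribe.current_priority == 100:
        # The best release is already on disk: nothing is missing, finish up.
        return True, {}
    return False, {
        mediakey: {
            subscribe.season: NotExistMediaInfo(
                season=subscribe.season,
                episodes=[],
                total_episode=subscribe.total_episode,
                start_episode=subscribe.start_episode or 1,
            )
        }
    }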
2 changes: 2 additions & 0 deletions app/core/config.py
@@ -481,6 +481,7 @@ def CACHE_CONF(self):
"refresh": 100,
"tmdb": 1024,
"douban": 512,
"bangumi": 512,
"fanart": 512,
"meta": (self.META_CACHE_EXPIRE or 24) * 3600
}
@@ -489,6 +490,7 @@ def CACHE_CONF(self):
"refresh": 50,
"tmdb": 256,
"douban": 256,
"bangumi": 256,
"fanart": 128,
"meta": (self.META_CACHE_EXPIRE or 2) * 3600
}
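The only change here is a new "bangumi" entry in both cache profiles; bangumi.py further down uses it together with the existing "meta" TTL to size its response cache. A standalone sketch with the large-profile values from this hunk (the small profile uses 256 and a 2-hour default expiry):

from cachetools import TTLCache

# Rough shape of the large-memory profile after this change (values from the hunk).
cache_conf = {
    "refresh": 100,
    "tmdb": 1024,
    "douban": 512,
    "bangumi": 512,     # new key added by this commit
    "fanart": 512,
    "meta": 24 * 3600,  # META_CACHE_EXPIRE default of 24 hours, in seconds
}

# bangumi.py sizes its TTL cache from these two entries.
bangumi_cache = TTLCache(maxsize=cache_conf["bangumi"], ttl=cache_conf["meta"])
print(bangumi_cache.maxsize, bangumi_cache.ttl)  # 512 86400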
10 changes: 5 additions & 5 deletions app/core/security.py
@@ -286,7 +286,7 @@ def decrypt(data: bytes, key: bytes) -> Optional[bytes]:
return None


def encrypt_message(message: str, key: bytes):
def encrypt_message(message: str, key: bytes) -> str:
"""
使用给定的key对消息进行加密,并返回加密后的字符串
"""
@@ -295,14 +295,14 @@ def encrypt_message(message: str, key: bytes):
return encrypted_message.decode()


def hash_sha256(message):
def hash_sha256(message: str) -> str:
"""
对字符串做hash运算
"""
return hashlib.sha256(message.encode()).hexdigest()


def aes_decrypt(data, key):
def aes_decrypt(data: str, key: str) -> str:
"""
AES解密
"""
@@ -322,7 +322,7 @@ def aes_decrypt(data, key):
return result.decode('utf-8')


def aes_encrypt(data, key):
def aes_encrypt(data: str, key: str) -> str:
"""
AES加密
"""
@@ -338,7 +338,7 @@ def aes_encrypt(data, key):
return base64.b64encode(cipher.iv + result).decode('utf-8')


def nexusphp_encrypt(data_str: str, key):
def nexusphp_encrypt(data_str: str, key: bytes) -> str:
"""
NexusPHP加密
"""
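This file only gains type annotations, str in and str out for the hashing and AES helpers, with no behavioral change. A usage sketch under those signatures; the key literal is a placeholder and must match whatever format the existing implementation expects.

from app.core.security import aes_decrypt, aes_encrypt, hash_sha256

key = "0123456789abcdef"                      # placeholder key, not a real secret
token = aes_encrypt("hello moviepilot", key)  # base64 text, safe to persist
plain = aes_decrypt(token, key)               # back to "hello moviepilot"
digest = hash_sha256(plain)                   # hex digest of the str input
print(token, plain, digest, sep="\n")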
4 changes: 2 additions & 2 deletions app/db/models/siteuserdata.py
@@ -1,6 +1,6 @@
from datetime import datetime

from sqlalchemy import Column, Integer, String, Sequence, Float, JSON, func
from sqlalchemy import Column, Integer, String, Sequence, Float, JSON, func, or_
from sqlalchemy.orm import Session

from app.db import db_query, Base
@@ -81,7 +81,7 @@ def get_latest(db: Session):
func.max(SiteUserData.updated_day).label('latest_update_day')
)
.group_by(SiteUserData.domain)
.filter(SiteUserData.err_msg.is_(None))
.filter(or_(SiteUserData.err_msg.is_(None), SiteUserData.err_msg == ""))
.subquery()
)

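get_latest previously kept only rows whose err_msg was NULL; together with the site_oper.py change below, which now stores an empty string, the filter has to accept both. A self-contained sketch of the relaxed predicate; the model here is a minimal stand-in, and only the or_(... .is_(None), ... == "") condition mirrors the diff.

from sqlalchemy import Column, Integer, String, create_engine, or_, select
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class SiteUserData(Base):  # minimal stand-in for the real model
    __tablename__ = "siteuserdata"
    id = Column(Integer, primary_key=True)
    domain = Column(String)
    err_msg = Column(String)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as db:
    db.add_all([
        SiteUserData(domain="null-ok", err_msg=None),
        SiteUserData(domain="empty-ok", err_msg=""),
        SiteUserData(domain="failed", err_msg="login failed"),
    ])
    db.commit()
    # NULL and "" are both treated as "no error", matching the new filter.
    domains = db.execute(
        select(SiteUserData.domain)
        .filter(or_(SiteUserData.err_msg.is_(None), SiteUserData.err_msg == ""))
    ).scalars().all()
    print(domains)  # ['null-ok', 'empty-ok']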
3 changes: 2 additions & 1 deletion app/db/site_oper.py
@@ -114,7 +114,8 @@ def update_userdata(self, domain: str, name: str, payload: dict) -> Tuple[bool,
"domain": domain,
"name": name,
"updated_day": current_day,
"updated_time": current_time
"updated_time": current_time,
"err_msg": payload.get("err_msg") or ""
})
# 按站点+天判断是否存在数据
siteuserdatas = SiteUserData.get_by_domain(self._db, domain=domain, workdate=current_day)
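update_userdata now persists err_msg as an empty string whenever the payload carries no error text, which is the counterpart of the relaxed filter above. A tiny sketch of that normalization; the helper name is illustrative.

def normalized_err_msg(payload: dict) -> str:
    # None or missing err_msg collapses to "" before the row is persisted, so
    # "no error" is represented consistently for new records.
    return payload.get("err_msg") or ""


print(repr(normalized_err_msg({})))                           # ''
print(repr(normalized_err_msg({"err_msg": None})))            # ''
print(repr(normalized_err_msg({"err_msg": "login failed"})))  # 'login failed'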
4 changes: 3 additions & 1 deletion app/modules/bangumi/bangumi.py
@@ -2,7 +2,9 @@
from functools import lru_cache

import requests
from cachetools import TTLCache, cached

from app.core.config import settings
from app.utils.http import RequestUtils


@@ -28,7 +30,7 @@ def __init__(self):
pass

@classmethod
@lru_cache(maxsize=128)
@cached(cache=TTLCache(maxsize=settings.CACHE_CONF["bangumi"], ttl=settings.CACHE_CONF["meta"]))
def __invoke(cls, url, **kwargs):
req_url = cls._base_url + url
params = {}
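Replacing functools.lru_cache with cachetools' @cached over a TTLCache bounds the Bangumi response cache by the new CACHE_CONF["bangumi"] size and, unlike lru_cache, lets entries expire after the configured "meta" TTL. A standalone sketch of the pattern; the fetch function, URL, and values are illustrative.

import time

from cachetools import TTLCache, cached


@cached(cache=TTLCache(maxsize=512, ttl=3600))
def fetch(url: str) -> str:
    # With lru_cache the first result would be reused for the process lifetime;
    # with TTLCache it is recomputed once ttl seconds have elapsed.
    return f"fetched {url} at {time.time():.0f}"


print(fetch("https://api.example.org/calendar"))
print(fetch("https://api.example.org/calendar"))  # same string: served from cache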
