2023.01.30 버그 픽스

This commit is contained in:
2023-01-30 19:26:57 +09:00
parent a9ea8f193f
commit cae6985aec
5 changed files with 888 additions and 187 deletions

View File

@@ -73,25 +73,6 @@ name = "linkkf"
class LogicLinkkf(LogicModuleBase):
    # Default plugin settings persisted via P.ModelSetting (key -> default string value).
    # NOTE(review): __init__ re-declares self.db_default with different defaults —
    # confirm which set the framework actually registers.
    db_default = {
        "linkkf_db_version": "1",
        "linkkf_url": "https://linkkf.app",
        "linkkf_download_path": os.path.join(path_data, P.package_name, "linkkf"),
        "linkkf_auto_make_folder": "True",
        "linkkf_auto_make_season_folder": "True",
        "linkkf_finished_insert": "[완결]",
        "linkkf_max_ffmpeg_process_count": "1",
        "linkkf_order_desc": "True",
        "linkkf_auto_start": "False",
        "linkkf_interval": "* 5 * * *",
        "linkkf_auto_mode_all": "False",
        "linkkf_auto_code_list": "all",
        "linkkf_current_code": "",
        "linkkf_uncompleted_auto_enqueue": "False",
        "linkkf_image_url_prefix_series": "",
        "linkkf_image_url_prefix_episode": "",
        "linkkf_discord_notify": "True",
    }
    # HTTP headers used for the current scraping session (None until set).
    current_headers = None
    # Cached data of the series currently being browsed.
    current_data = None
    # Referer URL applied when fetching player pages.
    referer = None
@@ -116,9 +97,38 @@ class LogicLinkkf(LogicModuleBase):
def __init__(self, P):
    """Initialize the linkkf module.

    Sets the module name, per-key setting defaults, and registers the
    socketio routes for download-status push updates.
    """
    super(LogicLinkkf, self).__init__(P, "setting", scheduler_desc="linkkf 자동 다운로드")
    self.queue = None
    # Fixed: self.name was assigned twice (literal "linkkf", then the
    # module-level `name`, which holds the same value); keep a single source.
    self.name = name
    self.db_default = {
        "linkkf_db_version": "1",
        "linkkf_url": "https://linkkf.app",
        f"{self.name}_recent_code": "",
        "linkkf_download_path": os.path.join(path_data, P.package_name, "linkkf"),
        "linkkf_save_path": os.path.join(path_data, P.package_name, "linkkf"),
        "linkkf_auto_make_folder": "True",
        "linkkf_auto_make_season_folder": "True",
        "linkkf_finished_insert": "[완결]",
        "linkkf_max_ffmpeg_process_count": "2",
        f"{self.name}_max_download_count": "2",
        f"{self.name}_quality": "720p",
        "linkkf_order_desc": "False",
        "linkkf_auto_start": "False",
        "linkkf_interval": "* 5 * * *",
        "linkkf_auto_mode_all": "False",
        "linkkf_auto_code_list": "all",
        "linkkf_current_code": "",
        "linkkf_uncompleted_auto_enqueue": "False",
        "linkkf_image_url_prefix_series": "",
        "linkkf_image_url_prefix_episode": "",
        "linkkf_discord_notify": "True",
    }
    self.current_data = None
    default_route_socketio(P, self)
@staticmethod
def db_init():
    # No schema migration implemented yet; placeholder hook called by the framework.
    pass
def process_menu(self, sub, req):
arg = P.ModelSetting.to_dict()
arg["sub"] = self.name
@@ -196,15 +206,18 @@ class LogicLinkkf(LogicModuleBase):
ret["ret"] = self.add(info)
return jsonify(ret)
elif sub == "entity_list":
pass
return jsonify(self.queue.get_entity_list())
elif sub == "queue_command":
pass
ret = self.queue.command(
req.form["command"], int(req.form["entity_id"])
)
return jsonify(ret)
elif sub == "add_queue_checked_list":
pass
elif sub == "web_list":
pass
return jsonify(ModelLinkkfItem.web_list(request))
elif sub == "db_remove":
pass
return jsonify(ModelLinkkfItem.delete_by_id(req.form["id"]))
elif sub == "add_whitelist":
pass
@@ -278,6 +291,382 @@ class LogicLinkkf(LogicModuleBase):
timeout=10,
).content.decode("utf8", errors="replace")
@staticmethod
def add_whitelist(*args):
    """Add a series code to the auto-download whitelist setting.

    With no argument the code of the currently viewed series
    (LogicLinkkf.current_data["code"]) is used; otherwise args[0] is the code.
    Returns current_data (no-arg call) or a result dict with "ret"/"code"/"log".
    """
    ret = {}
    logger.debug(f"args: {args}")
    try:
        if len(args) == 0:
            code = str(LogicLinkkf.current_data["code"])
        else:
            code = str(args[0])
        logger.debug(code)  # was a bare print(); route through the logger
        whitelist_program = P.ModelSetting.get("linkkf_auto_code_list")
        # Setting value is a list of codes separated by '|' or newlines.
        whitelist_programs = [
            str(x.strip()) for x in whitelist_program.replace("\n", "|").split("|")
        ]
        if code not in whitelist_programs:
            whitelist_programs.append(code)
            # Drop blank entries before re-joining.
            whitelist_programs = [x for x in whitelist_programs if x != ""]
            whitelist_program = "|".join(whitelist_programs)
            # Row-level lock so concurrent adds do not lose updates.
            entity = (
                db.session.query(P.ModelSetting)
                .filter_by(key="linkkf_auto_code_list")
                .with_for_update()
                .first()
            )
            entity.value = whitelist_program
            db.session.commit()
            ret["ret"] = True
            ret["code"] = code
            if len(args) == 0:
                return LogicLinkkf.current_data
            else:
                return ret
        else:
            ret["ret"] = False
            ret["log"] = "이미 추가되어 있습니다."
    except Exception as e:
        logger.error("Exception:%s", e)
        logger.error(traceback.format_exc())
        ret["ret"] = False
        ret["log"] = str(e)
    return ret
def setting_save_after(self):
    """After a settings save, push the ffmpeg process-count limit to the queue."""
    if self.queue.get_max_ffmpeg_count() == P.ModelSetting.get_int(
        "linkkf_max_ffmpeg_process_count"
    ):
        return  # unchanged — nothing to apply
    self.queue.set_max_ffmpeg_count(
        P.ModelSetting.get_int("linkkf_max_ffmpeg_process_count")
    )
@staticmethod
def get_video_url_from_url(url, url2):
    """Resolve the playable stream URL for an episode player page.

    url  -- the episode page URL (logging / referer fallback)
    url2 -- the player/iframe URL; its host substring selects the routine
    Returns [video_url, referer_url, vtt_url]; entries remain None on failure.
    Added @staticmethod: the method takes no self/cls and is always invoked
    as LogicLinkkf.get_video_url_from_url (including recursively below).
    """
    video_url = None
    referer_url = None
    vtt_url = None
    LogicLinkkf.referer = url2
    try:
        if "ani1" in url2:
            # ani1 family: visit the page, follow the first server option,
            # then pull the m3u8 (or <source src>) out of the player HTML.
            logger.debug("ani1 routine=========================")
            LogicLinkkf.referer = "https://linkkf.app"
            ani1_html = LogicLinkkf.get_html(url2)
            tree = html.fromstring(ani1_html)
            option_url = tree.xpath("//select[@id='server-list']/option[1]/@value")
            data = LogicLinkkf.get_html(option_url[0])
            regex2 = r'"([^\"]*m3u8)"|<source[^>]+src=\"([^"]+)'
            temp_url = re.findall(regex2, data)[0]
            video_url = ""
            # The regex has two alternative groups; keep whichever matched.
            for i in temp_url:
                if i is None:
                    continue
                video_url = i
            data_tree = html.fromstring(data)
            # Raises IndexError (caught below) when the page has no subtitle track.
            vtt_elem = data_tree.xpath("//track/@src")[0]
            match = re.compile(
                r"<track.+src=\"(?P<vtt_url>.*?.vtt)\"", re.MULTILINE
            ).search(data)
            vtt_url = match.group("vtt_url")
            referer_url = "https://kfani.me/"
        elif "kfani" in url2:
            # kfani family: the page embeds the m3u8 directly.
            logger.debug("kfani routine=================================")
            LogicLinkkf.referer = url2
            data = LogicLinkkf.get_html(url2)
            regex2 = r'"([^\"]*m3u8)"|<source[^>]+src=\"([^"]+)'
            temp_url = re.findall(regex2, data)[0]
            video_url = ""
            for i in temp_url:
                if i is None:
                    continue
                video_url = i
            # Subtitles appear either as <track src="..."> or a js `url: '...'`.
            vtt_url = None
            try:
                _match1 = re.compile(
                    r"<track.+src=\"(?P<vtt_url>.*?.vtt)", re.MULTILINE
                ).search(data)
                vtt_url = _match1.group("vtt_url")
            except Exception:  # narrowed from a bare except:
                _match2 = re.compile(
                    r"url: \'(?P<vtt_url>.*?.vtt)", re.MULTILINE
                ).search(data)
                vtt_url = _match2.group("vtt_url")
            logger.info("vtt_url: %s", vtt_url)
            referer_url = url2
        elif "kftv" in url2:
            # kftv family: fetch JSON from getLinkStreamMd5/<md5> and read
            # the stream URL from its first entry.
            if "=" in url2:
                md5 = urlparse.urlparse(url2).query.split("=")[1]
            elif "embedplay" in url2:
                md5 = url2.split("/")[-1]
            # NOTE(review): if neither case matches, md5 is unbound and the
            # NameError is swallowed by the outer except — confirm intended.
            url3 = "https://yt.kftv.live/getLinkStreamMd5/" + md5
            data3 = LogicLinkkf.get_html(url3)
            data3dict = json.loads(data3)
            video_url = data3dict[0]["file"]
        elif "k40chan" in url2:
            # k40chan family: same m3u8 embedding as kfani.
            logger.debug("k40chan routine=================================")
            LogicLinkkf.referer = url2
            data = LogicLinkkf.get_html(url2)
            regex2 = r'"([^\"]*m3u8)"|<source[^>]+src=\"([^"]+)'
            temp_url = re.findall(regex2, data)[0]
            video_url = ""
            for i in temp_url:
                if i is None:
                    continue
                video_url = i
            match = re.compile(r"<track.+src\=\"(?P<vtt_url>.*?.vtt)").search(data)
            vtt_url = match.group("vtt_url")
            referer_url = url2
        elif "linkkf" in url2:
            # Fixed: was logger.deubg(...) which raised AttributeError.
            logger.debug("linkkf routine")
            # linkkf family: page holds a cat1 = [...] list of candidate URLs;
            # pick one at random and visit it for the media URL.
            referer_url = url2
            data2 = LogicLinkkf.get_html(url2)
            regex = r"cat1 = [^\[]*([^\]]*)"
            cat = re.findall(regex, data2)[0]
            regex = r"\"([^\"]*)\""
            url3s = re.findall(regex, cat)
            url3 = random.choice(url3s)
            if "kftv" in url3:
                return LogicLinkkf.get_video_url_from_url(url2, url3)
            elif url3.startswith("/"):
                url3 = urlparse.urljoin(url2, url3)
                print("url3 = ", url3)
                LogicLinkkf.referer = url2
                data3 = LogicLinkkf.get_html(url3)
                regex2 = r'"([^\"]*mp4|m3u8)"'
                video_url = re.findall(regex2, data3)[0]
                referer_url = url3
            else:
                logger.error("새로운 유형의 url 발생! %s %s %s" % (url, url2, url3))
        elif "kakao" in url2:
            # kakao clips: resolved through an external helper API.
            payload = {"inputUrl": url2}
            kakao_url = (
                "http://webtool.cusis.net/wp-pages/download-kakaotv-video/video.php"
            )
            data2 = requests.post(
                kakao_url,
                json=payload,
                headers={
                    "referer": "http://webtool.cusis.net/download-kakaotv-video/"
                },
            ).content
            time.sleep(3)  # throttle: the API blocks IPs that post too quickly
            url3 = json.loads(data2)
            video_url = url3
        elif "#V" in url2:  # "#V" URL pattern
            print("#v routine")
            data2 = LogicLinkkf.get_html(url2)
            regex = r"cat1 = [^\[]*([^\]]*)"
            cat = re.findall(regex, data2)[0]
            regex = r"\"([^\"]*)\""
            url3s = re.findall(regex, cat)
            url3 = random.choice(url3s)
            if "kftv" in url3:
                return LogicLinkkf.get_video_url_from_url(url2, url3)
            elif url3.startswith("/"):
                url3 = urlparse.urljoin(url2, url3)
                LogicLinkkf.referer = url2
                data3 = LogicLinkkf.get_html(url3)
                regex2 = r'"([^\"]*mp4)"'
                video_url = re.findall(regex2, data3)[0]
            else:
                logger.error("새로운 유형의 url 발생! %s %s %s" % (url, url2, url3))
        elif "#M2" in url2:
            LogicLinkkf.referer = url2
            data2 = LogicLinkkf.get_html(url2)
            regex = r"cat1 = [^\[]*([^\]]*)"
            cat = re.findall(regex, data2)[0]
            regex = r"\"([^\"]*)\""
            url3s = re.findall(regex, cat)
            url3 = random.choice(url3s)
            if "kftv" in url3:
                return LogicLinkkf.get_video_url_from_url(url2, url3)
            elif url3.startswith("/"):
                url3 = urlparse.urljoin(url2, url3)
                LogicLinkkf.referer = url2
                data3 = LogicLinkkf.get_html(url3)
                regex2 = r'"([^\"]*mp4)"'
                video_url = re.findall(regex2, data3)[0]
            else:
                logger.error("새로운 유형의 url 발생! %s %s %s" % (url, url2, url3))
        elif "😀#i" in url2:
            # These pattern branches only select a candidate url3; no
            # video_url is resolved here (preserved as-is).
            LogicLinkkf.referer = url2
            data2 = LogicLinkkf.get_html(url2)
            regex = r"cat1 = [^\[]*([^\]]*)"
            cat = re.findall(regex, data2)[0]
            regex = r"\"([^\"]*)\""
            url3s = re.findall(regex, cat)
            url3 = random.choice(url3s)
        elif "#k" in url2:
            data2 = LogicLinkkf.get_html(url2)
            regex = r"cat1 = [^\[]*([^\]]*)"
            cat = re.findall(regex, data2)[0]
            regex = r"\"([^\"]*)\""
            url3s = re.findall(regex, cat)
            url3 = random.choice(url3s)
        elif "#k2" in url2:
            # NOTE(review): unreachable — any url containing "#k2" already
            # matched the "#k" branch above.
            data2 = LogicLinkkf.get_html(url2)
            regex = r"cat1 = [^\[]*([^\]]*)"
            cat = re.findall(regex, data2)[0]
            regex = r"\"([^\"]*)\""
            url3s = re.findall(regex, cat)
            url3 = random.choice(url3s)
        elif "mopipi" in url2:
            LogicLinkkf.referer = url
            data2 = LogicLinkkf.get_html(url2)
            match = re.compile(r"src\=\"(?P<video_url>http.*?\.mp4)").search(data2)
            video_url = match.group("video_url")
            match = re.compile(r"src\=\"(?P<vtt_url>http.*?.vtt)").search(data2)
            # Fixed: logged match.group("video_url") on the vtt match, which
            # raised IndexError (no such group) — log the vtt group instead.
            logger.info("match group: %s", match.group("vtt_url"))
            vtt_url = match.group("vtt_url")
        else:
            logger.error("새로운 유형의 url 발생! %s %s" % (url, url2))
    except Exception as e:
        logger.error("Exception:%s", e)
        logger.error(traceback.format_exc())
    return [video_url, referer_url, vtt_url]
@staticmethod
def get_html_episode_content(url: str) -> str:
    """Return the player HTML for an episode page.

    Absolute URLs are fetched directly. Site-relative URLs are resolved
    against linkkf.app, the embedded `var player_data=...` script is parsed,
    and the real player iframe is fetched from s2.ani1c12.top.
    """
    if url.startswith("http"):
        html_data = LogicLinkkf.get_html(url)
    else:
        url = f"https://linkkf.app{url}"
        logger.info("get_video_url(): url: %s" % url)
        data = LogicLinkkf.get_html(url)
        # Fixed: the document was parsed twice in a row; parse once.
        tree = html.fromstring(data)
        pattern = re.compile("var player_data=(.*)")
        iframe_info = None
        for js_script in tree.xpath("//script"):
            # Fixed: pattern.match was evaluated twice per script; match once.
            match_data = pattern.match(js_script.text_content())
            if match_data:
                # player_data is almost-JSON; quote the bare "path" key first.
                iframe_info = json.loads(
                    match_data.groups()[0].replace("path:", '"path":')
                )
        # iframe url: https://s2.ani1c12.top/player/index.php?data=<player_data.url>
        # NOTE(review): if no script matched, iframe_info is None and the next
        # line raises TypeError (as before); callers catch broadly.
        url = f'https://s2.ani1c12.top/player/index.php?data={iframe_info["url"]}'
        html_data = LogicLinkkf.get_html(url)
    return html_data
def get_anime_info(self, cate, page):
try:
if cate == "ing":
@@ -580,7 +969,211 @@ class LogicLinkkf(LogicModuleBase):
logger.error(f"Exception: {str(e)}")
logger.error(traceback.format_exc())
pass
def add(self, episode_info):
    """Enqueue an episode for download unless it is already queued or done.

    Returns one of: "queue_exist", "enqueue_db_append", "enqueue_db_exist",
    "db_completed".
    """
    print("episode_info")
    logger.debug(episode_info)
    if self.is_exist(episode_info):
        return "queue_exist"
    db_entity = ModelLinkkfItem.get_by_linkkf_id(episode_info["_id"])
    logger.debug("db_entity:::> %s", db_entity)
    if db_entity is None:
        # First sighting: persist a DB row, then hand it to the ffmpeg queue.
        entity = LinkkfQueueEntity(P, self, episode_info)
        logger.debug("entity:::> %s", entity.as_dict())
        ModelLinkkfItem.append(entity.as_dict())
        self.queue.add_queue(entity)
        return "enqueue_db_append"
    if db_entity.status != "completed":
        # Known but unfinished: re-enqueue without creating a new row.
        entity = LinkkfQueueEntity(P, self, episode_info)
        self.queue.add_queue(entity)
        return "enqueue_db_exist"
    return "db_completed"
def is_exist(self, info):
    """True if an entity with the same _id is already in the download queue."""
    target = info["_id"]
    return any(entity.info["_id"] == target for entity in self.queue.entity_list)
def plugin_load(self):
    """Plugin startup hook: create the ffmpeg download queue and start it."""
    try:
        logger.debug("%s plugin_load", P.package_name)
        max_count = P.ModelSetting.get_int("linkkf_max_ffmpeg_process_count")
        self.queue = FfmpegQueue(P, max_count)
        self.current_data = None
        self.queue.queue_start()
    except Exception as exc:
        logger.error("Exception:%s", exc)
        logger.error(traceback.format_exc())
@staticmethod
def plugin_unload():
    """Plugin shutdown hook: drop the scheduled recent-check job."""
    try:
        logger.debug("%s plugin_unload", P.package_name)
        job_id = "%s_recent" % P.package_name
        scheduler.remove_job(job_id)
    except Exception as exc:
        logger.error("Exception:%s", exc)
        logger.error(traceback.format_exc())
@staticmethod
def reset_db() -> bool:
    """Wipe every ModelLinkkfItem row and commit; always returns True."""
    query = db.session.query(ModelLinkkfItem)
    query.delete()
    db.session.commit()
    return True
class LinkkfQueueEntity(FfmpegQueueEntity):
    """One queued episode download, consumed by the FfmpegQueue."""

    def __init__(self, P, module_logic, info):
        super(LinkkfQueueEntity, self).__init__(P, module_logic, info)
        self._vi = None
        self.url = None          # resolved stream URL
        self.epi_queue = None    # episode queue index
        self.filepath = None
        self.savepath = None
        self.quality = None
        self.filename = None
        self.vtt = None          # subtitle (.vtt) URL
        self.season = 1
        self.content_title = None
        self.srt_url = None
        self.headers = None
        # TODO(review): original note said this call was provisionally
        # commented in/out — confirm it should run in the constructor.
        self.make_episode_info()

    def refresh_status(self):
        # Push current state to the web UI over socketio.
        self.module_logic.socketio_callback("status", self.as_dict())

    def info_dict(self, tmp):
        """Merge this entity's info and derived fields into tmp and return it."""
        for key, value in self.info.items():
            tmp[key] = value
        tmp["vtt"] = self.vtt
        tmp["season"] = self.season
        tmp["content_title"] = self.content_title
        tmp["linkkf_info"] = self.info
        tmp["epi_queue"] = self.epi_queue
        return tmp

    def download_completed(self):
        # Mark the DB row as completed with a completion timestamp.
        db_entity = ModelLinkkfItem.get_by_linkkf_id(self.info["_id"])
        if db_entity is not None:
            db_entity.status = "completed"
            db_entity.complated_time = datetime.now()
            db_entity.save()

    def donwload_completed(self):
        # NOTE(review): duplicate of download_completed under a misspelled
        # name — possibly kept because an external caller uses this spelling;
        # confirm before removing.
        db_entity = ModelLinkkfItem.get_by_linkkf_id(self.info["_id"])
        if db_entity is not None:
            db_entity.status = "completed"
            db_entity.complated_time = datetime.now()
            db_entity.save()

    # Get episode info from the site.
    def make_episode_info(self):
        """Collect candidate player URLs for this episode and resolve one.

        NOTE(review): returns video_url mid-way instead of storing it on the
        entity, and the statements after that return are unreachable —
        confirm this is intended.
        """
        url2s = []
        url = None
        logger.debug(self.info)
        logger.debug(self.info["url"])
        try:
            data = LogicLinkkf.get_html_episode_content(self.info["url"])
            tree = html.fromstring(data)
            xpath_select_query = '//*[@id="body"]/div/span/center/select/option'
            if len(tree.xpath(xpath_select_query)) > 0:
                # Page variant with an inline server switcher (by k45734).
                print("ok")
                xpath_select_query = '//select[@class="switcher"]/option'
                for tag in tree.xpath(xpath_select_query):
                    url2s2 = tag.attrib["value"]
                    if "k40chan" in url2s2:
                        pass  # skipped host
                    elif "ani1c12" in url2s2:
                        pass  # skipped host
                    else:
                        url2s.append(url2s2)
            else:
                # Page variant with player_data JSON: extract its url and
                # fetch the real player page for the server list.
                print(":: else ::")
                tt = re.search(r"var player_data=(.*?)<", data, re.S)
                json_string = tt.group(1)
                tt2 = re.search(r'"url":"(.*?)"', json_string, re.S)
                json_string2 = tt2.group(1)
                ttt = "https://s2.ani1c12.top/player/index.php?data=" + json_string2
                response = LogicLinkkf.get_html(ttt)
                tree = html.fromstring(response)
                xpath_select_query = '//select[@id="server-list"]/option'
                for tag in tree.xpath(xpath_select_query):
                    url2s2 = tag.attrib["value"]
                    if "ds" in url2s2:
                        pass  # skipped host
                    else:
                        url2s.append(url2s2)
            logger.info("dx: urls2:: %s", url2s)
            video_url = None
            referer_url = None  # dx
            # Try candidates until one resolves; errors are logged per-candidate.
            for url2 in url2s:
                try:
                    if video_url is not None:
                        continue
                    ret = LogicLinkkf.get_video_url_from_url(url, url2)
                    logger.debug(f"ret::::> {ret}")
                    if ret is not None:
                        video_url = ret
                        referer_url = url2
                except Exception as e:
                    logger.error("Exception:%s", e)
                    logger.error(traceback.format_exc())
            return video_url
            # --- unreachable below (kept as-is) ---
            logger.info("dx: urls2:: %s", url2s)
            video_url = None
            referer_url = None  # dx
        except Exception as e:
            logger.error(f"Exception: {str(e)}")
            logger.error(traceback.format_exc())
class ModelLinkkfItem(db.Model):
@@ -596,7 +1189,7 @@ class ModelLinkkfItem(db.Model):
episode_no = db.Column(db.Integer)
title = db.Column(db.String)
episode_title = db.Column(db.String)
linkkf_va = db.Column(db.String)
# linkkf_va = db.Column(db.String)
linkkf_vi = db.Column(db.String)
linkkf_id = db.Column(db.String)
quality = db.Column(db.String)
@@ -609,8 +1202,8 @@ class ModelLinkkfItem(db.Model):
status = db.Column(db.String)
linkkf_info = db.Column(db.JSON)
def __init__(self):
    """Stamp the creation time on a new row.

    Fixed: was misspelled as __int__ (never called by Python) and used ==
    (a no-op comparison) instead of assignment, so created_time was never set.
    """
    self.created_time = datetime.now()

def __repr__(self):
    return repr(self.as_dict())
@@ -632,3 +1225,83 @@ class ModelLinkkfItem(db.Model):
@classmethod
def get_by_id(cls, idx):
    """Fetch a single row by primary key, or None."""
    query = db.session.query(cls).filter_by(id=idx)
    return query.first()
@classmethod
def get_by_linkkf_id(cls, linkkf_id):
    """Fetch a single row by its linkkf site id, or None."""
    query = db.session.query(cls).filter_by(linkkf_id=linkkf_id)
    return query.first()
@classmethod
def delete_by_id(cls, idx):
    """Delete the row with primary key idx, commit, and return True."""
    target = db.session.query(cls).filter_by(id=idx)
    target.delete()
    db.session.commit()
    return True
@classmethod
def web_list(cls, req):
    """Paginated item listing for the web UI.

    req.form keys (all optional): page, search_word, option, order.
    Returns {"list": [row dicts], "paging": paging info}.
    """
    ret = {}
    # .get with defaults instead of `x in req.form` double lookups;
    # removed the unused job_id local.
    page = int(req.form.get("page", 1))
    page_size = 30
    search = req.form.get("search_word", "")
    option = req.form.get("option", "all")
    order = req.form.get("order", "desc")
    query = cls.make_query(search=search, order=order, option=option)
    count = query.count()
    query = query.limit(page_size).offset((page - 1) * page_size)
    ret["list"] = [item.as_dict() for item in query.all()]
    ret["paging"] = Util.get_paging_info(count, page, page_size)
    return ret
@classmethod
def make_query(cls, search="", order="desc", option="all"):
    """Build the filtered and ordered query backing web_list."""
    query = db.session.query(cls)
    if search:
        if "|" in search:
            # OR across |-separated terms.
            conditions = [
                cls.filename.like("%" + term.strip() + "%")
                for term in search.split("|")
                if term != ""
            ]
            query = query.filter(or_(*conditions))
        elif "," in search:
            # AND across ,-separated terms.
            for term in search.split(","):
                if term != "":
                    query = query.filter(cls.filename.like("%" + term.strip() + "%"))
        else:
            query = query.filter(cls.filename.like("%" + search + "%"))
    if option == "completed":
        query = query.filter(cls.status == "completed")
    if order == "desc":
        return query.order_by(desc(cls.id))
    return query.order_by(cls.id)
@classmethod
def get_list_uncompleted(cls):
    """All rows whose status is anything other than 'completed'."""
    query = db.session.query(cls).filter(cls.status != "completed")
    return query.all()
@classmethod
def append(cls, q):
    """Create and save a DB row from a queue-entity dict (entity.as_dict())."""
    logger.debug(q)
    item = ModelLinkkfItem()
    item.content_code = q["program_code"]
    item.season = q["season"]
    item.episode_no = q["epi_queue"]
    item.title = q["content_title"]
    item.episode_title = q["title"]
    # item.linkkf_va = q["va"]  # column currently disabled in the model
    item.linkkf_code = q["code"]
    item.linkkf_id = q["_id"]
    item.quality = q["quality"]
    item.filepath = q["filepath"]
    item.filename = q["filename"]
    item.savepath = q["savepath"]
    item.video_url = q["url"]
    item.vtt_url = q["vtt"]
    # NOTE(review): assumes q["image"] is a non-empty sequence — IndexError otherwise.
    item.thumbnail = q["image"][0]
    item.status = "wait"
    item.linkkf_info = q["linkkf_info"]
    item.save()