Compare commits

..

57 Commits

Author SHA1 Message Date
a8486726f6 2024.08.21 patch.03 2024-08-21 19:39:50 +09:00
391a0ee861 2024.08.21 patch.02 2024-08-21 19:24:29 +09:00
408be433f2 2024.08.21 patch.01 2024-08-21 19:13:43 +09:00
c87e29f085 2024.08.13 19:22 patch.01 2024-08-13 19:26:16 +09:00
b27cd39aa4 2024.08.13 19:22 2024-08-13 19:22:47 +09:00
205c17ae4e edit fix3 2024-02-16 19:00:01 +09:00
e101a02886 error fix2 2024-02-16 15:07:14 +09:00
04c0e34db5 error fix 2024-02-13 15:23:57 +09:00
f1d5f1db68 main -> 2023.10.8 fix "작화" 2023-10-08 22:29:02 +09:00
f0eda8ef87 main -> 2023.10.5 fix 2023-10-05 21:20:18 +09:00
d4fcc9a633 main -> 2023.09.20 ohli24 버그 픽스 (.01. 다운로드 문제 해결)a 2023-09-23 22:48:21 +09:00
9ca8dcc3da main -> 2023.09.20 ohli24 버그 픽스 (.01. 다운로드 문제 해결)a 2023-09-20 21:57:21 +09:00
301806a906 main -> 2023.09.20 ohli24 버그 픽스 (.01. 다운로드 문제 해결)a 2023-09-20 14:40:13 +09:00
eca29b6947 main -> 2023.09.20 ohli24 버그 픽스 (.01. 다운로드 문제 해결)a 2023-09-20 14:32:56 +09:00
d07cc820dc main -> 2023.09.20 ohli24 버그 픽스 (.01. 다운로드 문제 해결)a 2023-09-19 23:47:52 +09:00
710d70dbfd main -> 2023.09.20 ohli24 버그 픽스 (.01. 다운로드 문제 해결)a 2023-09-19 23:42:16 +09:00
6f2edeaf89 main -> 2023.09.20 ohli24 버그 픽스 (.01. 다운로드 문제 해결) 2023-09-19 23:37:50 +09:00
769d40e5bb main -> 2023.08.31 ohli24 버그 픽스 (.01. 다운로드 문제 해결) 2023-08-31 00:20:57 +09:00
c53f1f50c9 main -> 2023.08.07 ohli24 버그 픽스 (.01. 다운로드 문제 해결) 2023-08-12 21:22:28 +09:00
9cae04584d main -> 2023.08.07 ohli24 버그 픽스 (.01. 다운로드 문제 해결) 2023-08-07 19:16:59 +09:00
efcadde111 main -> 2023.08.07 ohli24 버그 픽스 (.01. 다운로드 문제 해결) 2023-08-07 19:08:46 +09:00
145e277895 main -> 2023.08.07 ohli24 버그 픽스 (.01. 다운로드 문제 해결) 2023-08-07 19:06:08 +09:00
1b76d36352 main -> 2023.07.01 ohli24 버그 픽스 (.01. 다운로드 문제 해결) 2023-07-01 00:13:09 +09:00
a7cf43e0cc main -> 2023.07.01 ohli24 버그 픽스 (.01. 다운로드 문제 해결) 2023-07-01 00:09:52 +09:00
dd8a68a267 main -> 2023.05.11 ohli24 버그 픽스 (.01. todo: setting_save_after) 2023-05-11 19:41:39 +09:00
6bf816db10 main -> 2023.05.09 ohli24 버그 픽스 (.01. referer url fix) 2023-05-09 20:56:57 +09:00
becfc7feef main -> 2023.05.09 ohli24 버그 픽스 (.01. referer url fix) 2023-05-09 20:54:59 +09:00
25cddecfe9 main -> 2023.04.22 ohli24 버그 픽스 (.01. img xpath fix) 2023-04-22 23:26:19 +09:00
292a3fd388 main -> 2023.03.07 ohli24 버그 픽스 (.01. img xpath fix) 2023-03-07 21:11:49 +09:00
87461cce4a main -> 2023.03.07 ohli24 버그 픽스 (.01. img xpath fix) 2023-03-07 20:35:22 +09:00
080ae6ab0c main -> 2023.03.07 ohli24 버그 픽스 (.01. img xpath fix) 2023-03-07 20:26:38 +09:00
c8284f86b7 main -> 2023.03.03 ohli24 버그 픽스 (.01. img xpath fix) 2023-03-03 18:54:10 +09:00
f4717c74e4 main -> 2023.03.01 ohli24 버그 픽스 (.02. code cleanup) 2023-03-01 19:34:53 +09:00
c6940bbca5 main -> 2023.03.01 ohli24 버그 픽스 (.01. code cleanup) 2023-03-01 18:05:58 +09:00
5506cc2e7f main -> 2023.03.01 ohli24 버그 픽스 (.01. code cleanup) 2023-03-01 18:03:56 +09:00
10bd5e7412 main -> 2023.03.01 ohli24 버그 픽스 (.01. code cleanup) 2023-03-01 18:01:39 +09:00
3162911f1e 2023.02.01 ohli24 버그 픽스 (.04. code cleanup) 2023-02-01 19:35:19 +09:00
7f00ca6055 2023.02.01 ohli24 버그 픽스 (.03. 코드 정리) 2023-02-01 19:22:06 +09:00
367cb85657 2023.02.01 ohli24 버그 픽스 (.02. etc) 2023-02-01 14:53:18 +09:00
9be3e03c2d 2023.02.01 linkkf 버그 픽스 (.01. etc) 2023-02-01 14:50:17 +09:00
2c67b0cacd 2022.01.31 linkkf 버그 픽스 (.07. 기타) 2023-01-31 23:42:51 +09:00
6c198dcc76 2022.01.31 linkkf 버그 픽스 (.06. 기타) 2023-01-31 23:09:12 +09:00
e37a3c652b 2022.01.31 linkkf 버그 픽스 (.05. 기타) 2023-01-31 22:46:52 +09:00
0e45dc8de7 2022.01.31 anilife 버그 픽스 (.04. 기타) 2023-01-31 21:27:04 +09:00
52c0802572 2022.01.31 anilife 버그 픽스 (.04. 기타) 2023-01-31 21:20:17 +09:00
361490fc43 2022.01.31 anilife 버그 픽스 (.04. 기타) 2023-01-31 21:19:40 +09:00
b4641b6591 2022.01.31 anilife 버그 픽스 (.03. 기타) 2023-01-31 19:09:25 +09:00
ab343b92b7 2022.01.31 anilife 버그 픽스 (.02. 기타) 2023-01-31 18:21:25 +09:00
548902e095 2022.01.31 anilife 버그 픽스 (.01) 2023-01-31 17:21:38 +09:00
e92c5f1c47 2022.01.31 anilife 버그 픽스 2023-01-31 15:04:49 +09:00
b079e75029 2022.01.31 anilife 버그 픽스 2023-01-31 01:33:06 +09:00
5483b57a50 2022.01.31 anilife 버그 픽스 2023-01-31 01:20:35 +09:00
fd380c1890 2022.01.30 버그 픽스 2023-01-31 00:26:47 +09:00
957cffe48d 2022.01.30 버그 픽스 2023-01-30 23:23:37 +09:00
022347688d remove cached.. 2023-01-30 19:53:51 +09:00
3675cb0538 2022.01.30 버그 픽스 2023-01-30 19:53:06 +09:00
4fbbbf5122 2022.01.30 버그 픽스 2023-01-30 19:52:16 +09:00
19 changed files with 2174 additions and 1850 deletions

BIN
.DS_Store vendored

Binary file not shown.

1
lib/plugin/__init__.py Normal file
View File

@@ -0,0 +1 @@
from .ffmpeg_queue import FfmpegQueueEntity, FfmpegQueue

301
lib/plugin/ffmpeg_queue.py Normal file
View File

@@ -0,0 +1,301 @@
# -*- coding: utf-8 -*-
#########################################################
# python
import abc
import os
import threading
import time
import traceback
from datetime import datetime
from framework import py_queue
# third-party
# sjva 공용
#########################################################
class FfmpegQueueEntity(abc.ABCMeta("ABC", (object,), {"__slots__": ()})):
    """One downloadable item managed by :class:`FfmpegQueue`.

    Subclasses must implement :meth:`refresh_status` (push the current state
    to the UI) and :meth:`info_dict` (merge module-specific fields into the
    serialized dict).  The base class only carries bookkeeping state used by
    the queue and the ffmpeg listener.
    """

    def __init__(self, P, module_logic, info):
        """Initialize bookkeeping fields.

        :param P: owning plugin object (provides logger, package name, ...)
        :param module_logic: the module-logic instance that created this entity
        :param info: module-specific metadata describing what to download
        """
        self.P = P
        self.module_logic = module_logic
        self.entity_id = -1  # assigned by FfmpegQueue.add_queue()
        self.info = info
        self.url = None  # resolved video URL (None until known)
        self.ffmpeg_status = -1  # raw ffmpeg status code; -1 means "waiting"
        self.ffmpeg_status_kor = "대기중"
        self.ffmpeg_percent = 0
        self.ffmpeg_arg = None  # last raw listener payload from ffmpeg
        self.cancel = False  # set True to skip this entity when dequeued
        self.created_time = datetime.now().strftime("%m-%d %H:%M:%S")
        self.savepath = None
        self.filename = None
        self.filepath = None
        self.quality = None
        self.headers = None  # optional HTTP headers passed to ffmpeg

    def get_video_url(self):
        """Return the resolved video URL (``None`` when not yet resolved)."""
        return self.url

    def get_video_filepath(self):
        """Return the target output path (``None`` when not yet decided)."""
        return self.filepath

    @abc.abstractmethod
    def refresh_status(self):
        """Push this entity's current status to the front end."""
        pass

    @abc.abstractmethod
    def info_dict(self, tmp):
        """Add module-specific fields to *tmp* and return it."""
        pass

    def download_completed(self):
        """Hook invoked when ffmpeg reports completion; default is a no-op."""
        pass

    def as_dict(self):
        """Serialize the entity for the web UI, delegating extras to info_dict()."""
        serialized = {
            "entity_id": self.entity_id,
            "url": self.url,
            "ffmpeg_status": self.ffmpeg_status,
            "ffmpeg_status_kor": self.ffmpeg_status_kor,
            "ffmpeg_percent": self.ffmpeg_percent,
            "ffmpeg_arg": self.ffmpeg_arg,
            "cancel": self.cancel,
            "created_time": self.created_time,
            "savepath": self.savepath,
            "filename": self.filename,
            "filepath": self.filepath,
            "quality": self.quality,
        }
        return self.info_dict(serialized)
class FfmpegQueue(object):
    """Download queue that feeds :class:`FfmpegQueueEntity` items to ffmpeg.

    A daemon worker thread pulls entities off an internal queue and starts an
    ffmpeg download for each, keeping at most ``max_ffmpeg_count`` downloads
    running concurrently.  Progress flows back through :meth:`ffmpeg_listener`
    callbacks from the ``ffmpeg`` plugin, which also frees a slot when a
    download ends.
    """

    def __init__(self, P, max_ffmpeg_count):
        """Store the owning plugin and the concurrency limit.

        :param P: plugin object providing ``logger`` and ``package_name``
        :param max_ffmpeg_count: max simultaneous ffmpeg processes;
            ``None`` or ``""`` falls back to 1
        """
        self.P = P
        # Next entity_id handed out by add_queue().
        self.static_index = 1
        # Every enqueued entity, kept for listing and the command() actions.
        self.entity_list = []
        # Number of ffmpeg processes currently running (shared with listener).
        self.current_ffmpeg_count = 0
        self.download_queue = None
        self.download_thread = None
        self.max_ffmpeg_count = max_ffmpeg_count
        if self.max_ffmpeg_count is None or self.max_ffmpeg_count == "":
            self.max_ffmpeg_count = 1

    def queue_start(self):
        """Create the queue and start the daemon worker thread (idempotent)."""
        try:
            if self.download_queue is None:
                self.download_queue = py_queue.Queue()

            if self.download_thread is None:
                self.download_thread = threading.Thread(
                    target=self.download_thread_function, args=()
                )
                self.download_thread.daemon = True
                self.download_thread.start()
        except Exception as exception:
            self.P.logger.error("Exception:%s", exception)
            self.P.logger.error(traceback.format_exc())

    def download_thread_function(self):
        """Worker loop: wait for a free slot, then launch the next download."""
        while True:
            try:
                # Throttle: poll every 5s until an ffmpeg slot is available.
                while True:
                    try:
                        if self.current_ffmpeg_count < self.max_ffmpeg_count:
                            break
                        time.sleep(5)
                    except Exception as exception:
                        self.P.logger.error("Exception:%s", exception)
                        self.P.logger.error(traceback.format_exc())
                        self.P.logger.error(
                            "current_ffmpeg_count : %s", self.current_ffmpeg_count
                        )
                        self.P.logger.error(
                            "max_ffmpeg_count : %s", self.max_ffmpeg_count
                        )
                        break
                # Blocks until add_queue() puts the next entity.
                entity = self.download_queue.get()
                if entity.cancel:
                    continue

                video_url = entity.get_video_url()
                if video_url is None:
                    # Could not resolve a playable URL; mark and skip.
                    entity.ffmpeg_status_kor = "URL실패"
                    entity.refresh_status()
                    continue
                # Imported lazily: the ffmpeg plugin may not be loaded at import time.
                import ffmpeg

                # Skip the download when the output file already exists.
                filepath = str(entity.get_video_filepath())
                self.P.logger.debug(filepath)
                self.P.logger.debug(entity.get_video_filepath())
                if os.path.exists(filepath):
                    entity.ffmpeg_status_kor = "파일 있음"
                    entity.ffmpeg_percent = 100
                    entity.refresh_status()
                    continue
                dirname = os.path.dirname(filepath)
                self.P.logger.debug(type(dirname))
                self.P.logger.debug(dirname)
                if not os.path.exists(dirname):
                    os.makedirs(dirname)
                f = ffmpeg.Ffmpeg(
                    video_url,
                    os.path.basename(filepath),
                    plugin_id=entity.entity_id,
                    listener=self.ffmpeg_listener,
                    call_plugin=self.P.package_name,
                    save_path=dirname,
                    headers=entity.headers,
                )
                f.start()
                self.current_ffmpeg_count += 1
                self.download_queue.task_done()
            except Exception as exception:
                self.P.logger.error("Exception:%s", exception)
                self.P.logger.error(traceback.format_exc())

    def ffmpeg_listener(self, **arg):
        """Callback from the ffmpeg plugin; mirrors ffmpeg state onto the entity.

        ``arg['plugin_id']`` is the entity_id passed to ``ffmpeg.Ffmpeg``.
        """
        import ffmpeg

        entity = self.get_entity_by_entity_id(arg["plugin_id"])
        if entity is None:
            return
        if arg["type"] == "status_change":
            if arg["status"] == ffmpeg.Status.DOWNLOADING:
                pass
            elif arg["status"] == ffmpeg.Status.COMPLETED:
                entity.download_completed()
            elif arg["status"] == ffmpeg.Status.READY:
                pass
        elif arg["type"] == "last":
            # Final event for this download: free one concurrency slot.
            self.current_ffmpeg_count += -1
        elif arg["type"] == "log":
            pass
        elif arg["type"] == "normal":
            pass
        # Copy the raw payload onto the entity and notify the front end.
        entity.ffmpeg_arg = arg
        entity.ffmpeg_status = int(arg["status"])
        entity.ffmpeg_status_kor = str(arg["status"])
        entity.ffmpeg_percent = arg["data"]["percent"]
        entity.ffmpeg_arg["status"] = str(arg["status"])
        entity.refresh_status()

    def add_queue(self, entity):
        """Assign an id to *entity*, register it, and enqueue it for download.

        Returns ``True`` on success, ``False`` on error.
        """
        try:
            entity.entity_id = self.static_index
            self.static_index += 1
            self.entity_list.append(entity)
            self.download_queue.put(entity)
            return True
        except Exception as exception:
            self.P.logger.error("Exception:%s", exception)
            self.P.logger.error(traceback.format_exc())
        return False

    def set_max_ffmpeg_count(self, max_ffmpeg_count):
        # Takes effect the next time the worker loop checks for a free slot.
        self.max_ffmpeg_count = max_ffmpeg_count

    def get_max_ffmpeg_count(self):
        return self.max_ffmpeg_count

    def command(self, cmd, entity_id):
        """Handle a UI command: ``cancel``, ``reset``, ``delete_completed``, ``remove``.

        Returns a dict like ``{'ret': 'refresh'}`` or ``{'ret': 'notify', 'log': ...}``;
        returns ``None`` when an exception is raised (logged).
        """
        self.P.logger.debug("command :%s %s", cmd, entity_id)
        ret = {}
        try:
            if cmd == "cancel":
                self.P.logger.debug("command :%s %s", cmd, entity_id)
                entity = self.get_entity_by_entity_id(entity_id)
                if entity is not None:
                    if entity.ffmpeg_status == -1:
                        # Not started yet: flag it so the worker skips it.
                        entity.cancel = True
                        entity.ffmpeg_status_kor = "취소"
                        ret["ret"] = "refresh"
                    elif entity.ffmpeg_status != 5:
                        # Status 5 appears to mean "downloading" — TODO confirm
                        # against the ffmpeg plugin's Status enum.
                        ret["ret"] = "notify"
                        ret["log"] = "다운로드중 상태가 아닙니다."
                    else:
                        # Actively downloading: stop the ffmpeg process itself.
                        idx = entity.ffmpeg_arg["data"]["idx"]
                        import ffmpeg

                        ffmpeg.Ffmpeg.stop_by_idx(idx)
                        entity.refresh_status()
                        ret["ret"] = "refresh"
            elif cmd == "reset":
                # Drop all pending work and stop any active downloads.
                if self.download_queue is not None:
                    with self.download_queue.mutex:
                        self.download_queue.queue.clear()
                for _ in self.entity_list:
                    if _.ffmpeg_status == 5:
                        import ffmpeg

                        idx = _.ffmpeg_arg["data"]["idx"]
                        ffmpeg.Ffmpeg.stop_by_idx(idx)
                self.entity_list = []
                ret["ret"] = "refresh"
            elif cmd == "delete_completed":
                # Keep only entities that are neither finished nor cancelled.
                # Status 7 appears to mean "completed" — TODO confirm.
                new_list = []
                for _ in self.entity_list:
                    if _.ffmpeg_status_kor in ["파일 있음", "취소", "사용자중지"]:
                        continue
                    if _.ffmpeg_status != 7:
                        new_list.append(_)
                self.entity_list = new_list
                ret["ret"] = "refresh"
            elif cmd == "remove":
                # Remove a single entity from the listing by id.
                new_list = []
                for _ in self.entity_list:
                    if _.entity_id == entity_id:
                        continue
                    new_list.append(_)
                self.entity_list = new_list
                ret["ret"] = "refresh"
            return ret
        except Exception as exception:
            self.P.logger.error("Exception:%s", exception)
            self.P.logger.error(traceback.format_exc())

    def get_entity_by_entity_id(self, entity_id):
        """Return the registered entity with *entity_id*, or ``None``."""
        for _ in self.entity_list:
            if _.entity_id == entity_id:
                return _
        return None

    def get_entity_list(self):
        """Return all registered entities serialized via ``as_dict()``."""
        ret = []
        for x in self.entity_list:
            tmp = x.as_dict()
            ret.append(tmp)
        return ret

View File

@@ -17,8 +17,8 @@ def yommi_timeit(func):
end_time = time.perf_counter() end_time = time.perf_counter()
total_time = end_time - start_time total_time = end_time - start_time
# print(f"Function {func.__name__}{args} {kwargs} Took {total_time:.4f} secs") # print(f"Function {func.__name__}{args} {kwargs} Took {total_time:.4f} secs")
logger.debug( logger.opt(colors=True).debug(
f"Function {func.__name__}{args} {kwargs} Took {total_time:.4f} secs" f"<red>{func.__name__}{args} {kwargs}</red> function took <green>{total_time:.4f}</green> secs"
) )
return result return result

View File

@@ -21,6 +21,9 @@ import urllib
# my # my
from .lib.utils import yommi_timeit from .lib.utils import yommi_timeit
os.system(f"pip install playwright==1.27.1")
packages = [ packages = [
"beautifulsoup4", "beautifulsoup4",
"requests-cache", "requests-cache",
@@ -51,10 +54,14 @@ from framework.util import Util
from framework.common.util import headers from framework.common.util import headers
from plugin import ( from plugin import (
LogicModuleBase, LogicModuleBase,
FfmpegQueueEntity,
FfmpegQueue,
default_route_socketio, default_route_socketio,
# FfmpegQueue,
# FfmpegQueueEntity,
) )
# 철자가 틀린 부분이 있어서 분리함
#
from .lib.plugin import FfmpegQueue, FfmpegQueueEntity
from tool_base import d from tool_base import d
# 패키지 # 패키지
@@ -71,20 +78,21 @@ class LogicAniLife(LogicModuleBase):
db_default = { db_default = {
"anilife_db_version": "1", "anilife_db_version": "1",
"anilife_url": "https://anilife.live", "anilife_url": "https://anilife.live",
"anilife_download_path": os.path.join(path_data, P.package_name, "ohli24"), "anilife_download_path": os.path.join(path_data, P.package_name, "anilife"),
"anilife_auto_make_folder": "True", "anilife_auto_make_folder": "True",
"anilife_auto_make_season_folder": "True", "anilife_auto_make_season_folder": "True",
"anilife_finished_insert": "[완결]", "anilife_finished_insert": "[완결]",
"anilife_max_ffmpeg_process_count": "1", "anilife_max_ffmpeg_process_count": "1",
"anilife_order_desc": "False", "anilife_order_desc": "True",
"anilife_auto_start": "False", "anilife_auto_start": "False",
"anilife_interval": "* 5 * * *", "anilife_interval": "* 5 * * *",
"anilife_auto_mode_all": "False", "anilife_auto_mode_all": "False",
"anilife_auto_code_list": "all", "anilife_auto_code_list": "",
"anilife_current_code": "", "anilife_current_code": "",
"anilife_uncompleted_auto_enqueue": "False", "anilife_uncompleted_auto_enqueue": "False",
"anilife_image_url_prefix_series": "https://www.jetcloud.cc/series/", "anilife_image_url_prefix_series": "",
"anilife_image_url_prefix_episode": "https://www.jetcloud-list.cc/thumbnail/", "anilife_image_url_prefix_episode": "",
"anilife_discord_notify": "True",
} }
current_headers = None current_headers = None
@@ -166,7 +174,7 @@ class LogicAniLife(LogicModuleBase):
async def get_html_playwright( async def get_html_playwright(
url: str, url: str,
headless: bool = False, headless: bool = False,
referer: str = None, referer: str = "",
engine: str = "chrome", engine: str = "chrome",
stealth: bool = False, stealth: bool = False,
) -> str: ) -> str:
@@ -178,43 +186,59 @@ class LogicAniLife(LogicModuleBase):
import time import time
cookie = None cookie = None
# browser_args = [
# "--window-size=1300,570",
# "--window-position=000,000",
# "--disable-dev-shm-usage",
# "--no-sandbox",
# "--disable-web-security",
# "--disable-features=site-per-process",
# "--disable-setuid-sandbox",
# "--disable-accelerated-2d-canvas",
# "--no-first-run",
# "--no-zygote",
# # '--single-process',
# "--disable-gpu",
# # "--use-gl=egl",
# "--disable-blink-features=AutomationControlled",
# # "--disable-background-networking",
# # "--enable-features=NetworkService,NetworkServiceInProcess",
# "--disable-background-timer-throttling",
# "--disable-backgrounding-occluded-windows",
# "--disable-breakpad",
# "--disable-client-side-phishing-detection",
# "--disable-component-extensions-with-background-pages",
# "--disable-default-apps",
# "--disable-extensions",
# "--disable-features=Translate",
# "--disable-hang-monitor",
# "--disable-ipc-flooding-protection",
# "--disable-popup-blocking",
# "--disable-prompt-on-repost",
# # "--disable-renderer-backgrounding",
# "--disable-sync",
# "--force-color-profile=srgb",
# # "--metrics-recording-only",
# # "--enable-automation",
# "--password-store=basic",
# # "--use-mock-keychain",
# # "--hide-scrollbars",
# "--mute-audio",
# ]
browser_args = [ browser_args = [
"--window-size=1300,570", "--window-size=1300,570",
"--window-position=000,000", "--window-position=0,0",
"--disable-dev-shm-usage", # "--disable-dev-shm-usage",
"--no-sandbox", "--no-sandbox",
"--disable-web-security", # "--disable-web-security",
"--disable-features=site-per-process", # "--disable-features=site-per-process",
"--disable-setuid-sandbox", # "--disable-setuid-sandbox",
"--disable-accelerated-2d-canvas", # "--disable-accelerated-2d-canvas",
"--no-first-run", # "--no-first-run",
"--no-zygote", # "--no-zygote",
# '--single-process', # "--single-process",
"--disable-gpu", "--disable-gpu",
"--use-gl=egl", # "--use-gl=egl",
"--disable-blink-features=AutomationControlled",
"--disable-background-networking",
"--enable-features=NetworkService,NetworkServiceInProcess",
"--disable-background-timer-throttling",
"--disable-backgrounding-occluded-windows",
"--disable-breakpad",
"--disable-client-side-phishing-detection",
"--disable-component-extensions-with-background-pages",
"--disable-default-apps",
"--disable-extensions",
"--disable-features=Translate",
"--disable-hang-monitor",
"--disable-ipc-flooding-protection",
"--disable-popup-blocking",
"--disable-prompt-on-repost",
"--disable-renderer-backgrounding",
"--disable-sync",
"--force-color-profile=srgb",
"--metrics-recording-only",
"--enable-automation",
"--password-store=basic",
"--use-mock-keychain",
"--hide-scrollbars",
"--mute-audio", "--mute-audio",
] ]
# scraper = cloudscraper.create_scraper( # scraper = cloudscraper.create_scraper(
@@ -302,7 +326,8 @@ class LogicAniLife(LogicModuleBase):
url, wait_until="load", referer=LogicAniLife.headers["Referer"] url, wait_until="load", referer=LogicAniLife.headers["Referer"]
) )
# page.wait_for_timeout(10000) # page.wait_for_timeout(10000)
await asyncio.sleep(2.9) # await asyncio.sleep(2.9)
await asyncio.sleep(1)
# await page.reload() # await page.reload()
@@ -313,11 +338,13 @@ class LogicAniLife(LogicModuleBase):
print(f"page.url:: {page.url}") print(f"page.url:: {page.url}")
LogicAniLife.origin_url = page.url LogicAniLife.origin_url = page.url
# print(page.content()) temp_content = await page.content()
#
# print(temp_content)
print(f"run at {time.time() - start} sec") print(f"run at {time.time() - start} sec")
return await page.content() return temp_content
except Exception as e: except Exception as e:
logger.error("Exception:%s", e) logger.error("Exception:%s", e)
logger.error(traceback.format_exc()) logger.error(traceback.format_exc())
@@ -544,14 +571,12 @@ class LogicAniLife(LogicModuleBase):
har = await tracer.flush() har = await tracer.flush()
# page.wait_for_timeout(10000) # page.wait_for_timeout(10000)
await asyncio.sleep(2) await asyncio.sleep(1)
# logger.debug(har) # logger.debug(har)
# page.reload() # page.reload()
# time.sleep(10) # time.sleep(10)
# cookies = context.cookies
# print(cookies)
# print(page.content()) # print(page.content())
# vod_url = page.evaluate( # vod_url = page.evaluate(
@@ -564,23 +589,11 @@ class LogicAniLife(LogicModuleBase):
# return _0x55265f(0x99) + alJson[_0x55265f(0x91)] # return _0x55265f(0x99) + alJson[_0x55265f(0x91)]
# }""" # }"""
# ) # )
result_har_json = har.to_json() # result_har_json = har.to_json()
result_har_dict = har.to_dict()
# logger.debug(result_har_dict)
tmp_video_url = [] await context.close()
for i, elem in enumerate(result_har_dict["log"]["entries"]): await browser.close()
if "m3u8" in elem["request"]["url"]:
logger.debug(elem["request"]["url"])
tmp_video_url.append(elem["request"]["url"])
vod_url = tmp_video_url[-1]
logger.debug(f"vod_url:: {vod_url}")
logger.debug(f"run at {time.time() - start} sec")
return vod_url
except Exception as e: except Exception as e:
logger.error("Exception:%s", e) logger.error("Exception:%s", e)
result = subprocess.run( result = subprocess.run(
@@ -589,9 +602,32 @@ class LogicAniLife(LogicModuleBase):
print(result.stdout) print(result.stdout)
logger.error(traceback.format_exc()) logger.error(traceback.format_exc())
finally: finally:
await browser.close() await browser.close()
result_har_dict = har.to_dict()
# logger.debug(result_har_dict)
tmp_video_url = []
for i, elem in enumerate(result_har_dict["log"]["entries"]):
# if "m3u8" in elem["request"]["url"]:
if "m3u8" in elem["request"]["url"]:
logger.debug(elem["request"]["url"])
tmp_video_url.append(elem["request"]["url"])
logger.debug(tmp_video_url)
vod_url = tmp_video_url[-1]
for i, el in enumerate(tmp_video_url):
if el.endswith("m3u8"):
vod_url = el
logger.debug(f"vod_url:: {vod_url}")
logger.debug(f"run at {time.time() - start} sec")
return vod_url
@staticmethod @staticmethod
@yommi_timeit @yommi_timeit
def get_html_selenium( def get_html_selenium(
@@ -604,6 +640,7 @@ class LogicAniLife(LogicModuleBase):
lang_kr=False, lang_kr=False,
secret_mode=False, secret_mode=False,
download_path=None, download_path=None,
stealth: bool = False,
) -> bytes: ) -> bytes:
from selenium.webdriver.common.by import By from selenium.webdriver.common.by import By
from selenium import webdriver from selenium import webdriver
@@ -671,20 +708,22 @@ class LogicAniLife(LogicModuleBase):
ChromeDriverManager().install(), chrome_options=options ChromeDriverManager().install(), chrome_options=options
) )
driver.implicitly_wait(5) driver.implicitly_wait(10)
stealth( if stealth:
driver, stealth(
languages=["ko-KR", "ko"], driver,
vendor="Google Inc.", languages=["ko-KR", "ko"],
platform="Win32", vendor="Google Inc.",
webgl_vendor="Intel Inc.", platform="Win32",
renderer="Intel Iris OpenGL Engine", webgl_vendor="Intel Inc.",
fix_hairline=True, renderer="Intel Iris OpenGL Engine",
) fix_hairline=True,
)
driver.get(url) driver.get(url)
driver.refresh() # driver.refresh()
logger.debug(f"current_url:: {driver.current_url}") logger.debug(f"current_url:: {driver.current_url}")
# logger.debug(f"current_cookie:: {driver.get_cookies()}") # logger.debug(f"current_cookie:: {driver.get_cookies()}")
cookies_list = driver.get_cookies() cookies_list = driver.get_cookies()
@@ -697,7 +736,7 @@ class LogicAniLife(LogicModuleBase):
LogicAniLife.cookies = cookies_list LogicAniLife.cookies = cookies_list
# LogicAniLife.headers["Cookie"] = driver.get_cookies() # LogicAniLife.headers["Cookie"] = driver.get_cookies()
LogicAniLife.episode_url = driver.current_url LogicAniLife.episode_url = driver.current_url
time.sleep(1) # time.sleep(1)
elem = driver.find_element(By.XPATH, "//*") elem = driver.find_element(By.XPATH, "//*")
source_code = elem.get_attribute("outerHTML") source_code = elem.get_attribute("outerHTML")
@@ -919,8 +958,25 @@ class LogicAniLife(LogicModuleBase):
return jsonify(ModelAniLifeItem.web_list(request)) return jsonify(ModelAniLifeItem.web_list(request))
elif sub == "db_remove": elif sub == "db_remove":
return jsonify(ModelAniLifeItem.delete_by_id(req.form["id"])) return jsonify(ModelAniLifeItem.delete_by_id(req.form["id"]))
elif sub == "add_whitelist":
try:
# params = request.get_data()
# logger.debug(f"params: {params}")
# data_code = request.args.get("data_code")
params = request.get_json()
logger.debug(f"params:: {params}")
if params is not None:
code = params["data_code"]
logger.debug(f"params: {code}")
ret = LogicAniLife.add_whitelist(code)
else:
ret = LogicAniLife.add_whitelist()
return jsonify(ret)
except Exception as e:
logger.error("Exception:%s", e)
logger.error(traceback.format_exc())
except Exception as e: except Exception as e:
P.logger.error("Exception:%s", e) P.logger.error(f"Exception: {str(e)}")
P.logger.error(traceback.format_exc()) P.logger.error(traceback.format_exc())
@staticmethod @staticmethod
@@ -977,21 +1033,42 @@ class LogicAniLife(LogicModuleBase):
return ret return ret
def setting_save_after(self): def setting_save_after(self):
if self.queue.get_max_ffmpeg_count() != P.ModelSetting.get_int( pass
"anilife_max_ffmpeg_process_count" # Todo: 버그 고쳐야함
): # if self.queue.get_max_ffmpeg_count() != P.ModelSetting.get_int(
self.queue.set_max_ffmpeg_count( # "anilife_max_ffmpeg_process_count"
P.ModelSetting.get_int("anilife_max_ffmpeg_process_count") # ):
) # self.queue.set_max_ffmpeg_count(
# P.ModelSetting.get_int("anilife_max_ffmpeg_process_count")
# )
def scheduler_function(self): def scheduler_function(self):
logger.debug(f"ohli24 scheduler_function::=========================") logger.debug(f"anilife scheduler_function:: =========================")
content_code_list = P.ModelSetting.get_list("anilife_auto_code_list", "|")
content_code_list = P.ModelSetting.get_list("ohli24_auto_code_list", "|")
url = f'{P.ModelSetting.get("anilife_url")}/dailyani'
if "all" in content_code_list: if "all" in content_code_list:
url = f'{P.ModelSetting.get("anilife_url")}/dailyani'
ret_data = LogicAniLife.get_auto_anime_info(self, url=url) ret_data = LogicAniLife.get_auto_anime_info(self, url=url)
elif len(content_code_list) > 0:
for item in content_code_list:
url = P.ModelSetting.get("anilife_url") + "/detail/id/" + item
print("scheduling url: %s", url)
# ret_data = LogicOhli24.get_auto_anime_info(self, url=url)
content_info = self.get_series_info(item)
# logger.debug(content_info)
# exit()
for episode_info in content_info["episode"]:
add_ret = self.add(episode_info)
if add_ret.startswith("enqueue"):
self.socketio_callback("list_refresh", "")
# logger.debug(f"data: {data}")
# self.current_data = data
# db에서 다운로드 완료 유무 체크
def plugin_load(self): def plugin_load(self):
self.queue = FfmpegQueue( self.queue = FfmpegQueue(
P, P.ModelSetting.get_int("anilife_max_ffmpeg_process_count") P, P.ModelSetting.get_int("anilife_max_ffmpeg_process_count")
@@ -1027,6 +1104,14 @@ class LogicAniLife(LogicModuleBase):
.strip() .strip()
) )
regex = r"\t+"
subst = ""
regex1 = r"[\n]+"
subst1 = "<br/>"
des_items1 = re.sub(regex, subst, des_items1, 0, re.MULTILINE)
des_items1 = re.sub(regex1, subst1, des_items1, 0, re.MULTILINE)
# print(des_items1)
des = {} des = {}
des_key = [ des_key = [
"_otit", "_otit",
@@ -1114,6 +1199,9 @@ class LogicAniLife(LogicModuleBase):
"episode": episodes, "episode": episodes,
} }
if not P.ModelSetting.get_bool("anilife_order_desc"):
data["episode"] = list(reversed(data["episode"]))
data["list_order"] = "desc"
return data return data
except Exception as e: except Exception as e:
@@ -1265,7 +1353,14 @@ class AniLifeQueueEntity(FfmpegQueueEntity):
db_entity = ModelAniLifeItem.get_by_anilife_id(self.info["_id"]) db_entity = ModelAniLifeItem.get_by_anilife_id(self.info["_id"])
if db_entity is not None: if db_entity is not None:
db_entity.status = "completed" db_entity.status = "completed"
db_entity.complated_time = datetime.now() db_entity.completed_time = datetime.now()
db_entity.save()
def donwload_completed(self):
db_entity = ModelAniLifeItem.get_by_anilife_id(self.info["_id"])
if db_entity is not None:
db_entity.status = "completed"
db_entity.completed_time = datetime.now()
db_entity.save() db_entity.save()
def make_episode_info(self): def make_episode_info(self):
@@ -1362,24 +1457,27 @@ class AniLifeQueueEntity(FfmpegQueueEntity):
self.filename = Util.change_text_for_use_filename(ret) self.filename = Util.change_text_for_use_filename(ret)
logger.info(f"self.filename::> {self.filename}") logger.info(f"self.filename::> {self.filename}")
self.savepath = P.ModelSetting.get("ohli24_download_path") self.savepath = P.ModelSetting.get("anilife_download_path")
logger.info(f"self.savepath::> {self.savepath}") logger.info(f"self.savepath::> {self.savepath}")
if P.ModelSetting.get_bool("ohli24_auto_make_folder"): if P.ModelSetting.get_bool("anilife_auto_make_folder"):
if self.info["day"].find("완결") != -1: if self.info["day"].find("완결") != -1:
folder_name = "%s %s" % ( folder_name = "%s %s" % (
P.ModelSetting.get("ohli24_finished_insert"), P.ModelSetting.get("anilife_finished_insert"),
self.content_title, self.content_title,
) )
else: else:
folder_name = self.content_title folder_name = self.content_title
folder_name = Util.change_text_for_use_filename(folder_name.strip()) folder_name = Util.change_text_for_use_filename(folder_name.strip())
self.savepath = os.path.join(self.savepath, folder_name) self.savepath = os.path.join(self.savepath, folder_name)
if P.ModelSetting.get_bool("ohli24_auto_make_season_folder"): if P.ModelSetting.get_bool("anilife_auto_make_season_folder"):
self.savepath = os.path.join( self.savepath = os.path.join(
self.savepath, "Season %s" % int(self.season) self.savepath, "Season %s" % int(self.season)
) )
self.filepath = os.path.join(self.savepath, self.filename) self.filepath = os.path.join(self.savepath, self.filename)
# print(self.filepath)
# exit
if not os.path.exists(self.savepath): if not os.path.exists(self.savepath):
os.makedirs(self.savepath) os.makedirs(self.savepath)
@@ -1509,9 +1607,9 @@ class ModelAniLifeItem(db.Model):
item.episode_no = q["epi_queue"] item.episode_no = q["epi_queue"]
item.title = q["content_title"] item.title = q["content_title"]
item.episode_title = q["title"] item.episode_title = q["title"]
item.ohli24_va = q["va"] item.anilife_va = q["va"]
item.ohli24_vi = q["_vi"] item.anilife_vi = q["_vi"]
item.ohli24_id = q["_id"] item.anilife_id = q["_id"]
item.quality = q["quality"] item.quality = q["quality"]
item.filepath = q["filepath"] item.filepath = q["filepath"]
item.filename = q["filename"] item.filename = q["filename"]
@@ -1520,5 +1618,5 @@ class ModelAniLifeItem(db.Model):
item.vtt_url = q["vtt"] item.vtt_url = q["vtt"]
item.thumbnail = q["thumbnail"] item.thumbnail = q["thumbnail"]
item.status = "wait" item.status = "wait"
item.ohli24_info = q["anilife_info"] item.anilife_info = q["anilife_info"]
item.save() item.save()

View File

@@ -31,11 +31,16 @@ from framework.util import Util
from framework.common.util import headers from framework.common.util import headers
from plugin import ( from plugin import (
LogicModuleBase, LogicModuleBase,
FfmpegQueueEntity,
FfmpegQueue,
default_route_socketio, default_route_socketio,
# FfmpegQueue,
# FfmpegQueueEntity,
) )
# 철자가 틀린 부분이 있어서 분리함
#
from .lib.plugin import FfmpegQueue, FfmpegQueueEntity
from .lib.utils import yommi_timeit
packages = ["beautifulsoup4", "requests-cache", "cloudscraper"] packages = ["beautifulsoup4", "requests-cache", "cloudscraper"]
for _package in packages: for _package in packages:
@@ -115,7 +120,7 @@ class LogicLinkkf(LogicModuleBase):
"linkkf_auto_start": "False", "linkkf_auto_start": "False",
"linkkf_interval": "* 5 * * *", "linkkf_interval": "* 5 * * *",
"linkkf_auto_mode_all": "False", "linkkf_auto_mode_all": "False",
"linkkf_auto_code_list": "all", "linkkf_auto_code_list": "",
"linkkf_current_code": "", "linkkf_current_code": "",
"linkkf_uncompleted_auto_enqueue": "False", "linkkf_uncompleted_auto_enqueue": "False",
"linkkf_image_url_prefix_series": "", "linkkf_image_url_prefix_series": "",
@@ -219,13 +224,25 @@ class LogicLinkkf(LogicModuleBase):
elif sub == "db_remove": elif sub == "db_remove":
return jsonify(ModelLinkkfItem.delete_by_id(req.form["id"])) return jsonify(ModelLinkkfItem.delete_by_id(req.form["id"]))
elif sub == "add_whitelist": elif sub == "add_whitelist":
pass # params = request.get_data()
# logger.debug(f"params: {params}")
# data_code = request.args.get("data_code")
params = request.get_json()
logger.debug(f"params:: {params}")
if params is not None:
code = params["data_code"]
logger.debug(f"params: {code}")
ret = LogicLinkkf.add_whitelist(code)
else:
ret = LogicLinkkf.add_whitelist()
return jsonify(ret)
except Exception as e: except Exception as e:
P.logger.error(f"Exception: {str(e)}") P.logger.error(f"Exception: {str(e)}")
P.logger.error(traceback.format_exc()) P.logger.error(traceback.format_exc())
@staticmethod @staticmethod
@yommi_timeit
def get_html(url: str, timeout: int = 10, cached=False): def get_html(url: str, timeout: int = 10, cached=False):
try: try:
@@ -295,7 +312,7 @@ class LogicLinkkf(LogicModuleBase):
def add_whitelist(*args): def add_whitelist(*args):
ret = {} ret = {}
logger.debug(f"args: {args}") # logger.debug(f"args: {args}")
try: try:
if len(args) == 0: if len(args) == 0:
@@ -345,12 +362,14 @@ class LogicLinkkf(LogicModuleBase):
return ret return ret
def setting_save_after(self): def setting_save_after(self):
if self.queue.get_max_ffmpeg_count() != P.ModelSetting.get_int( # Todo:
"linkkf_max_ffmpeg_process_count" pass
): # if self.queue.get_max_ffmpeg_count() != P.ModelSetting.get_int(
self.queue.set_max_ffmpeg_count( # "linkkf_max_ffmpeg_process_count"
P.ModelSetting.get_int("linkkf_max_ffmpeg_process_count") # ):
) # self.queue.set_max_ffmpeg_count(
# P.ModelSetting.get_int("linkkf_max_ffmpeg_process_count")
# )
def get_video_url_from_url(url, url2): def get_video_url_from_url(url, url2):
video_url = None video_url = None
@@ -695,7 +714,7 @@ class LogicLinkkf(LogicModuleBase):
data = {"ret": "success", "page": page} data = {"ret": "success", "page": page}
response_data = LogicLinkkf.get_html(url, timeout=10) response_data = LogicLinkkf.get_html(url, timeout=10)
# P.logger.debug(response_data) # P.logger.debug(response_data)
P.logger.debug("debug.....................") # P.logger.debug("debug.....................")
tree = html.fromstring(response_data) tree = html.fromstring(response_data)
tmp_items = tree.xpath(items_xpath) tmp_items = tree.xpath(items_xpath)
@@ -743,7 +762,7 @@ class LogicLinkkf(LogicModuleBase):
url = "%s/%s" % (P.ModelSetting.get("linkkf_url"), code) url = "%s/%s" % (P.ModelSetting.get("linkkf_url"), code)
logger.info(url) logger.info(url)
logger.debug(LogicLinkkf.headers) # logger.debug(LogicLinkkf.headers)
html_content = LogicLinkkf.get_html(url, cached=False) html_content = LogicLinkkf.get_html(url, cached=False)
# html_content = LogicLinkkf.get_html_playwright(url) # html_content = LogicLinkkf.get_html_playwright(url)
# html_content = LogicLinkkf.get_html_cloudflare(url, cached=False) # html_content = LogicLinkkf.get_html_cloudflare(url, cached=False)
@@ -847,7 +866,7 @@ class LogicLinkkf(LogicModuleBase):
else: else:
tags = soup.select("ul > a") tags = soup.select("ul > a")
logger.debug(len(tags)) logger.debug(f"count: {len(tags)}")
# logger.info("tags", tags) # logger.info("tags", tags)
# re1 = re.compile(r'\/(?P<code>\d+)') # re1 = re.compile(r'\/(?P<code>\d+)')
@@ -863,14 +882,16 @@ class LogicLinkkf(LogicModuleBase):
idx = 1 idx = 1
for t in tags: for t in tags:
entity = { entity = {
"_id": data["code"], "code": data["code"],
"program_code": data["code"], "program_code": data["code"],
"program_title": data["title"], "program_title": data["title"],
"day": "",
"save_folder": Util.change_text_for_use_filename( "save_folder": Util.change_text_for_use_filename(
data["save_folder"] data["save_folder"]
), ),
"title": t.text.strip(), "title": t.text.strip(),
# "title": t.text_content().strip(), "episode_no": t.text.strip()
# "title": data["title"],
} }
# entity['code'] = re1.search(t.attrib['href']).group('code') # entity['code'] = re1.search(t.attrib['href']).group('code')
@@ -887,9 +908,9 @@ class LogicLinkkf(LogicModuleBase):
# logger.debug(f"m_obj::> {m_obj}") # logger.debug(f"m_obj::> {m_obj}")
if m_obj is not None: if m_obj is not None:
episode_code = m_obj.group(1) episode_code = m_obj.group(1)
entity["code"] = data["code"] + episode_code.zfill(4) entity["_id"] = data["code"] + episode_code.zfill(4)
else: else:
entity["code"] = data["code"] entity["_id"] = data["code"]
aa = t["href"] aa = t["href"]
if "/player" in aa: if "/player" in aa:
@@ -917,6 +938,7 @@ class LogicLinkkf(LogicModuleBase):
data["episode"].append(entity) data["episode"].append(entity)
idx = idx + 1 idx = idx + 1
# logger.debug(f"{data}")
data["ret"] = True data["ret"] = True
# logger.info('data', data) # logger.info('data', data)
self.current_data = data self.current_data = data
@@ -962,7 +984,7 @@ class LogicLinkkf(LogicModuleBase):
ret = "%s.S%sE%s.720p-LK.mp4" % (maintitle, season, epi_no) ret = "%s.S%sE%s.720p-LK.mp4" % (maintitle, season, epi_no)
else: else:
logger.debug("NOT MATCH") logger.debug("NOT MATCH")
ret = "%s.720p-SA.mp4" % maintitle ret = "%s.720p-LK.mp4" % maintitle
return Util.change_text_for_use_filename(ret) return Util.change_text_for_use_filename(ret)
except Exception as e: except Exception as e:
@@ -970,21 +992,22 @@ class LogicLinkkf(LogicModuleBase):
logger.error(traceback.format_exc()) logger.error(traceback.format_exc())
def add(self, episode_info): def add(self, episode_info):
print("episode_info") # logger.debug("episode_info")
logger.debug(episode_info) # logger.debug(episode_info)
if self.is_exist(episode_info): if self.is_exist(episode_info):
return "queue_exist" return "queue_exist"
else: else:
db_entity = ModelLinkkfItem.get_by_linkkf_id(episode_info["_id"]) db_entity = ModelLinkkfItem.get_by_linkkf_id(episode_info["code"])
logger.debug("db_entity:::> %s", db_entity) # logger.debug("db_entity:::> %s", db_entity)
# logger.debug("db_entity.status ::: %s", db_entity.status) # logger.debug("db_entity.status ::: %s", db_entity.status)
if db_entity is None: if db_entity is None:
entity = LinkkfQueueEntity(P, self, episode_info) entity = LinkkfQueueEntity(P, self, episode_info)
logger.debug("entity:::> %s", entity.as_dict()) # logger.debug("entity:::> %s", entity.as_dict())
ModelLinkkfItem.append(entity.as_dict()) ModelLinkkfItem.append(entity.as_dict())
# # logger.debug("entity:: type >> %s", type(entity)) # # logger.debug("entity:: type >> %s", type(entity))
# #
@@ -1016,6 +1039,35 @@ class LogicLinkkf(LogicModuleBase):
return True return True
return False return False
def scheduler_function(self):
logger.debug(f"linkkf scheduler_function:: =========================")
content_code_list = P.ModelSetting.get_list("linkkf_auto_code_list", "|")
# logger.debug(content_code_list)
if "all" in content_code_list:
url = f'{P.ModelSetting.get("linkkf_url")}/dailyani'
ret_data = LogicLinkkf.get_auto_anime_info(self, url=url)
elif len(content_code_list) > 0:
for item in content_code_list:
url = P.ModelSetting.get("linkkf_url") + "/" + item
print("scheduling url: %s", url)
# exit()
# ret_data = LogicOhli24.get_auto_anime_info(self, url=url)
content_info = self.get_series_info(item)
# logger.debug(content_info["episode"])
# exit()
for episode_info in content_info["episode"]:
add_ret = self.add(episode_info)
if add_ret.startswith("enqueue"):
self.socketio_callback("list_refresh", "")
# logger.debug(f"data: {data}")
# self.current_data = data
# db에서 다운로드 완료 유무 체크
# @staticmethod # @staticmethod
def plugin_load(self): def plugin_load(self):
try: try:
@@ -1082,28 +1134,33 @@ class LinkkfQueueEntity(FfmpegQueueEntity):
db_entity = ModelLinkkfItem.get_by_linkkf_id(self.info["_id"]) db_entity = ModelLinkkfItem.get_by_linkkf_id(self.info["_id"])
if db_entity is not None: if db_entity is not None:
db_entity.status = "completed" db_entity.status = "completed"
db_entity.complated_time = datetime.now() db_entity.completed_time = datetime.now()
db_entity.save() db_entity.save()
def donwload_completed(self): def donwload_completed(self):
db_entity = ModelLinkkfItem.get_by_linkkf_id(self.info["_id"]) db_entity = ModelLinkkfItem.get_by_linkkf_id(self.info["_id"])
if db_entity is not None: if db_entity is not None:
db_entity.status = "completed" db_entity.status = "completed"
db_entity.complated_time = datetime.now() db_entity.completed_time = datetime.now()
db_entity.save() db_entity.save()
# Get episode info from site # Get episode info from site
def make_episode_info(self): def make_episode_info(self):
logger.debug("call make_episode_info(): ")
url2s = [] url2s = []
url = None url = None
logger.debug(self.info) # logger.debug(self)
logger.debug(self.info["url"]) # print("")
# logger.debug(self.info)
# logger.debug(f'self.info:: {self.info["url"]}')
# exit()
try: try:
# logger.debug(self) # logger.debug(self)
# logger.debug(self.url) # logger.debug(self.url)
data = LogicLinkkf.get_html_episode_content(self.info["url"]) data = LogicLinkkf.get_html_episode_content(self.info["url"])
# logger.debug(f"data:: {data}")
# exit()
tree = html.fromstring(data) tree = html.fromstring(data)
xpath_select_query = '//*[@id="body"]/div/span/center/select/option' xpath_select_query = '//*[@id="body"]/div/span/center/select/option'
@@ -1153,7 +1210,7 @@ class LinkkfQueueEntity(FfmpegQueueEntity):
continue continue
# logger.debug(f"url: {url}, url2: {url2}") # logger.debug(f"url: {url}, url2: {url2}")
ret = LogicLinkkf.get_video_url_from_url(url, url2) ret = LogicLinkkf.get_video_url_from_url(url, url2)
logger.debug(f"ret::::> {ret}") # logger.debug(f"ret::::> {ret}")
if ret is not None: if ret is not None:
video_url = ret video_url = ret
@@ -1164,12 +1221,93 @@ class LinkkfQueueEntity(FfmpegQueueEntity):
# logger.info(video_url) # logger.info(video_url)
# return [video_url, referer_url] # return [video_url, referer_url]
return video_url # return video_url
logger.debug(video_url)
logger.info("dx: urls2:: %s", url2s) logger.info("dx: urls2:: %s", url2s)
video_url = None self.url = video_url[0]
referer_url = None # dx base_url = "https://kfani.me"
self.srt_url = base_url + video_url[2]
match = re.compile(
r"(?P<title>.*?)\s*((?P<season>\d+)%s)?\s*((?P<epi_no>\d+)%s)"
% ("", "")
).search(self.info["program_title"])
#
# epi_no 초기값
epi_no = 1
#
logger.debug(match)
if match:
self.content_title = match.group("title").strip()
# if "season" in match.groupdict() and match.group("season") is not None:
# self.season = int(match.group("season"))
#
# # epi_no = 1
# epi_no = int(match.group("epi_no"))
# ret = "%s.S%sE%s.%s-LK.mp4" % (
# self.content_title,
# "0%s" % self.season if self.season < 10 else self.season,
# "0%s" % epi_no if epi_no < 10 else epi_no,
# self.quality,
# )
else:
self.content_title = self.info["program_title"]
# P.logger.debug("NOT MATCH")
# ret = "%s.720p-LK.mp4" % self.info["program_title"]
# logger.info('self.content_title:: %s', self.content_title)
self.epi_queue = epi_no
# self.filename = Util.change_text_for_use_filename(ret)
self.filename = self.info["filename"]
logger.info(f"self.filename::> {self.filename}")
self.savepath = P.ModelSetting.get("linkkf_download_path")
logger.info(f"self.savepath::> {self.savepath}")
# TODO: 완결 처리
folder_name = None
if P.ModelSetting.get_bool("linkkf_auto_make_folder"):
if self.info["day"].find("완결") != -1:
folder_name = "%s %s" % (
P.ModelSetting.get("linkkf_finished_insert"),
self.content_title,
)
else:
folder_name = self.content_title
# logger.debug(f"folder_name:: {folder_name}")
# logger.debug(f"self.content_title:: {self.content_title}")
folder_name = Util.change_text_for_use_filename(folder_name.strip())
self.savepath = os.path.join(self.savepath, folder_name)
if P.ModelSetting.get_bool("linkkf_auto_make_season_folder"):
self.savepath = os.path.join(
self.savepath, "Season %s" % int(self.season)
)
self.filepath = os.path.join(self.savepath, self.filename)
if not os.path.exists(self.savepath):
os.makedirs(self.savepath)
from framework.common.util import write_file, convert_vtt_to_srt
srt_filepath = os.path.join(
self.savepath, self.filename.replace(".mp4", ".ko.srt")
)
if (
self.srt_url is not None
and not os.path.exists(srt_filepath)
and not ("thumbnails.vtt" in self.srt_url)
):
srt_data = requests.get(self.srt_url, headers=headers).text
write_file(srt_data, srt_filepath)
except Exception as e: except Exception as e:
logger.error(f"Exception: {str(e)}") logger.error(f"Exception: {str(e)}")
@@ -1190,6 +1328,7 @@ class ModelLinkkfItem(db.Model):
title = db.Column(db.String) title = db.Column(db.String)
episode_title = db.Column(db.String) episode_title = db.Column(db.String)
# linkkf_va = db.Column(db.String) # linkkf_va = db.Column(db.String)
linkkf_code = db.Column(db.String)
linkkf_vi = db.Column(db.String) linkkf_vi = db.Column(db.String)
linkkf_id = db.Column(db.String) linkkf_id = db.Column(db.String)
quality = db.Column(db.String) quality = db.Column(db.String)
@@ -1285,7 +1424,7 @@ class ModelLinkkfItem(db.Model):
@classmethod @classmethod
def append(cls, q): def append(cls, q):
logger.debug(q) # logger.debug(q)
item = ModelLinkkfItem() item = ModelLinkkfItem()
item.content_code = q["program_code"] item.content_code = q["program_code"]
item.season = q["season"] item.season = q["season"]

View File

@@ -7,12 +7,16 @@
# @Software: PyCharm # @Software: PyCharm
import os, sys, traceback, re, json, threading import os, sys, traceback, re, json, threading
import time
from datetime import datetime, date from datetime import datetime, date
import copy import copy
import hashlib import hashlib
import discord
# third-party # third-party
import requests import requests
from discord_webhook import DiscordWebhook, DiscordEmbed
from lxml import html from lxml import html
from urllib import parse from urllib import parse
import urllib import urllib
@@ -25,6 +29,8 @@ from flask import request, render_template, jsonify
from sqlalchemy import or_, and_, func, not_, desc from sqlalchemy import or_, and_, func, not_, desc
from pip._internal import main from pip._internal import main
from .lib.utils import yommi_timeit
pkgs = ["beautifulsoup4", "jsbeautifier", "aiohttp"] pkgs = ["beautifulsoup4", "jsbeautifier", "aiohttp"]
for pkg in pkgs: for pkg in pkgs:
try: try:
@@ -60,9 +66,11 @@ logger = P.logger
class LogicOhli24(LogicModuleBase): class LogicOhli24(LogicModuleBase):
db_default = { db_default = {
"ohli24_db_version": "1", "ohli24_db_version": "1.1",
"ohli24_url": "https://ohli24.org", "ohli24_url": "https://a21.ohli24.com",
"ohli24_download_path": os.path.join(path_data, P.package_name, "ohli24"), "ohli24_download_path": os.path.join(
path_data, P.package_name, "ohli24"
),
"ohli24_auto_make_folder": "True", "ohli24_auto_make_folder": "True",
"ohli24_auto_make_season_folder": "True", "ohli24_auto_make_season_folder": "True",
"ohli24_finished_insert": "[완결]", "ohli24_finished_insert": "[완결]",
@@ -100,9 +108,16 @@ class LogicOhli24(LogicModuleBase):
} }
def __init__(self, P): def __init__(self, P):
super(LogicOhli24, self).__init__(P, "setting", scheduler_desc="ohli24 자동 다운로드") super(LogicOhli24, self).__init__(
P, "setting", scheduler_desc="ohli24 자동 다운로드"
)
self.name = "ohli24" self.name = "ohli24"
self.queue = None self.queue = None
self.last_post_title = ""
self.discord_webhook_url = "https://discord.com/api/webhooks/1071430127860334663/viCiM5ssS-U1_ONWgdWa-64KgvPfU5jJ8WQAym-4vkiyASB0e8IcnlLnxG4F40nj10kZ"
self.discord_color = "242424"
self.discord_title = "새로운 애니"
self.DISCORD_CHANNEL_ID = "1071430054023798958"
default_route_socketio(P, self) default_route_socketio(P, self)
@staticmethod @staticmethod
@@ -205,7 +220,9 @@ class LogicOhli24(LogicModuleBase):
try: try:
if engine == "chrome": if engine == "chrome":
browser = await p.chromium.launch( browser = await p.chromium.launch(
channel="chrome", args=browser_args, headless=headless channel="chrome",
args=browser_args,
headless=headless,
) )
elif engine == "webkit": elif engine == "webkit":
browser = await p.webkit.launch( browser = await p.webkit.launch(
@@ -221,9 +238,9 @@ class LogicOhli24(LogicModuleBase):
# user_agent=ua, # user_agent=ua,
# ) # )
LogicOhli24.headers[ LogicOhli24.headers["Referer"] = (
"Referer" "https://anilife.com/detail/id/471"
] = "https://anilife.live/detail/id/471" )
# print(LogicAniLife.headers) # print(LogicAniLife.headers)
LogicOhli24.headers["Referer"] = LogicOhli24.episode_url LogicOhli24.headers["Referer"] = LogicOhli24.episode_url
@@ -233,7 +250,8 @@ class LogicOhli24(LogicModuleBase):
# logger.debug(f"LogicAniLife.headers::: {LogicOhli24.headers}") # logger.debug(f"LogicAniLife.headers::: {LogicOhli24.headers}")
context = await browser.new_context( context = await browser.new_context(
extra_http_headers=LogicOhli24.headers, ignore_https_errors=True extra_http_headers=LogicOhli24.headers,
ignore_https_errors=True,
) )
# await context.add_cookies(LogicOhli24.cookies) # await context.add_cookies(LogicOhli24.cookies)
@@ -327,7 +345,9 @@ class LogicOhli24(LogicModuleBase):
), ),
arg=arg, arg=arg,
) )
return render_template("sample.html", title="%s - %s" % (P.package_name, sub)) return render_template(
"sample.html", title="%s - %s" % (P.package_name, sub)
)
# @staticmethod # @staticmethod
def process_ajax(self, sub, req): def process_ajax(self, sub, req):
@@ -412,7 +432,8 @@ class LogicOhli24(LogicModuleBase):
count += 1 count += 1
notify = { notify = {
"type": "success", "type": "success",
"msg": "%s 개의 에피소드를 큐에 추가 하였습니다." % count, "msg": "%s 개의 에피소드를 큐에 추가 하였습니다."
% count,
} }
socketio.emit( socketio.emit(
"notify", notify, namespace="/framework", broadcast=True "notify", notify, namespace="/framework", broadcast=True
@@ -490,7 +511,7 @@ class LogicOhli24(LogicModuleBase):
def add_whitelist(*args): def add_whitelist(*args):
ret = {} ret = {}
logger.debug(f"args: {args}") # logger.debug(f"args: {args}")
try: try:
if len(args) == 0: if len(args) == 0:
@@ -498,15 +519,13 @@ class LogicOhli24(LogicModuleBase):
else: else:
code = str(args[0]) code = str(args[0])
print(code) # print(code)
whitelist_program = P.ModelSetting.get("ohli24_auto_code_list") whitelist_program = P.ModelSetting.get("ohli24_auto_code_list")
# whitelist_programs = [
# str(x.strip().replace(" ", ""))
# for x in whitelist_program.replace("\n", "|").split("|")
# ]
whitelist_programs = [ whitelist_programs = [
str(x.strip()) for x in whitelist_program.replace("\n", "|").split("|") str(x.strip())
for x in whitelist_program.replace("\n", "|").split("|")
] ]
if code not in whitelist_programs: if code not in whitelist_programs:
@@ -533,7 +552,7 @@ class LogicOhli24(LogicModuleBase):
ret["ret"] = False ret["ret"] = False
ret["log"] = "이미 추가되어 있습니다." ret["log"] = "이미 추가되어 있습니다."
except Exception as e: except Exception as e:
logger.error("Exception:%s", e) logger.error(f"Exception: {str(e)}")
logger.error(traceback.format_exc()) logger.error(traceback.format_exc())
ret["ret"] = False ret["ret"] = False
ret["log"] = str(e) ret["log"] = str(e)
@@ -551,15 +570,25 @@ class LogicOhli24(LogicModuleBase):
# Todo: 스케쥴링 함수 미구현 # Todo: 스케쥴링 함수 미구현
logger.debug(f"ohli24 scheduler_function::=========================") logger.debug(f"ohli24 scheduler_function::=========================")
content_code_list = P.ModelSetting.get_list("ohli24_auto_code_list", "|") content_code_list = P.ModelSetting.get_list(
"ohli24_auto_code_list", "|"
)
logger.debug(f"content_code_list::: {content_code_list}") logger.debug(f"content_code_list::: {content_code_list}")
url_list = ["https://www.naver.com/", "https://www.daum.net/"] url_list = ["https://www.naver.com/", "https://www.daum.net/"]
week = ["월요일", "화요일", "수요일", "목요일", "금요일", "토요일", "일요일"] week = [
"월요일",
"화요일",
"수요일",
"목요일",
"금요일",
"토요일",
"일요일",
]
today = date.today() today = date.today()
print(today) # print(today)
print() # print()
print(today.weekday()) # print(today.weekday())
url = f'{P.ModelSetting.get("ohli24_url")}/bbs/board.php?bo_table=ing&sca={week[today.weekday()]}' url = f'{P.ModelSetting.get("ohli24_url")}/bbs/board.php?bo_table=ing&sca={week[today.weekday()]}'
@@ -590,10 +619,14 @@ class LogicOhli24(LogicModuleBase):
elif len(content_code_list) > 0: elif len(content_code_list) > 0:
for item in content_code_list: for item in content_code_list:
url = P.ModelSetting.get("ohli24_url") + "/c/" + item url = P.ModelSetting.get("ohli24_url") + "/c/" + item
print("scheduling url: %s", url) logger.debug(f"scheduling url: {url}")
# ret_data = LogicOhli24.get_auto_anime_info(self, url=url) # ret_data = LogicOhli24.get_auto_anime_info(self, url=url)
print("debug===")
print(item)
content_info = self.get_series_info(item, "", "") content_info = self.get_series_info(item, "", "")
# logger.debug(content_info)
for episode_info in content_info["episode"]: for episode_info in content_info["episode"]:
add_ret = self.add(episode_info) add_ret = self.add(episode_info)
if add_ret.startswith("enqueue"): if add_ret.startswith("enqueue"):
@@ -618,6 +651,7 @@ class LogicOhli24(LogicModuleBase):
def get_series_info(self, code, wr_id, bo_table): def get_series_info(self, code, wr_id, bo_table):
code_type = "c" code_type = "c"
code = urllib.parse.quote(code)
try: try:
if ( if (
@@ -664,7 +698,7 @@ class LogicOhli24(LogicModuleBase):
else: else:
pass pass
logger.debug("url:::> %s", url) # logger.debug("url:::> %s", url)
# self.current_headers = { 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) # self.current_headers = { 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7)
# AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/96.0.4664.110 Whale/3.12.129.46 Safari/537.36', # AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/96.0.4664.110 Whale/3.12.129.46 Safari/537.36',
@@ -689,6 +723,8 @@ class LogicOhli24(LogicModuleBase):
"_total_chapter", "_total_chapter",
"_show_time", "_show_time",
"_release_year", "_release_year",
"_drawing",
"_character_design"
] ]
description_dict = { description_dict = {
"원제": "_otit", "원제": "_otit",
@@ -709,8 +745,11 @@ class LogicOhli24(LogicModuleBase):
"개봉년도": "_release_year", "개봉년도": "_release_year",
"개봉일": "_opening_date", "개봉일": "_opening_date",
"런타임": "_run_time", "런타임": "_run_time",
"작화": "_drawing",
"캐릭터디자인": "_character_design"
} }
list_body_li = tree.xpath('//ul[@class="list-body"]/li') list_body_li = tree.xpath('//ul[@class="list-body"]/li')
# logger.debug(f"list_body_li:: {list_body_li}") # logger.debug(f"list_body_li:: {list_body_li}")
episodes = [] episodes = []
@@ -744,7 +783,8 @@ class LogicOhli24(LogicModuleBase):
} }
) )
logger.debug(P.ModelSetting.get("ohli24_order_desc")) # 정렬 여부 체크
# logger.debug(P.ModelSetting.get("ohli24_order_desc"))
# if P.ModelSetting.get("ohli24_order_desc") == "False": # if P.ModelSetting.get("ohli24_order_desc") == "False":
# print("Here....") # print("Here....")
# episodes.reverse() # episodes.reverse()
@@ -763,7 +803,7 @@ class LogicOhli24(LogicModuleBase):
# logger.info(f"des::>> {des}") # logger.info(f"des::>> {des}")
image = image.replace("..", P.ModelSetting.get("ohli24_url")) image = image.replace("..", P.ModelSetting.get("ohli24_url"))
# logger.info("images:: %s", image) # logger.info("images:: %s", image)
logger.info("title:: %s", title) # logger.info("title:: %s", title)
ser_description = tree.xpath( ser_description = tree.xpath(
'//div[@class="view-stocon"]/div[@class="c"]/text()' '//div[@class="view-stocon"]/div[@class="c"]/text()'
@@ -817,7 +857,9 @@ class LogicOhli24(LogicModuleBase):
+ page + page
) )
# cate == "complete": # cate == "complete":
logger.info("url:::> %s", url)
# logger.info("url:::> %s", url)
data = {} data = {}
response_data = LogicOhli24.get_html(url, timeout=10) response_data = LogicOhli24.get_html(url, timeout=10)
# response_data = asyncio.run( # response_data = asyncio.run(
@@ -839,12 +881,24 @@ class LogicOhli24(LogicModuleBase):
entity = {} entity = {}
entity["link"] = item.xpath(".//a/@href")[0] entity["link"] = item.xpath(".//a/@href")[0]
entity["code"] = entity["link"].split("/")[-1] entity["code"] = entity["link"].split("/")[-1]
entity["title"] = item.xpath(".//div[@class='post-title']/text()")[ entity["title"] = item.xpath(
0 ".//div[@class='post-title']/text()"
].strip() )[0].strip()
entity["image_link"] = item.xpath(".//div[@class='img-item']/img/@src")[ # logger.debug(item.xpath(".//div[@class='img-item']/img/@src")[0])
0 # logger.debug(item.xpath(".//div[@class='img-item']/img/@data-ezsrc")[0])
].replace("..", P.ModelSetting.get("ohli24_url")) # entity["image_link"] = item.xpath(".//div[@class='img-item']/img/@src")[
# 0
# ].replace("..", P.ModelSetting.get("ohli24_url"))
if len(item.xpath(".//div[@class='img-item']/img/@src")) > 0:
entity["image_link"] = item.xpath(
".//div[@class='img-item']/img/@src"
)[0].replace("..", P.ModelSetting.get("ohli24_url"))
else:
entity["image_link"] = item.xpath(
".//div[@class='img-item']/img/@data-ezsrc"
)[0]
data["ret"] = "success" data["ret"] = "success"
data["anime_list"].append(entity) data["anime_list"].append(entity)
@@ -869,12 +923,12 @@ class LogicOhli24(LogicModuleBase):
entity = {} entity = {}
entity["link"] = item.xpath(".//a/@href")[0] entity["link"] = item.xpath(".//a/@href")[0]
entity["code"] = entity["link"].split("/")[-1] entity["code"] = entity["link"].split("/")[-1]
entity["title"] = item.xpath(".//div[@class='post-title']/text()")[ entity["title"] = item.xpath(
0 ".//div[@class='post-title']/text()"
].strip() )[0].strip()
entity["image_link"] = item.xpath(".//div[@class='img-item']/img/@src")[ entity["image_link"] = item.xpath(
0 ".//div[@class='img-item']/img/@src"
].replace("..", P.ModelSetting.get("ohli24_url")) )[0].replace("..", P.ModelSetting.get("ohli24_url"))
data["ret"] = "success" data["ret"] = "success"
data["anime_list"].append(entity) data["anime_list"].append(entity)
@@ -913,21 +967,80 @@ class LogicOhli24(LogicModuleBase):
entity["title"] = "".join( entity["title"] = "".join(
item.xpath(".//div[@class='post-title']/text()") item.xpath(".//div[@class='post-title']/text()")
).strip() ).strip()
entity["image_link"] = item.xpath(".//div[@class='img-item']/img/@src")[ entity["image_link"] = item.xpath(
0 ".//div[@class='img-item']/img/@src"
].replace("..", P.ModelSetting.get("ohli24_url")) )[0].replace("..", P.ModelSetting.get("ohli24_url"))
entity["code"] = item.xpath(".//div[@class='img-item']/img/@alt")[0] entity["code"] = item.xpath(
".//div[@class='img-item']/img/@alt"
)[0]
data["ret"] = "success" data["ret"] = "success"
data["anime_list"].append(entity) data["anime_list"].append(entity)
return data return data
except Exception as e: except Exception as e:
P.logger.error("Exception:%s", e) P.logger.error(f"Exception: {str(e)}")
P.logger.error(traceback.format_exc()) P.logger.error(traceback.format_exc())
return {"ret": "exception", "log": str(e)} return {"ret": "exception", "log": str(e)}
def check_for_new_post(self):
# Get the HTML content of the page
res = requests.get(
f'{P.ModelSetting.get("ohli24_url")}/bbs/board.php?bo_table=ing'
)
soup = BeautifulSoup(res.content, "html.parser")
# Find the latest post on the page
latest_post = soup.find("div", class_="post-title").text
latest_post_image = (
soup.find("div", class_="img-item")
.find("img", class_="wr-img")
.get("src")
.replace("..", P.ModelSetting.get("ohli24_url"))
)
logger.debug(f"latest_post:: {latest_post}")
logger.debug(f"self.last_post_title:: {self.last_post_title}")
logger.debug(f"latest_post_image:: {latest_post_image}")
# Compare the latest post with the last recorded post
if latest_post != self.last_post_title:
# If there is a new post, update the last recorded post
self.last_post_title = latest_post
# Send a notification to Discord channel
# discord_client = discord.Client()
# discord_client.run(self.DISCORD_BOT_TOKEN)
#
# async def on_ready():
# channel = discord_client.get_channel(self.DISCORD_CHANNEL_ID)
# await channel.send(f"A new post has been added: {latest_post}")
#
# discord_client.close()
webhook = DiscordWebhook(url=self.discord_webhook_url)
embed = DiscordEmbed(
title=self.discord_title, color=self.discord_color
)
embed.set_timestamp()
path = self.last_post_title
embed.set_image(url=latest_post_image)
embed.add_embed_field(name="", value=path, inline=True)
embed.set_timestamp()
webhook.add_embed(embed)
response = webhook.execute()
return self.last_post_title
return self.last_post_title
def send_notify(self):
logger.debug("send_notify() routine")
while True:
self.last_post_title = self.check_for_new_post()
logger.debug(self.last_post_title)
time.sleep(600)
# @staticmethod # @staticmethod
def plugin_load(self): def plugin_load(self):
try: try:
@@ -938,6 +1051,10 @@ class LogicOhli24(LogicModuleBase):
self.current_data = None self.current_data = None
self.queue.queue_start() self.queue.queue_start()
logger.debug(P.ModelSetting.get_bool("ohli24_discord_notify"))
if P.ModelSetting.get_bool("ohli24_discord_notify"):
self.send_notify()
except Exception as e: except Exception as e:
logger.error("Exception:%s", e) logger.error("Exception:%s", e)
logger.error(traceback.format_exc()) logger.error(traceback.format_exc())
@@ -958,12 +1075,14 @@ class LogicOhli24(LogicModuleBase):
return True return True
@staticmethod @staticmethod
@yommi_timeit
def get_html( def get_html(
url, headers=None, referer=None, stream=False, timeout=5, stealth=False url, headers=None, referer=None, stream=False, timeout=10, stealth=False
): ):
# global response_data # global response_data
data = "" data = ""
# response_date = "" # response_date = ""
logger.debug(f"url: {url}")
try: try:
print("cloudflare protection bypass ==================P") print("cloudflare protection bypass ==================P")
@@ -971,7 +1090,8 @@ class LogicOhli24(LogicModuleBase):
if headers is not None: if headers is not None:
LogicOhli24.headers = headers LogicOhli24.headers = headers
logger.debug(f"headers: {LogicOhli24.headers}") # logger.debug(f"headers: {LogicOhli24.headers}")
# response_data = asyncio.run( # response_data = asyncio.run(
# LogicOhli24.get_html_playwright( # LogicOhli24.get_html_playwright(
# url, # url,
@@ -990,16 +1110,26 @@ class LogicOhli24(LogicModuleBase):
if LogicOhli24.session is None: if LogicOhli24.session is None:
LogicOhli24.session = requests.session() LogicOhli24.session = requests.session()
LogicOhli24.session.verify = False
# logger.debug('get_html :%s', url) # logger.debug('get_html :%s', url)
# LogicOhli24.headers["Referer"] = "" if referer is None else referer # LogicOhli24.headers["Referer"] = "" if referer is None else referer
logger.debug(f"referer:: {referer}") # logger.debug(f"referer:: {referer}")
if referer: if referer:
LogicOhli24.headers["Referer"] = referer LogicOhli24.headers["Referer"] = referer
# logger.info(headers) # logger.info(headers)
logger.debug(f"LogicOhli24.headers:: {LogicOhli24.headers}") # logger.debug(f"LogicOhli24.headers:: {LogicOhli24.headers}")
proxies = {
"http": "http://192.168.0.2:3138",
"https": "http://192.168.0.2:3138",
}
page_content = LogicOhli24.session.get( page_content = LogicOhli24.session.get(
url, headers=LogicOhli24.headers, timeout=timeout url,
headers=LogicOhli24.headers,
timeout=timeout,
proxies=proxies,
) )
response_data = page_content.text response_data = page_content.text
# logger.debug(response_data) # logger.debug(response_data)
@@ -1081,7 +1211,8 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
# Get episode info from OHLI24 site # Get episode info from OHLI24 site
def make_episode_info(self): def make_episode_info(self):
try: try:
base_url = "https://ohli24.org" base_url = "https://a24.ohli24.com"
base_url = P.ModelSetting.get("ohli24_url")
iframe_url = "" iframe_url = ""
# https://ohli24.org/e/%EB%85%B9%EC%9D%84%20%EB%A8%B9%EB%8A%94%20%EB%B9%84%EC%8A%A4%EC%BD%94%206%ED%99%94 # https://ohli24.org/e/%EB%85%B9%EC%9D%84%20%EB%A8%B9%EB%8A%94%20%EB%B9%84%EC%8A%A4%EC%BD%94%206%ED%99%94
@@ -1104,24 +1235,43 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
) )
# logger.debug(text) # logger.debug(text)
soup1 = BeautifulSoup(text, "lxml") soup1 = BeautifulSoup(text, "lxml")
pattern = re.compile(r"url : \"\.\.(.*)\"") # pattern = re.compile(r"url : \"\.\.(.*)\"")
script = soup1.find("script", text=pattern) # script = soup1.find("script", text=pattern)
#
# if script:
# match = pattern.search(script.text)
# if match:
# iframe_url = match.group(1)
# logger.info("iframe_url::> %s", iframe_url)
pattern = r"<iframe src=\"(.*?)\" allowfullscreen>"
if script: match = re.search(pattern, text)
match = pattern.search(script.text) if match:
if match: iframe_src = match.group(1)
iframe_url = match.group(1) logger.debug(f"iframe_src:::> {iframe_src}")
logger.info("iframe_url::> %s", iframe_url)
iframe_url = soup1.find("iframe")["src"]
iframe_src = iframe_url # iframe_src = f'{P.ModelSetting.get("ohli24_url")}{iframe_url}'
iframe_html = LogicOhli24.get_html(
iframe_src, headers=headers, timeout=600
)
# print(iframe_html)
pattern = r"<iframe src=\"(.*?)\" allowfullscreen>"
match = re.search(pattern, iframe_html)
if match:
iframe_src = match.group(1)
print(iframe_src)
logger.debug(f"iframe_src:::> {iframe_src}") logger.debug(f"iframe_src:::> {iframe_src}")
# resp1 = requests.get(iframe_src, headers=headers, timeout=600).text # resp1 = requests.get(iframe_src, headers=headers, timeout=600).text
resp1 = LogicOhli24.get_html(iframe_src, headers=headers, timeout=600) resp1 = LogicOhli24.get_html(
logger.info("resp1::>> %s", resp1) iframe_src, headers=headers, timeout=600
)
# logger.info("resp1::>> %s", resp1)
soup3 = BeautifulSoup(resp1, "lxml") soup3 = BeautifulSoup(resp1, "lxml")
# packed_pattern = re.compile(r'\\{*(eval.+)*\\}', re.MULTILINE | re.DOTALL) # packed_pattern = re.compile(r'\\{*(eval.+)*\\}', re.MULTILINE | re.DOTALL)
s_pattern = re.compile(r"(eval.+)", re.MULTILINE | re.DOTALL) s_pattern = re.compile(r"(eval.+)", re.MULTILINE | re.DOTALL)
@@ -1142,7 +1292,9 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
logger.debug(type(packed_script)) logger.debug(type(packed_script))
unpack_script = jsbeautifier.beautify(str(packed_script)) unpack_script = jsbeautifier.beautify(str(packed_script))
p1 = re.compile(r"(\"tracks\".*\])\,\"captions\"", re.MULTILINE | re.DOTALL) p1 = re.compile(
r"(\"tracks\".*\])\,\"captions\"", re.MULTILINE | re.DOTALL
)
m2 = re.search( m2 = re.search(
r"(\"tracks\".*\]).*\"captions\"", r"(\"tracks\".*\]).*\"captions\"",
unpack_script, unpack_script,
@@ -1160,7 +1312,10 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
video_hash = iframe_src.split("/") video_hash = iframe_src.split("/")
video_hashcode = re.sub(r"index\.php\?data=", "", video_hash[-1]) video_hashcode = re.sub(r"index\.php\?data=", "", video_hash[-1])
self._vi = video_hashcode self._vi = video_hashcode
logger.debug(f"video_hash::> {video_hash}")
video_info_url = f"{video_hash[0]}//{video_hash[2]}/player/index.php?data={video_hashcode}&do=getVideo" video_info_url = f"{video_hash[0]}//{video_hash[2]}/player/index.php?data={video_hashcode}&do=getVideo"
# video_info_url = f"{video_hash[0]}//michealcdn.com/player/index.php?data={video_hashcode}&do=getVideo"
# print('hash:::', video_hash) # print('hash:::', video_hash)
logger.debug(f"video_info_url::: {video_info_url}") logger.debug(f"video_info_url::: {video_info_url}")
@@ -1169,10 +1324,11 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) " "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) "
"Chrome/96.0.4664.110 Whale/3.12.129.46 Safari/537.36" "Chrome/96.0.4664.110 Whale/3.12.129.46 Safari/537.36"
"Mozilla/5.0 (Macintosh; Intel " "Mozilla/5.0 (Macintosh; Intel "
"Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.110 " "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) "
"Chrome/116.0.0.0 Safari/537.36"
"Whale/3.12.129.46 Safari/537.36", "Whale/3.12.129.46 Safari/537.36",
"X-Requested-With": "XMLHttpRequest", "X-Requested-With": "XMLHttpRequest",
"Cookie": "PHPSESSID=hhhnrora8o9omv1tljq4efv216; 2a0d2363701f23f8a75028924a3af643=NDkuMTYzLjExMS4xMDk=; e1192aefb64683cc97abb83c71057733=aW5n", "Cookie": "PHPSESSID=b6hnl2crfvtg36sm6rjjkso4p0; 2a0d2363701f23f8a75028924a3af643=MTgwLjY2LjIyMi4xODk%3D; _ga=GA1.1.586565509.1695135593; __gads=ID=60e47defb3337e02-227f0fc9e3e3009a:T=1695135593:RT=1695135593:S=ALNI_MagY46XGCbx9E4Et2DRzfUHdTAKsg; __gpi=UID=00000c4bb3d077c8:T=1695135593:RT=1695135593:S=ALNI_MYvj_8OjdhtGPEGoXhPsQWq1qye8Q; _ga_MWWDFMDJR0=GS1.1.1695135593.1.1.1695135599.0.0.0",
} }
payload = { payload = {
@@ -1202,7 +1358,15 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
} }
self.url = stream_info[1].strip() self.url = stream_info[1].strip()
match = re.compile(r'NAME="(?P<quality>.*?)"').search(stream_info[0]) logger.info(self.url)
if "anibeast.com" in self.url:
self.headers["Referer"] = iframe_src
if "crazypatutu.com" in self.url:
self.headers["Referer"] = iframe_src
match = re.compile(r'NAME="(?P<quality>.*?)"').search(
stream_info[0]
)
self.quality = "720P" self.quality = "720P"
if match is not None: if match is not None:
self.quality = match.group("quality") self.quality = match.group("quality")
@@ -1218,7 +1382,10 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
if match: if match:
self.content_title = match.group("title").strip() self.content_title = match.group("title").strip()
if "season" in match.groupdict() and match.group("season") is not None: if (
"season" in match.groupdict()
and match.group("season") is not None
):
self.season = int(match.group("season")) self.season = int(match.group("season"))
# epi_no = 1 # epi_no = 1
@@ -1251,7 +1418,9 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
) )
else: else:
folder_name = self.content_title folder_name = self.content_title
folder_name = Util.change_text_for_use_filename(folder_name.strip()) folder_name = Util.change_text_for_use_filename(
folder_name.strip()
)
self.savepath = os.path.join(self.savepath, folder_name) self.savepath = os.path.join(self.savepath, folder_name)
if P.ModelSetting.get_bool("ohli24_auto_make_season_folder"): if P.ModelSetting.get_bool("ohli24_auto_make_season_folder"):
self.savepath = os.path.join( self.savepath = os.path.join(
@@ -1281,7 +1450,9 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
class ModelOhli24Item(db.Model): class ModelOhli24Item(db.Model):
__tablename__ = "{package_name}_ohli24_item".format(package_name=P.package_name) __tablename__ = "{package_name}_ohli24_item".format(
package_name=P.package_name
)
__table_args__ = {"mysql_collate": "utf8_general_ci"} __table_args__ = {"mysql_collate": "utf8_general_ci"}
__bind_key__ = P.package_name __bind_key__ = P.package_name
id = db.Column(db.Integer, primary_key=True) id = db.Column(db.Integer, primary_key=True)
@@ -1366,20 +1537,26 @@ class ModelOhli24Item(db.Model):
conditions = [] conditions = []
for tt in tmp: for tt in tmp:
if tt != "": if tt != "":
conditions.append(cls.filename.like("%" + tt.strip() + "%")) conditions.append(
cls.filename.like("%" + tt.strip() + "%")
)
query = query.filter(or_(*conditions)) query = query.filter(or_(*conditions))
elif search.find(",") != -1: elif search.find(",") != -1:
tmp = search.split(",") tmp = search.split(",")
for tt in tmp: for tt in tmp:
if tt != "": if tt != "":
query = query.filter(cls.filename.like("%" + tt.strip() + "%")) query = query.filter(
cls.filename.like("%" + tt.strip() + "%")
)
else: else:
query = query.filter(cls.filename.like("%" + search + "%")) query = query.filter(cls.filename.like("%" + search + "%"))
if option == "completed": if option == "completed":
query = query.filter(cls.status == "completed") query = query.filter(cls.status == "completed")
query = ( query = (
query.order_by(desc(cls.id)) if order == "desc" else query.order_by(cls.id) query.order_by(desc(cls.id))
if order == "desc"
else query.order_by(cls.id)
) )
return query return query

67
static/js/sjva_global1.js Normal file
View File

@@ -0,0 +1,67 @@
// Fetch page *page* of the current sub-module's item list and re-render it.
// Serializes #form_search via get_formdata(), POSTs to
// /<package_name>/ajax/<sub>/web_list, then rebuilds the list and the pager.
// move_top: when true, scroll back to the top of the page after rendering.
// Relies on globals supplied by the embedding template: package_name, sub,
// current_data, make_list, make_page_html.
function global_sub_request_search(page, move_top=true) {
    var formData = get_formdata('#form_search')
    formData += '&page=' + page;
    $.ajax({
        url: '/' + package_name + '/ajax/' + sub + '/web_list',
        type: "POST",
        cache: false,
        data: formData,
        dataType: "json",
        success: function (data) {
            current_data = data;  // cache last response for row-level button handlers
            if (move_top)
                window.scrollTo(0,0);
            make_list(data.list)
            make_page_html(data.paging)
        }
    });
}
// Serialize the form *form_id* into a query string, normalizing checkboxes.
// jQuery's serialize() omits unchecked checkboxes entirely, so every checkbox
// value is first forced to the literal 'True'/'False', and unchecked boxes are
// appended manually afterwards. Any global_scheduler field is then stripped —
// presumably that flag is handled by a separate endpoint (TODO confirm).
function get_formdata(form_id) {
    // Values may arrive as on/off, so unify them all to True/False first;
    // unchecked boxes contribute no value below, so they are added manually.
    var checkboxs = $(form_id + ' input[type=checkbox]');
    //for (var i in checkboxs) {
    for (var i =0 ; i < checkboxs.length; i++) {
        if ( $(checkboxs[i]).is(':checked') ) {
            $(checkboxs[i]).val('True');
        } else {
            $(checkboxs[i]).val('False');
        }
    }
    var formData = $(form_id).serialize();
    // Append 'False' for every checkbox serialize() skipped (the unchecked ones).
    $.each($(form_id + ' input[type=checkbox]')
        .filter(function(idx) {
            return $(this).prop('checked') === false
        }),
        function(idx, el) {
            var emptyVal = "False";
            formData += '&' + $(el).attr('name') + '=' + emptyVal;
        }
    );
    // Remove the global_scheduler field whatever its state/position.
    // NOTE: String.replace with a string removes only the first occurrence —
    // fine here since the field appears at most once.
    formData = formData.replace("&global_scheduler=True", "")
    formData = formData.replace("&global_scheduler=False", "")
    formData = formData.replace("global_scheduler=True&", "")
    formData = formData.replace("global_scheduler=False&", "")
    return formData;
}
// Variant of global_sub_request_search targeting the web_list2 endpoint.
// NOTE(review): depends on getFormdata, PACKAGE_NAME and MODULE_NAME, none of
// which are defined in this file (this file defines snake_case
// get_formdata/package_name) — confirm the camel-case globals exist wherever
// this function is actually used.
function globalRequestSearch2(page, move_top = true) {
    var formData = getFormdata("#form_search")
    formData += "&page=" + page
    console.log(formData)
    $.ajax({
        url: "/" + PACKAGE_NAME + "/ajax/" + MODULE_NAME + "/web_list2",
        type: "POST",
        cache: false,
        data: formData,
        dataType: "json",
        success: function (data) {
            current_data = data
            if (move_top) window.scrollTo(0, 0)
            make_list(data.list)
            make_page_html(data.paging)
        },
    })
}

204
static/js/sjva_ui14.js Normal file
View File

@@ -0,0 +1,204 @@
// Open a Bootstrap row with symmetric vertical padding and the given item alignment.
function m_row_start(padding = '10', align = 'center') {
    return `<div class="row" style="padding-top: ${padding}px; padding-bottom:${padding}px; align-items:${align};">`;
}
// Open a row carrying the 'my_hover' highlight class in addition to the normal row styling.
function m_row_start_hover(padding = '10', align = 'center') {
    return `<div class="row my_hover" style="padding-top: ${padding}px; padding-bottom:${padding}px; align-items:${align};">`;
}
// Convenience wrapper: open a row whose items align to the top edge.
function m_row_start_top(padding='10') {
    return m_row_start(padding, 'top');
}
// Open a row with an explicit background color (empty color leaves the style blank).
function m_row_start_color(padding = '10', align = 'center', color = '') {
    return `<div class="row" style="padding-top: ${padding}px; padding-bottom:${padding}px; align-items:${align}; background-color:${color}">`;
}
// Open a dark-themed row (Bootstrap bg-dark + text-white).
function m_row_start_color2(padding = '10', align = 'center') {
    return `<div class="row bg-dark text-white" style="padding-top: ${padding}px; padding-bottom:${padding}px; align-items:${align};">`;
}
// Close an element opened by any of the m_row_start* helpers.
function m_row_end() {
    var str = '</div>';
    return str;
}
//border
// Wrap *h* in a col-sm-<w> column with the given text alignment.
// (The stray space after the width class is kept for markup compatibility.)
function m_col(w, h, align = 'left') {
    return `<div class="col-sm-${w} " style="text-align: ${align}; word-break:break-all;">${h}</div>`;
}
// Like m_col but with tighter spacing (5px padding, no margin).
function m_col2(w, h, align = 'left') {
    return `<div class="col-sm-${w} " style="padding:5px; margin:0px; text-align: ${align}; word-break:break-all;">${h}</div>`;
}
// Wrap button markup *h* in a small wrapping Bootstrap button group.
function m_button_group(h) {
    return `<div class="btn-group btn-group-sm flex-wrap mr-2" role="group">${h}</div>`;
}
// Small outline-success button. *data* is a list of {key, value} pairs, each
// emitted as a data-<key>="<value>" attribute.
function m_button(id, text, data) {
    let html = `<button id="${id}" name="${id}" class="btn btn-sm btn-outline-success" `;
    // for-in (not for-of) deliberately kept: it tolerates null/undefined data.
    for (const i in data) {
        html += ` data-${data[i].key}="${data[i].value}" `;
    }
    return html + `>${text}</button>`;
}
// Like m_button but with a caller-chosen outline color (success/danger/...).
function m_button2(id, text, data, outline_color) {
    let html = `<button id="${id}" name="${id}" class="btn btn-sm btn-outline-${outline_color}" `;
    for (const i in data) {
        html += ` data-${data[i].key}="${data[i].value}" `;
    }
    return html + `>${text}</button>`;
}
// Full-width horizontal rule with configurable margin (px).
function m_hr(margin = '5') {
    return `<hr style="width: 100%; margin:${margin}px;" />`;
}
// Full-width solid black 2px horizontal rule (strong visual separator).
function m_hr_black() {
    var str = '<hr style="width: 100%; color: black; height: 2px; background-color:black;" />';
    return str;
}
// NOTE: checkboxes built from JavaScript do not pick up the on/off toggle script.
// Fill the shared large modal with *data* and show it.
// data: payload to display; when json=true it is pretty-printed with
//       JSON.stringify (2-space indent), otherwise inserted as-is.
// title: header text of the modal.
// Side effects only (DOM writes + jQuery modal); returns undefined.
function m_modal(data = 'EMPTY', title = 'JSON', json = true) {
    document.getElementById("modal_title").innerHTML = title;
    if (json) {
        data = JSON.stringify(data, null, 2);
    }
    // Fix: dropped the stray ";;" (empty statement) that followed this line.
    document.getElementById("modal_body").innerHTML = "<pre>" + data + "</pre>";
    $("#large_modal").modal();
}
// Nav-tab header link for tab *name*; *active* adds Bootstrap's 'active' class.
function m_tab_head(name, active) {
    const cls = active ? 'nav-item nav-link active' : 'nav-item nav-link';
    return `<a class="${cls}" id="id_${name}" data-toggle="tab" href="#${name}" role="tab">${name}</a>`;
}
// Tab pane wrapping *content*; *active* makes it the initially visible pane.
function m_tab_content(name, content, active) {
    const cls = active ? 'tab-pane fade show active' : 'tab-pane fade show';
    return `<div class="${cls}" id="${name}" role="tabpanel" >${content}</div>`;
}
// Progress bar at *width* percent with an overlaid centered *label*.
function m_progress(id, width, label) {
    return [
        '<div class="progress" style="height: 25px;">',
        `<div id="${id}" class="progress-bar" style="background-color:yellow;width:${width}%"></div>`,
        `<div id="${id}_label" class="justify-content-center d-flex w-100 position-absolute" style="margin-top:2px">${label}</div>`,
        '</div>'
    ].join('');
}
// Same as m_progress but the label explicitly zeroes its margin first.
function m_progress2(id, width, label) {
    return [
        '<div class="progress" style="height: 25px;">',
        `<div id="${id}" class="progress-bar" style="background-color:yellow;width:${width}%"></div>`,
        `<div id="${id}_label" class="justify-content-center d-flex w-100 position-absolute" style="margin:0px; margin-top:2px">${label}</div>`,
        '</div>'
    ].join('');
}
// Build pager-button markup from *data* (paging info with fields start_page,
// last_page, current_page, prev_page, next_page) and inject it into both the
// #page1 (top) and #page2 (bottom) containers.
// Fix: `str` was assigned without any declaration, leaking an implicit global
// variable — it is now declared with var. Rendered markup is unchanged
// (whitespace inside the HTML string is insignificant to the browser).
function make_page_html(data) {
    var str = ' \
    <div class="d-inline-block"></div> \
    <div class="row mb-3"> \
        <div class="col-sm-12"> \
            <div class="btn-toolbar" style="justify-content: center;" role="toolbar" aria-label="Toolbar with button groups" > \
                <div class="btn-group btn-group-sm mr-2" role="group" aria-label="First group">'
    if (data.prev_page) {
        // "«" jumps to the page just before the current window
        str += '<button id="page" data-page="' + (data.start_page - 1) + '" type="button" class="btn btn-secondary">&laquo;</button>'
    }
    for (var i = data.start_page; i <= data.last_page; i++) {
        str += '<button id="page" data-page="' + i + '" type="button" class="btn btn-secondary" ';
        if (i == data.current_page) {
            str += 'disabled';  // the current page is not clickable
        }
        str += '>' + i + '</button>';
    }
    if (data.next_page) {
        // "»" jumps to the page just after the current window
        str += '<button id="page" data-page="' + (data.last_page + 1) + '" type="button" class="btn btn-secondary">&raquo;</button>'
    }
    str += '</div> \
            </div> \
        </div> \
    </div> \
    '
    document.getElementById("page1").innerHTML = str;
    document.getElementById("page2").innerHTML = str;
}
// Show or hide '#<div>_div' based on the checkbox '#<div>'.
// reverse=true inverts the mapping (checked hides, unchecked shows).
function use_collapse(div, reverse = false) {
    const checked = $('#' + div).prop('checked');
    const show = reverse ? !checked : checked;
    $('#' + div + '_div').collapse(show ? 'show' : 'hide');
}
// primary, secondary, success, danger, warning, info, light, dark, white
// General-purpose small button builder.
// color: Bootstrap color name (primary, secondary, success, danger, ...).
// outline: use the btn-outline-* variant instead of the solid one.
// small: shrink font and drop vertical padding.
// data: plain object emitted as data-<key>="<value>" attributes.
// _class: extra CSS classes appended after the color class.
function j_button(id, text, data = {}, color = 'primary', outline = true, small = false, _class = '') {
    const variant = outline ? `-outline-${color}` : `-${color}`;
    let html = `<button id="${id}" name="${id}" class="btn btn-sm btn${variant} ${_class}`;
    html += small ? ' py-0" style="font-size: 0.8em;"' : '" ';
    for (const key in data) {
        html += ` data-${key}="${data[key]}" `;
    }
    return html + `>${text}</button>`;
}

File diff suppressed because it is too large Load Diff

View File

@@ -2,9 +2,9 @@
{% block content %} {% block content %}
<div> <div>
<form id="form_search" class="form-inline" style="text-align:left"> <form id="form_search" class="form-inline" style="text-align:left">
<div class="container-fluid"> <div class="container-fluid">
<div class="row show-grid"> <div class="row show-grid">
<span class="col-md-4"> <span class="col-md-4">
<select id="order" name="order" class="form-control form-control-sm"> <select id="order" name="order" class="form-control form-control-sm">
<option value="desc">최근순</option> <option value="desc">최근순</option>
@@ -15,158 +15,191 @@
<option value="completed">완료</option> <option value="completed">완료</option>
</select> </select>
</span> </span>
<span class="col-md-8"> <span class="col-md-8">
<input id="search_word" name="search_word" class="form-control form-control-sm w-75" type="text" placeholder="" aria-label="Search"> <input id="search_word" name="search_word" class="form-control form-control-sm w-75" type="text"
placeholder="" aria-label="Search">
<button id="search" class="btn btn-sm btn-outline-success">검색</button> <button id="search" class="btn btn-sm btn-outline-success">검색</button>
<button id="reset_btn" class="btn btn-sm btn-outline-success">리셋</button> <button id="reset_btn" class="btn btn-sm btn-outline-success">리셋</button>
</span> </span>
</div> </div>
</div> </div>
</form> </form>
<div id='page1'></div> <div id='page1'></div>
{{ macros.m_hr_head_top() }} {{ macros.m_hr_head_top() }}
{{ macros.m_row_start('0') }} {{ macros.m_row_start('0') }}
{{ macros.m_col(2, macros.m_strong('Poster')) }} {{ macros.m_col(2, macros.m_strong('Poster')) }}
{{ macros.m_col(10, macros.m_strong('Info')) }} {{ macros.m_col(10, macros.m_strong('Info')) }}
{{ macros.m_row_end() }} {{ macros.m_row_end() }}
{{ macros.m_hr_head_bottom() }} {{ macros.m_hr_head_bottom() }}
<div id="list_div"></div> <div id="list_div"></div>
<div id='page2'></div> <div id='page2'></div>
</div> </div>
<script type="text/javascript"> <script type="text/javascript">
var package_name = "{{arg['package_name']}}"; var package_name = "{{arg['package_name']}}";
var sub = "{{arg['sub']}}"; var sub = "{{arg['sub']}}";
var current_data = null; var current_data = null;
$(document).ready(function(){ $(document).ready(function () {
global_sub_request_search('1'); global_sub_request_search('1');
}); });
$("#search").click(function(e) { $("#search").click(function (e) {
e.preventDefault(); e.preventDefault();
global_sub_request_search('1'); global_sub_request_search('1');
}); });
$("body").on('click', '#page', function(e){ $("body").on('click', '#page', function (e) {
e.preventDefault(); e.preventDefault();
global_sub_request_search($(this).data('page')); global_sub_request_search($(this).data('page'));
}); });
$("#reset_btn").click(function(e) { $("#reset_btn").click(function (e) {
e.preventDefault(); e.preventDefault();
document.getElementById("order").value = 'desc'; document.getElementById("order").value = 'desc';
document.getElementById("option").value = 'all'; document.getElementById("option").value = 'all';
document.getElementById("search_word").value = ''; document.getElementById("search_word").value = '';
global_sub_request_search('1') global_sub_request_search('1')
}); });
$("body").on('click', '#json_btn', function(e){ $("body").on('click', '#json_btn', function (e) {
e.preventDefault(); e.preventDefault();
var id = $(this).data('id'); var id = $(this).data('id');
for (i in current_data.list) { for (i in current_data.list) {
if (current_data.list[i].id == id) { if (current_data.list[i].id == id) {
m_modal(current_data.list[i]) m_modal(current_data.list[i])
} }
} }
}); });
$("body").on('click', '#self_search_btn', function(e){ $("body").on('click', '#self_search_btn', function (e) {
e.preventDefault(); e.preventDefault();
var search_word = $(this).data('title'); var search_word = $(this).data('title');
document.getElementById("search_word").value = search_word; document.getElementById("search_word").value = search_word;
global_sub_request_search('1') global_sub_request_search('1')
}); });
$("body").on('click', '#remove_btn', function(e) { $("body").on('click', '#remove_btn', function (e) {
e.preventDefault(); e.preventDefault();
id = $(this).data('id'); id = $(this).data('id');
$.ajax({ $.ajax({
url: '/'+package_name+'/ajax/'+sub+ '/db_remove', url: '/' + package_name + '/ajax/' + sub + '/db_remove',
type: "POST", type: "POST",
cache: false, cache: false,
data: {id:id}, data: {id: id},
dataType: "json", dataType: "json",
success: function (data) { success: function (data) {
if (data) { if (data) {
$.notify('<strong>삭제되었습니다.</strong>', { $.notify('<strong>삭제되었습니다.</strong>', {
type: 'success' type: 'success'
});
global_sub_request_search(current_data.paging.current_page, false)
} else {
$.notify('<strong>삭제 실패</strong>', {
type: 'warning'
});
}
}
}); });
global_sub_request_search(current_data.paging.current_page, false) });
} else {
$.notify('<strong>삭제 실패</strong>', { $("body").on('click', '#request_btn', function (e) {
type: 'warning' e.preventDefault();
}); var content_code = $(this).data('content_code');
} $(location).attr('href', '/' + package_name + '/' + sub + '/request?content_code=' + content_code)
});
function make_list(data) {
//console.log(data)
str = '';
for (i in data) {
//console.log(data[i])
str += m_row_start();
str += m_col(1, data[i].id);
tmp = (data[i].status == 'completed') ? '완료' : '미완료';
str += m_col(1, tmp);
tmp = data[i].created_time + '(추가)';
if (data[i].completed_time != null)
tmp += data[i].completed_time + '(완료)';
str += m_col(3, tmp)
tmp = data[i].savepath + '<br>' + data[i].filename + '<br><br>';
tmp2 = m_button('json_btn', 'JSON', [{'key': 'id', 'value': data[i].id}]);
tmp2 += m_button('request_btn', '작품 검색', [{'key': 'content_code', 'value': data[i].content_code}]);
tmp2 += m_button('self_search_btn', '목록 검색', [{'key': 'title', 'value': data[i].title}]);
tmp2 += m_button('remove_btn', '삭제', [{'key': 'id', 'value': data[i].id}]);
tmp += m_button_group(tmp2)
str += m_col(7, tmp)
str += m_row_end();
if (i != data.length - 1) str += m_hr();
}
document.getElementById("list_div").innerHTML = str;
} }
});
});
$("body").on('click', '#request_btn', function(e){
e.preventDefault();
var content_code = $(this).data('content_code');
$(location).attr('href', '/' + package_name + '/' + sub + '/request?content_code=' + content_code)
});
function make_list(data) {
//console.log(data)
str = '';
for (i in data) {
//console.log(data[i])
str += m_row_start();
str += m_col(1, data[i].id);
tmp = (data[i].status == 'completed') ? '완료' : '미완료';
str += m_col(1, tmp);
tmp = data[i].created_time + '(추가)';
if (data[i].completed_time != null)
tmp += data[i].completed_time + '(완료)';
str += m_col(3, tmp)
tmp = data[i].savepath + '<br>' + data[i].filename + '<br><br>';
tmp2 = m_button('json_btn', 'JSON', [{'key':'id', 'value':data[i].id}]);
tmp2 += m_button('request_btn', '작품 검색', [{'key':'content_code', 'value':data[i].content_code}]);
tmp2 += m_button('self_search_btn', '목록 검색', [{'key':'title', 'value':data[i].title}]);
tmp2 += m_button('remove_btn', '삭제', [{'key':'id', 'value':data[i].id}]);
tmp += m_button_group(tmp2)
str += m_col(7, tmp)
str += m_row_end();
if (i != data.length -1) str += m_hr();
}
document.getElementById("list_div").innerHTML = str;
}
</script> </script>
<style> <style>
body { body {
width: 100%; width: 100%;
/*height: 100vh;*/ /*height: 100vh;*/
/*display: flex;*/ /*display: flex;*/
align-items: center; align-items: center;
justify-content: center; justify-content: center;
background-size: 300% 300%; background-size: 300% 300%;
background-image: linear-gradient( background-image: linear-gradient(
-45deg, -45deg,
rgba(59,173,227,1) 0%, rgba(59, 173, 227, 1) 0%,
rgba(87,111,230,1) 25%, rgba(87, 111, 230, 1) 25%,
rgba(152,68,183,1) 51%, rgba(152, 68, 183, 1) 51%,
rgba(255,53,127,1) 100% rgba(255, 53, 127, 1) 100%
); );
animation: AnimateBG 20s ease infinite; animation: AnimateBG 20s ease infinite;
} }
#main_container {
background-color: white;
}
@keyframes AnimateBG { #main_container {
0%{background-position:0% 50%} background-color: white;
50%{background-position:100% 50%} }
100%{background-position:0% 50%}
} @keyframes AnimateBG {
0% {
background-position: 0% 50%
}
50% {
background-position: 100% 50%
}
100% {
background-position: 0% 50%
}
}
@media (min-width: 576px) {
.container {
max-width: 540px;
min-height: 1080px;
}
}
@media (min-width: 768px) {
.container {
max-width: 720px;
min-height: 1080px;
}
}
@media (min-width: 992px) {
.container {
max-width: 960px;
min-height: 1080px;
}
}
@media (min-width: 1200px) {
.container {
max-width: 1140px;
min-height: 1080px;
}
}
</style> </style>
{% endblock %} {% endblock %}

View File

@@ -120,9 +120,11 @@
} }
function make_program(data) { function make_program(data) {
current_data = data; current_data = data;
// console.log("current_data::", current_data) // console.log("current_data::", current_data)
str = ''; let str = '';
let tmp = '';
tmp = '<div class="form-inline">' tmp = '<div class="form-inline">'
tmp += m_button('check_download_btn', '선택 다운로드 추가', []); tmp += m_button('check_download_btn', '선택 다운로드 추가', []);
tmp += m_button('all_check_on_btn', '전체 선택', []); tmp += m_button('all_check_on_btn', '전체 선택', []);
@@ -143,18 +145,8 @@
tmp = '<img src="' + data.image + '" class="img-fluid">'; tmp = '<img src="' + data.image + '" class="img-fluid">';
str += m_col(3, tmp) str += m_col(3, tmp)
tmp = '' tmp = ''
tmp += m_row_start(2) + m_col(3, '제목', 'right') + m_col(9, data.title) + m_row_end(); // tmp += m_row_start(2) + m_col(3, '제목', 'right') + m_col(9, data.title) + m_row_end();
// tmp += m_row_start(2) + m_col(3, '제작사', 'right') + m_col(9, data.des._pub) + m_row_end(); tmp += '<div><p><b style="font-size: 15px; color: midnightblue">'+data.title+'</b></p></div>'
// tmp += m_row_start(2) + m_col(3, '감독', 'right') + m_col(9, data.des._dir) + m_row_end();
//
// tmp += m_row_start(2) + m_col(3, '원작', 'right') + m_col(9, data.des._otit) + m_row_end();
// tmp += m_row_start(2) + m_col(3, '장르', 'right') + m_col(9, data.des._tag) + m_row_end();
// tmp += m_row_start(2) + m_col(3, '분류', 'right') + m_col(9, data.des._classifi) + m_row_end();
// tmp += m_row_start(2) + m_col(3, '공식 방영일', 'right') + m_col(9, data.date+'('+data.day+')') + m_row_end();
// tmp += m_row_start(2) + m_col(3, '에피소드', 'right') + m_col(9, data.des._total_chapter ? data.des._total_chapter : '') + m_row_end();
// tmp += m_row_start(2) + m_col(3, '등급', 'right') + m_col(9, data.des._grade) + m_row_end();
// tmp += m_row_start(2) + m_col(3, '최근 방영일', 'right') + m_col(9, data.des._recent_date ? data.des._recent_date : '') + m_row_end();
// tmp += m_row_start(2) + m_col(3, '줄거리', 'right') + m_col(9, data.ser_description) + m_row_end();
tmp += "<div>" + data.des1 + "</div>" tmp += "<div>" + data.des1 + "</div>"
str += m_col(9, tmp) str += m_col(9, tmp)
@@ -184,15 +176,21 @@
} }
$(function () { $(function () {
console.log(params.wr_id) // console.log(params.wr_id)
console.log(findGetParameter('wr_id')) // console.log("{{arg['anilife_current_code']}}")
console.log(params.code) // console.log(findGetParameter('wr_id'))
if (params.code === '') { // console.log(params.code)
if (params.code === '' || params.code == null) {
// console.log('null')
dismissLoadingScreen()
return false;
} else { } else {
console.log('here')
document.getElementById("code").value = params.code document.getElementById("code").value = params.code
document.getElementById("analysis_btn").click(); document.getElementById("analysis_btn").click();
return; return true;
} }
if ("{{arg['anilife_current_code']}}" !== "") { if ("{{arg['anilife_current_code']}}" !== "") {
@@ -215,7 +213,7 @@
// 값이 공백이 아니면 분석 버튼 계속 누름 // 값이 공백이 아니면 분석 버튼 계속 누름
// {#document.getElementById("analysis_btn").click();#} // {#document.getElementById("analysis_btn").click();#}
} else { } else {
return false;
} }
}) })

View File

@@ -7,7 +7,7 @@
<nav> <nav>
{{ macros.m_tab_head_start() }} {{ macros.m_tab_head_start() }}
{{ macros.m_tab_head2('normal', '일반', true) }} {{ macros.m_tab_head2('normal', '일반', true) }}
{{ macros.m_tab_head2('auto', '홈화면 자동', false) }} {{ macros.m_tab_head2('auto', '자동 설정', false) }}
{{ macros.m_tab_head2('action', '기타', false) }} {{ macros.m_tab_head2('action', '기타', false) }}
{{ macros.m_tab_head_end() }} {{ macros.m_tab_head_end() }}
</nav> </nav>
@@ -28,9 +28,9 @@
{{ macros.m_tab_content_start('auto', false) }} {{ macros.m_tab_content_start('auto', false) }}
{{ macros.setting_global_scheduler_sub_button(arg['scheduler'], arg['is_running']) }} {{ macros.setting_global_scheduler_sub_button(arg['scheduler'], arg['is_running']) }}
{{ macros.setting_input_text('anilife_interval', '스케쥴링 실행 정보', value=arg['anilife_interval'], col='3', desc=['Inverval(minute 단위)이나 Cron 설정']) }} {{ macros.setting_input_text('anilife_interval', '스케쥴링 실행 정보', value=arg['anilife_interval'], col='4', desc=['Interval(minute 단위)이나 Cron 설정']) }}
{{ macros.setting_checkbox('anilife_auto_start', '시작시 자동실행', value=arg['anilife_auto_start'], desc='On : 시작시 자동으로 스케쥴러에 등록됩니다.') }} {{ macros.setting_checkbox('anilife_auto_start', '시작시 자동실행', value=arg['anilife_auto_start'], desc='On : 시작시 자동으로 스케쥴러에 등록 됩니다.') }}
{{ macros.setting_input_textarea('anilife_auto_code_list', '자동 다운로드할 작품 코드', desc=['all 입력시 모두 받기', '구분자 | 또는 엔터'], value=arg['anilife_auto_code_list'], row='10') }} {{ macros.setting_input_textarea('anilife_auto_code_list', '자동 다운로드할 작품 코드', desc=['구분자 | 또는 엔터'], value=arg['anilife_auto_code_list'], row='10') }}
{{ macros.setting_checkbox('anilife_auto_mode_all', '에피소드 모두 받기', value=arg['anilife_auto_mode_all'], desc=['On : 이전 에피소드를 모두 받습니다.', 'Off : 최신 에피소드만 받습니다.']) }} {{ macros.setting_checkbox('anilife_auto_mode_all', '에피소드 모두 받기', value=arg['anilife_auto_mode_all'], desc=['On : 이전 에피소드를 모두 받습니다.', 'Off : 최신 에피소드만 받습니다.']) }}
{{ macros.m_tab_content_end() }} {{ macros.m_tab_content_end() }}

View File

@@ -24,8 +24,8 @@
<div> <div>
<form id="program_list"> <form id="program_list">
{{ macros.setting_input_text_and_buttons('code', '작품 Code', {{ macros.setting_input_text_and_buttons('code', '작품 Code',
[['analysis_btn', '분석'], ['go_ohli24_btn', 'Go OHLI24']], desc='예) [['analysis_btn', '분석'], ['go_linkkf_btn', 'Go 링크 애니']], desc='예)
"https://ohli24.net/c/녹을 먹는 비스코" 나 "녹을 먹는 비스코"') }} "https://linkkf.app/코드" 나 "코"') }}
</form> </form>
<form id="program_auto_form"> <form id="program_auto_form">
<div id="episode_list"></div> <div id="episode_list"></div>
@@ -45,7 +45,7 @@
const package_name = "{{arg['package_name'] }}"; const package_name = "{{arg['package_name'] }}";
const sub = "{{arg['sub'] }}"; const sub = "{{arg['sub'] }}";
const ohli24_url = "{{arg['ohli24_url']}}"; const linkkf_url = "{{arg['linkkf_url']}}";
const params = new Proxy(new URLSearchParams(window.location.search), { const params = new Proxy(new URLSearchParams(window.location.search), {
@@ -132,7 +132,7 @@
str += tmp; str += tmp;
// program // program
// str += m_hr_black(); // str += m_hr_black();
str += "<div class='card p-lg-5 mt-md-3 p-md-3 border-light'>" str += "<div class='card p-lg-5 mt-md-3 p-md-3 mt-sm-3 p-sm-3 border-light'>"
str += m_row_start(0); str += m_row_start(0);
tmp = ""; tmp = "";
@@ -209,13 +209,13 @@
// {#document.getElementById("analysis_btn").click();#} // {#document.getElementById("analysis_btn").click();#}
} }
if ("{{arg['ohli24_current_code']}}" !== "") { if ("{{arg['linkkf_current_code']}}" !== "") {
if (params.code === null) { if (params.code === null) {
console.log('params.code === null') console.log('params.code === null')
document.getElementById("code").value = "{{arg['ohli24_current_code']}}"; document.getElementById("code").value = "{{arg['linkkf_current_code']}}";
} else if (params.code === '') { } else if (params.code === '') {
document.getElementById("code").value = "{{arg['ohli24_current_code']}}"; document.getElementById("code").value = "{{arg['linkkf_current_code']}}";
} else { } else {
console.log('params code exist') console.log('params code exist')
@@ -243,7 +243,9 @@
$("#analysis_btn").unbind("click").bind('click', function (e) { $("#analysis_btn").unbind("click").bind('click', function (e) {
e.preventDefault(); e.preventDefault();
e.stopPropagation() e.stopPropagation()
const button = document.getElementById('analysis_btn');
const code = document.getElementById("code").value const code = document.getElementById("code").value
button.setAttribute("disabled", "disabled");
console.log(code) console.log(code)
$.ajax({ $.ajax({
url: '/' + package_name + '/ajax/' + sub + '/analysis', url: '/' + package_name + '/ajax/' + sub + '/analysis',
@@ -256,6 +258,7 @@
// {#console.log(ret.code)#} // {#console.log(ret.code)#}
console.log(ret.data) console.log(ret.data)
make_program(ret.data) make_program(ret.data)
button.removeAttribute("disabled");
} else { } else {
$.notify('<strong>분석 실패</strong><br>' + ret.log, {type: 'warning'}); $.notify('<strong>분석 실패</strong><br>' + ret.log, {type: 'warning'});
} }
@@ -264,9 +267,9 @@
}); });
$("body").on('click', '#go_ohli24_btn', function (e) { $("body").on('click', '#go_linkkf_btn', function (e) {
e.preventDefault(); e.preventDefault();
window.open("{{arg['ohli24_url']}}", "_blank"); window.open("{{arg['linkkf_url']}}", "_blank");
}); });
$("body").on('click', '#all_check_on_btn', function (e) { $("body").on('click', '#all_check_on_btn', function (e) {

View File

@@ -7,7 +7,7 @@
<nav> <nav>
{{ macros.m_tab_head_start() }} {{ macros.m_tab_head_start() }}
{{ macros.m_tab_head2('normal', '일반', true) }} {{ macros.m_tab_head2('normal', '일반', true) }}
{{ macros.m_tab_head2('auto', '홈화면 자동', false) }} {{ macros.m_tab_head2('auto', '자동 설정', false) }}
{{ macros.m_tab_head2('action', '기타', false) }} {{ macros.m_tab_head2('action', '기타', false) }}
{{ macros.m_tab_head_end() }} {{ macros.m_tab_head_end() }}
</nav> </nav>
@@ -28,9 +28,9 @@
{{ macros.m_tab_content_start('auto', false) }} {{ macros.m_tab_content_start('auto', false) }}
{{ macros.setting_global_scheduler_sub_button(arg['scheduler'], arg['is_running']) }} {{ macros.setting_global_scheduler_sub_button(arg['scheduler'], arg['is_running']) }}
{{ macros.setting_input_text('linkkf_interval', '스케쥴링 실행 정보', value=arg['linkkf_interval'], col='3', desc=['Interval(minute 단위)이나 Cron 설정']) }} {{ macros.setting_input_text('linkkf_interval', '스케쥴링 실행 정보', value=arg['linkkf_interval'], col='4', desc=['Interval(minute 단위)이나 Cron 설정']) }}
{{ macros.setting_checkbox('linkkf_auto_start', '시작시 자동실행', value=arg['linkkf_auto_start'], desc='On : 시작시 자동으로 스케쥴러에 등록됩니다.') }} {{ macros.setting_checkbox('linkkf_auto_start', '시작시 자동실행', value=arg['linkkf_auto_start'], desc='On : 시작시 자동으로 스케쥴러에 등록됩니다.') }}
{{ macros.setting_input_textarea('linkkf_auto_code_list', '자동 다운로드할 작품 코드', desc=['all 입력시 모두 받기', '구분자 | 또는 엔터'], value=arg['linkkf_auto_code_list'], row='10') }} {{ macros.setting_input_textarea('linkkf_auto_code_list', '자동 다운로드할 작품 코드', desc=['구분자 | 또는 엔터'], value=arg['linkkf_auto_code_list'], row='10') }}
{{ macros.setting_checkbox('linkkf_auto_mode_all', '에피소드 모두 받기', value=arg['linkkf_auto_mode_all'], desc=['On : 이전 에피소드를 모두 받습니다.', 'Off : 최신 에피소드만 받습니다.']) }} {{ macros.setting_checkbox('linkkf_auto_mode_all', '에피소드 모두 받기', value=arg['linkkf_auto_mode_all'], desc=['On : 이전 에피소드를 모두 받습니다.', 'Off : 최신 에피소드만 받습니다.']) }}
{{ macros.m_tab_content_end() }} {{ macros.m_tab_content_end() }}

View File

@@ -24,6 +24,7 @@
{{ macros.setting_checkbox('ohli24_auto_make_season_folder', '시즌 폴더 생성', value=arg['ohli24_auto_make_season_folder'], desc=['On : Season 번호 폴더를 만듭니다.']) }} {{ macros.setting_checkbox('ohli24_auto_make_season_folder', '시즌 폴더 생성', value=arg['ohli24_auto_make_season_folder'], desc=['On : Season 번호 폴더를 만듭니다.']) }}
</div> </div>
{{ macros.setting_checkbox('ohli24_uncompleted_auto_enqueue', '자동으로 다시 받기', value=arg['ohli24_uncompleted_auto_enqueue'], desc=['On : 플러그인 로딩시 미완료인 항목은 자동으로 다시 받습니다.']) }} {{ macros.setting_checkbox('ohli24_uncompleted_auto_enqueue', '자동으로 다시 받기', value=arg['ohli24_uncompleted_auto_enqueue'], desc=['On : 플러그인 로딩시 미완료인 항목은 자동으로 다시 받습니다.']) }}
{{ macros.setting_checkbox('ohli24_discord_notify', '디스 코드 알림 받기', value=arg['ohli24_discord_notify'], desc=['On : 새로운 글이 올라올때 디스코드 알림을 보냅니다.']) }}
{{ macros.m_tab_content_end() }} {{ macros.m_tab_content_end() }}
{{ macros.m_tab_content_start('auto', false) }} {{ macros.m_tab_content_start('auto', false) }}

File diff suppressed because one or more lines are too long

94
test.py
View File

@@ -1,94 +0,0 @@
from playwright.sync_api import sync_playwright
from playwright.async_api import async_playwright
# from playwright_stealth import stealth_sync
import asyncio
import html_to_json
async def run(playwright):
    """Fetch a token-protected anilife m3u8 URL with WebKit and print the page.

    Launches a visible WebKit browser, loads ``url`` with browser-like
    headers, waits for network idle plus a 2s grace period, then converts
    the rendered HTML to JSON via ``html_to_json`` and prints it.

    :param playwright: an ``async_playwright`` driver object.
    """
    # Browser-like headers: the site checks for a plausible
    # User-Agent / Referer combination before serving content.
    headers = {
        "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/104.0.0.0 Safari/537.36",
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
        "Accept-Language": "ko-KR,ko;q=0.9,en-US;q=0.8,en;q=0.7",
        "Referer": "https://anilife.live/",
    }
    # NOTE(review): hard-coded, time-limited token URL — it expires; a fresh
    # link must be generated before re-running this script.
    url = "https://api-svr-01.anilife.live/m3u8/st/MDk5NzUyNjY3NTkwZTc3ZmYwMGRmNGIyMzk3MGZiNzU1YjBkNjk2YTFiMzJiZTVhZWZjYjg3NGY4YmE3NTkyMDRkNTU1Y2RjMDhkZTkwNWZiMzZiMTI3ZjE5Zjk0YzQ3MjgzYmUxMTIzZTM2OTllMWZlMzZjM2I1OTIxMmNkNmZmODUxOWZhY2JiMzUxYmE4ZjVjOTMyNzFiYzA0YWI1OTNjZWU0NzMwOTJmYTA4NGU1ZDM1YTlkODA5NzljOTMxNTVhYjlmMmQwMWIwOGMyMTg1N2UyOWJjYjZjN2UwNzJkNjBiOGQzNzc4NTZlZjlkNTQwMDQ5MjgyOGQzYjQxN2M1YmIzYmZiYWYwNGQ0M2U5YmIwMjc4NjgyN2I4M2M1ZDFjOWUxMjM3MjViZDJlZDM3MGI0ZmJkNDE2MThhYTY2N2JlZDllNjQwNTg4MGIxZjBmYTYzMTU4ZTJlZmI1Zg==/dKtKWqgJFnmS-1XShKtsaJWn_OMY1F_HdGDxH2w38mQ/1662826054"

    # headless=False: kept visible on purpose — the site's bot checks are
    # easier to pass with a real, rendered browser window.
    browser = await playwright.webkit.launch(headless=False)
    context = await browser.new_context(extra_http_headers=headers)
    page = await context.new_page()

    await page.goto(url, wait_until="networkidle")
    await page.wait_for_timeout(2000)

    # BUG FIX: BrowserContext.cookies is an async method — the original
    # assigned the bound method object instead of awaiting the call.
    cookies = await context.cookies()

    html_content = await page.content()
    output_json = html_to_json.convert(html_content)
    print(output_json)
    print(f"output_json:: {output_json['html'][0]['body'][0]['_value']}")
async def main():
    """Entry point: drive run() inside a managed async_playwright session."""
    async with async_playwright() as pw:
        await run(pw)
from loguru import logger
import snoop
class Calc:
    """Tiny demo class used to exercise the snoop tracing decorator."""

    @staticmethod
    @snoop
    def add(a, b):
        """Return the sum of *a* and *b* (execution traced by snoop)."""
        result = a + b
        return result
# Demo invocation: add() is a snoop-traced staticmethod, so this call
# prints an execution trace and evaluates to 3.
cal = Calc()
cal.add(1, 2)  # return 3

View File

@@ -1,38 +0,0 @@
import asyncio
from playwright.async_api import Playwright, async_playwright
async def run(playwright: Playwright) -> None:
    """Log in to sir.kr with Chromium and click the attendance ("출석") menu.

    SECURITY NOTE(review): the username and password are hard-coded below —
    move them to environment variables or a secrets store before this
    script is shared or committed.

    :param playwright: an ``async_playwright`` driver object.
    """
    browser = await playwright.chromium.launch(headless=False)
    context = await browser.new_context()
    # Open new page
    page = await context.new_page()
    # Go to https://sir.kr/
    await page.goto("https://sir.kr/")
    await asyncio.sleep(1)
    # Click the ID field (placeholder "아이디" = "user ID")
    await page.locator('[placeholder="아이디"]').click()
    # Fill in the user ID
    await page.locator('[placeholder="아이디"]').fill("tongki77")
    # Tab over to the password field
    await page.locator('[placeholder="아이디"]').press("Tab")
    # Fill in the password (placeholder "비밀번호" = "password")
    await page.locator('[placeholder="비밀번호"]').fill("sir98766")
    # Submit the login form ("로그인" = "log in")
    await page.locator('input:has-text("로그인")').click()
    # await expect(page).to_have_url("https://sir.kr/")
    # Click the attendance menu item ("출석 2"); the sleeps give the page
    # time to settle after login and after the click.
    await asyncio.sleep(2)
    await page.locator("text=출석 2").click()
    await asyncio.sleep(2)
    # ---------------------
    await context.close()
    await browser.close()
async def main() -> None:
    """Run the sir.kr attendance automation inside a Playwright session."""
    async with async_playwright() as pw:
        await run(pw)
# Guard the script entry point so importing this module does not
# immediately launch a browser session.
if __name__ == "__main__":
    asyncio.run(main())