linkkf 수정3

This commit is contained in:
2025-12-25 19:50:27 +09:00
parent 426df4ab43
commit 8f8ffb4937
29 changed files with 20496 additions and 1422 deletions

BIN
.DS_Store vendored

Binary file not shown.

163
.gitignore vendored
View File

@@ -1,2 +1,161 @@
*.pyo # Byte-compiled / optimized / DLL files
*.pyc __pycache__/
*.py[co]
*$py.class
venv
.DS_Store
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
#lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
#.env
#.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# FlaksFarm
config.yaml
lib2/
.vscode/
memo.txt
*.zip
flaskfarm.sh
export.sh
run.sh
pre_start.sh
*.code-workspace
false
*copy.py
*.sh
data/
tmp/
lib/support/site/tving.py
lib/support/site/wavve.py
lib/support/site/seezn.py
lib/support/site/kakaotv.py
*.bat
output/
*.mkv
playwright.ipynb
test.ipynb
unanalyzed.py
test.ipynb
.DS_Store

View File

@@ -164,9 +164,7 @@ class LogicLinkkf(PluginModuleBase):
data = self.get_anime_info(cate, page) data = self.get_anime_info(cate, page)
# self.current_data = data # self.current_data = data
return jsonify( return jsonify({"ret": "success", "cate": cate, "page": page, "data": data})
{"ret": "success", "cate": cate, "page": page, "data": data}
)
elif sub == "screen_movie_list": elif sub == "screen_movie_list":
try: try:
logger.debug("request:::> %s", request.form["page"]) logger.debug("request:::> %s", request.form["page"])
@@ -306,21 +304,14 @@ class LogicLinkkf(PluginModuleBase):
# str(x.strip().replace(" ", "")) # str(x.strip().replace(" ", ""))
# for x in whitelist_program.replace("\n", "|").split("|") # for x in whitelist_program.replace("\n", "|").split("|")
# ] # ]
whitelist_programs = [ whitelist_programs = [str(x.strip()) for x in whitelist_program.replace("\n", "|").split("|")]
str(x.strip()) for x in whitelist_program.replace("\n", "|").split("|")
]
if code not in whitelist_programs: if code not in whitelist_programs:
whitelist_programs.append(code) whitelist_programs.append(code)
whitelist_programs = filter( whitelist_programs = filter(lambda x: x != "", whitelist_programs) # remove blank code
lambda x: x != "", whitelist_programs
) # remove blank code
whitelist_program = "|".join(whitelist_programs) whitelist_program = "|".join(whitelist_programs)
entity = ( entity = (
db.session.query(P.ModelSetting) db.session.query(P.ModelSetting).filter_by(key="linkkf_auto_code_list").with_for_update().first()
.filter_by(key="linkkf_auto_code_list")
.with_for_update()
.first()
) )
entity.value = whitelist_program entity.value = whitelist_program
db.session.commit() db.session.commit()
@@ -341,12 +332,8 @@ class LogicLinkkf(PluginModuleBase):
return ret return ret
def setting_save_after(self): def setting_save_after(self):
if self.queue.get_max_ffmpeg_count() != P.ModelSetting.get_int( if self.queue.get_max_ffmpeg_count() != P.ModelSetting.get_int("linkkf_max_ffmpeg_process_count"):
"linkkf_max_ffmpeg_process_count" self.queue.set_max_ffmpeg_count(P.ModelSetting.get_int("linkkf_max_ffmpeg_process_count"))
):
self.queue.set_max_ffmpeg_count(
P.ModelSetting.get_int("linkkf_max_ffmpeg_process_count")
)
def get_video_url_from_url(url, url2): def get_video_url_from_url(url, url2):
video_url = None video_url = None
@@ -391,9 +378,7 @@ class LogicLinkkf(PluginModuleBase):
# print(vtt_elem) # print(vtt_elem)
match = re.compile( match = re.compile(r"<track.+src=\"(?P<vtt_url>.*?.vtt)\"", re.MULTILINE).search(data)
r"<track.+src=\"(?P<vtt_url>.*?.vtt)\"", re.MULTILINE
).search(data)
vtt_url = match.group("vtt_url") vtt_url = match.group("vtt_url")
@@ -422,14 +407,10 @@ class LogicLinkkf(PluginModuleBase):
# @k45734 # @k45734
vtt_url = None vtt_url = None
try: try:
_match1 = re.compile( _match1 = re.compile(r"<track.+src=\"(?P<vtt_url>.*?.vtt)", re.MULTILINE).search(data)
r"<track.+src=\"(?P<vtt_url>.*?.vtt)", re.MULTILINE
).search(data)
vtt_url = _match1.group("vtt_url") vtt_url = _match1.group("vtt_url")
except: except:
_match2 = re.compile( _match2 = re.compile(r"url: \'(?P<vtt_url>.*?.vtt)", re.MULTILINE).search(data)
r"url: \'(?P<vtt_url>.*?.vtt)", re.MULTILINE
).search(data)
vtt_url = _match2.group("vtt_url") vtt_url = _match2.group("vtt_url")
logger.info("vtt_url: %s", vtt_url) logger.info("vtt_url: %s", vtt_url)
@@ -505,15 +486,11 @@ class LogicLinkkf(PluginModuleBase):
elif "kakao" in url2: elif "kakao" in url2:
# kakao 계열 처리, 외부 API 이용 # kakao 계열 처리, 외부 API 이용
payload = {"inputUrl": url2} payload = {"inputUrl": url2}
kakao_url = ( kakao_url = "http://webtool.cusis.net/wp-pages/download-kakaotv-video/video.php"
"http://webtool.cusis.net/wp-pages/download-kakaotv-video/video.php"
)
data2 = requests.post( data2 = requests.post(
kakao_url, kakao_url,
json=payload, json=payload,
headers={ headers={"referer": "http://webtool.cusis.net/download-kakaotv-video/"},
"referer": "http://webtool.cusis.net/download-kakaotv-video/"
},
).content ).content
time.sleep(3) # 서버 부하 방지를 위해 단시간에 너무 많은 URL전송을 하면 IP를 차단합니다. time.sleep(3) # 서버 부하 방지를 위해 단시간에 너무 많은 URL전송을 하면 IP를 차단합니다.
url3 = json.loads(data2) url3 = json.loads(data2)
@@ -647,9 +624,7 @@ class LogicLinkkf(PluginModuleBase):
if pattern.match(js_script.text_content()): if pattern.match(js_script.text_content()):
# logger.debug("match::::") # logger.debug("match::::")
match_data = pattern.match(js_script.text_content()) match_data = pattern.match(js_script.text_content())
iframe_info = json.loads( iframe_info = json.loads(match_data.groups()[0].replace("path:", '"path":'))
match_data.groups()[0].replace("path:", '"path":')
)
# logger.debug(f"iframe_info:: {iframe_info}") # logger.debug(f"iframe_info:: {iframe_info}")
index += 1 index += 1
@@ -665,9 +640,13 @@ class LogicLinkkf(PluginModuleBase):
def get_anime_info(self, cate, page): def get_anime_info(self, cate, page):
try: try:
items_xpath = '//div[@class="ext-json-item"]'
title_xpath = ""
if cate == "ing": if cate == "ing":
url = f"{P.ModelSetting.get('linkkf_url')}/airing/page/{page}" # url = f"{P.ModelSetting.get('linkkf_url')}/airing/page/{page}"
items_xpath = '//div[@class="myui-vodlist__box"]' url = "https://linkkf.5imgdarr.top/api/singlefilter.php?categorytagid=1970&page=1&limit=20"
items_xpath = '//div[@class="ext-json-item"]'
title_xpath = './/a[@class="text-fff"]//text()' title_xpath = './/a[@class="text-fff"]//text()'
elif cate == "movie": elif cate == "movie":
url = f"{P.ModelSetting.get('linkkf_url')}/ani/page/{page}" url = f"{P.ModelSetting.get('linkkf_url')}/ani/page/{page}"
@@ -679,19 +658,39 @@ class LogicLinkkf(PluginModuleBase):
title_xpath = './/a[@class="text-fff"]//text()' title_xpath = './/a[@class="text-fff"]//text()'
elif cate == "top_view": elif cate == "top_view":
url = f"{P.ModelSetting.get('linkkf_url')}/topview/page/{page}" url = f"{P.ModelSetting.get('linkkf_url')}/topview/page/{page}"
items_xpath = '//div[@class="myui-vodlist__box"]' items_xpath = '//div[@class="ext-json-item"]'
title_xpath = './/a[@class="text-fff"]//text()' title_xpath = './/a[@class="text-fff"]//text()'
else:
url = "https://linkkf.5imgdarr.top/api/singlefilter.php?categorytagid=1970&page=1&limit=20"
logger.info("url:::> %s", url) logger.info("url:::> %s", url)
logger.info("test..........................") logger.info("test..........................")
# logger.info("test..........................") # logger.info("test..........................")
if self.referer is None: if self.referer is None:
self.referer = "https://linkkf.app" self.referer = "https://linkkf.live"
data = {"ret": "success", "page": page} data = {"ret": "success", "page": page}
response_data = LogicLinkkf.get_html(url, timeout=10) response_data = LogicLinkkf.get_html(url, timeout=10)
# P.logger.debug(response_data) # P.logger.debug(response_data)
P.logger.debug("debug.....................") P.logger.debug("debug.....................")
# P.logger.debug(response_data)
# JSON 응답인지 확인
try:
json_data = json.loads(response_data)
P.logger.debug("Response is JSON format")
P.logger.debug(json_data)
# JSON 데이터를 그대로 반환하거나 필요한 형태로 가공
if isinstance(json_data, dict):
return json_data
else:
data["episode"] = json_data if isinstance(json_data, list) else []
return data
except (json.JSONDecodeError, ValueError):
# HTML 응답인 경우
P.logger.debug("Response is HTML format, parsing...")
pass
tree = html.fromstring(response_data) tree = html.fromstring(response_data)
tmp_items = tree.xpath(items_xpath) tmp_items = tree.xpath(items_xpath)
@@ -709,9 +708,7 @@ class LogicLinkkf(PluginModuleBase):
entity["title"] = item.xpath(title_xpath)[0].strip() entity["title"] = item.xpath(title_xpath)[0].strip()
entity["image_link"] = item.xpath("./a/@data-original")[0] entity["image_link"] = item.xpath("./a/@data-original")[0]
entity["chapter"] = ( entity["chapter"] = (
item.xpath("./a/span//text()")[0].strip() item.xpath("./a/span//text()")[0].strip() if len(item.xpath("./a/span//text()")) > 0 else ""
if len(item.xpath("./a/span//text()")) > 0
else ""
) )
# logger.info('entity:::', entity['title']) # logger.info('entity:::', entity['title'])
data["episode"].append(entity) data["episode"].append(entity)
@@ -724,6 +721,44 @@ class LogicLinkkf(PluginModuleBase):
P.logger.error(traceback.format_exc()) P.logger.error(traceback.format_exc())
return {"ret": "exception", "log": str(e)} return {"ret": "exception", "log": str(e)}
def get_search_result(self, query, page, cate):
try:
_query = urllib.parse.quote(query)
url = f"{P.ModelSetting.get('linkkf_url')}/search/-------------.html?wd={_query}&page={page}"
logger.info("get_search_result()::url> %s", url)
data = {"ret": "success", "page": page}
response_data = LogicLinkkf.get_html(url, timeout=10)
tree = html.fromstring(response_data)
# linkkf 검색 결과는 일반 목록과 동일한 구조
tmp_items = tree.xpath('//div[@class="myui-vodlist__box"]')
data["episode_count"] = len(tmp_items)
data["episode"] = []
if tree.xpath('//div[@id="wp_page"]//text()'):
data["total_page"] = tree.xpath('//div[@id="wp_page"]//text()')[-1]
else:
data["total_page"] = 0
for item in tmp_items:
entity = {}
entity["link"] = item.xpath(".//a/@href")[0]
entity["code"] = re.search(r"[0-9]+", entity["link"]).group()
entity["title"] = item.xpath('.//a[@class="text-fff"]//text()')[0].strip()
entity["image_link"] = item.xpath("./a/@data-original")[0]
entity["chapter"] = (
item.xpath("./a/span//text()")[0].strip() if len(item.xpath("./a/span//text()")) > 0 else ""
)
data["episode"].append(entity)
return data
except Exception as e:
P.logger.error(f"Exception: {str(e)}")
P.logger.error(traceback.format_exc())
return {"ret": "exception", "log": str(e)}
def get_series_info(self, code): def get_series_info(self, code):
data = {"code": code, "ret": False} data = {"code": code, "ret": False}
try: try:
@@ -763,11 +798,7 @@ class LogicLinkkf(PluginModuleBase):
# logger.debug(f"tmp1 size:=> {str(len(tmp))}") # logger.debug(f"tmp1 size:=> {str(len(tmp))}")
try: try:
tmp = ( tmp = tree.xpath('//div[@class="hrecipe"]/article/center/strong')[0].text_content().strip()
tree.xpath('//div[@class="hrecipe"]/article/center/strong')[0]
.text_content()
.strip()
)
except IndexError: except IndexError:
tmp = tree.xpath("//article/center/strong")[0].text_content().strip() tmp = tree.xpath("//article/center/strong")[0].text_content().strip()
@@ -781,27 +812,13 @@ class LogicLinkkf(PluginModuleBase):
data["_id"] = str(code) data["_id"] = str(code)
data["title"] = tmp.replace(data["season"] + "", "").strip() data["title"] = tmp.replace(data["season"] + "", "").strip()
data["title"] = data["title"].replace("()", "").strip() data["title"] = data["title"].replace("()", "").strip()
data["title"] = ( data["title"] = Util.change_text_for_use_filename(data["title"]).replace("OVA", "").strip()
Util.change_text_for_use_filename(data["title"])
.replace("OVA", "")
.strip()
)
try: try:
data["poster_url"] = tree.xpath( data["poster_url"] = tree.xpath('//div[@class="myui-content__thumb"]/a/@data-original')
'//div[@class="myui-content__thumb"]/a/@data-original'
)
# print(tree.xpath('//div[@class="myui-content__detail"]/text()')) # print(tree.xpath('//div[@class="myui-content__detail"]/text()'))
if len(tree.xpath('//div[@class="myui-content__detail"]/text()')) > 3: if len(tree.xpath('//div[@class="myui-content__detail"]/text()')) > 3:
data["detail"] = [ data["detail"] = [{"info": str(tree.xpath("//div[@class='myui-content__detail']/text()")[3])}]
{
"info": str(
tree.xpath(
"//div[@class='myui-content__detail']/text()"
)[3]
)
}
]
else: else:
data["detail"] = [{"정보없음": ""}] data["detail"] = [{"정보없음": ""}]
except Exception as e: except Exception as e:
@@ -813,11 +830,7 @@ class LogicLinkkf(PluginModuleBase):
tag_score = tree.xpath('//span[@class="taq-score"]')[0].text_content() tag_score = tree.xpath('//span[@class="taq-score"]')[0].text_content()
# logger.debug(tag_score) # logger.debug(tag_score)
tag_count = ( tag_count = tree.xpath('//span[contains(@class, "taq-count")]')[0].text_content().strip()
tree.xpath('//span[contains(@class, "taq-count")]')[0]
.text_content()
.strip()
)
data_rate = tree.xpath('//div[@class="rating"]/div/@data-rate') data_rate = tree.xpath('//div[@class="rating"]/div/@data-rate')
tmp2 = soup.select("ul > a") tmp2 = soup.select("ul > a")
@@ -860,9 +873,7 @@ class LogicLinkkf(PluginModuleBase):
"_id": data["code"], "_id": data["code"],
"program_code": data["code"], "program_code": data["code"],
"program_title": data["title"], "program_title": data["title"],
"save_folder": Util.change_text_for_use_filename( "save_folder": Util.change_text_for_use_filename(data["save_folder"]),
data["save_folder"]
),
"title": t.text.strip(), "title": t.text.strip(),
# "title": t.text_content().strip(), # "title": t.text_content().strip(),
} }
@@ -899,15 +910,11 @@ class LogicLinkkf(PluginModuleBase):
program_path = os.path.join(tmp_save_path, entity["save_folder"]) program_path = os.path.join(tmp_save_path, entity["save_folder"])
entity["save_path"] = program_path entity["save_path"] = program_path
if P.ModelSetting.get("linkkf_auto_make_season_folder"): if P.ModelSetting.get("linkkf_auto_make_season_folder"):
entity["save_path"] = os.path.join( entity["save_path"] = os.path.join(entity["save_path"], "Season %s" % int(entity["season"]))
entity["save_path"], "Season %s" % int(entity["season"])
)
entity["image"] = data["poster_url"] entity["image"] = data["poster_url"]
entity["filename"] = LogicLinkkf.get_filename( entity["filename"] = LogicLinkkf.get_filename(data["save_folder"], data["season"], entity["title"])
data["save_folder"], data["season"], entity["title"]
)
data["episode"].append(entity) data["episode"].append(entity)
idx = idx + 1 idx = idx + 1
@@ -966,11 +973,7 @@ class LogicLinkkf(PluginModuleBase):
else: else:
entity["image_link"] = "" entity["image_link"] = ""
# entity["image_link"] = item.xpath("./a/@data-original")[0] # entity["image_link"] = item.xpath("./a/@data-original")[0]
entity["chapter"] = ( entity["chapter"] = item.xpath("./a/span//text()")[0] if len(item.xpath("./a/span//text()")) > 0 else ""
item.xpath("./a/span//text()")[0]
if len(item.xpath("./a/span//text()")) > 0
else ""
)
# logger.info('entity:::', entity['title']) # logger.info('entity:::', entity['title'])
data["episode"].append(entity) data["episode"].append(entity)
@@ -1011,9 +1014,7 @@ class LogicLinkkf(PluginModuleBase):
# logger.debug('get_html :%s', url) # logger.debug('get_html :%s', url)
headers["Referer"] = "" if referer is None else referer headers["Referer"] = "" if referer is None else referer
page_content = LogicOhli24.session.get( page_content = LogicOhli24.session.get(url, headers=headers, timeout=timeout)
url, headers=headers, timeout=timeout
)
data = page_content.text data = page_content.text
except Exception as e: except Exception as e:
logger.error("Exception:%s", e) logger.error("Exception:%s", e)
@@ -1054,9 +1055,7 @@ class LogicLinkkf(PluginModuleBase):
# logger.debug("get_filename()===") # logger.debug("get_filename()===")
# logger.info("title:: %s", title) # logger.info("title:: %s", title)
# logger.info("maintitle:: %s", maintitle) # logger.info("maintitle:: %s", maintitle)
match = re.compile( match = re.compile(r"(?P<title>.*?)\s?((?P<season>\d+)기)?\s?((?P<epi_no>\d+)화?)").search(title)
r"(?P<title>.*?)\s?((?P<season>\d+)기)?\s?((?P<epi_no>\d+)화?)"
).search(title)
if match: if match:
epi_no = int(match.group("epi_no")) epi_no = int(match.group("epi_no"))
if epi_no < 10: if epi_no < 10:
@@ -1152,9 +1151,7 @@ class LogicLinkkf(PluginModuleBase):
try: try:
logger.debug("%s plugin_load", P.package_name) logger.debug("%s plugin_load", P.package_name)
# old version # old version
self.queue = FfmpegQueue( self.queue = FfmpegQueue(P, P.ModelSetting.get_int("linkkf_max_ffmpeg_process_count"))
P, P.ModelSetting.get_int("linkkf_max_ffmpeg_process_count")
)
self.current_data = None self.current_data = None
self.queue.queue_start() self.queue.queue_start()
@@ -1179,9 +1176,7 @@ class LogicLinkkf(PluginModuleBase):
try: try:
while True: while True:
logger.debug(self.current_download_count) logger.debug(self.current_download_count)
if self.current_download_count < P.ModelSetting.get_int( if self.current_download_count < P.ModelSetting.get_int(f"{self.name}_max_download_count"):
f"{self.name}_max_download_count"
):
break break
time.sleep(5) time.sleep(5)
@@ -1343,9 +1338,7 @@ class ModelLinkkfItem(db.Model):
ret = {x.name: getattr(self, x.name) for x in self.__table__.columns} ret = {x.name: getattr(self, x.name) for x in self.__table__.columns}
ret["created_time"] = self.created_time.strftime("%Y-%m-%d %H:%M:%S") ret["created_time"] = self.created_time.strftime("%Y-%m-%d %H:%M:%S")
ret["completed_time"] = ( ret["completed_time"] = (
self.completed_time.strftime("%Y-%m-%d %H:%M:%S") self.completed_time.strftime("%Y-%m-%d %H:%M:%S") if self.completed_time is not None else None
if self.completed_time is not None
else None
) )
return ret return ret

View File

@@ -34,9 +34,7 @@ for pkg in pkgs:
importlib.import_module(pkg) importlib.import_module(pkg)
# except ImportError: # except ImportError:
except ImportError: except ImportError:
subprocess.check_call( subprocess.check_call([sys.executable, "-m", "pip", "install", "--upgrade", "pip"])
[sys.executable, "-m", "pip", "install", "--upgrade", "pip"]
)
# main(["install", pkg]) # main(["install", pkg])
subprocess.check_call([sys.executable, "-m", "pip", "install", pkg]) subprocess.check_call([sys.executable, "-m", "pip", "install", pkg])
importlib.import_module(pkg) importlib.import_module(pkg)
@@ -175,18 +173,14 @@ class LogicOhli24(PluginModuleBase):
elif sub == "anime_list": elif sub == "anime_list":
data = self.get_anime_info(cate, page) data = self.get_anime_info(cate, page)
return jsonify( return jsonify({"ret": "success", "cate": cate, "page": page, "data": data})
{"ret": "success", "cate": cate, "page": page, "data": data}
)
elif sub == "complete_list": elif sub == "complete_list":
logger.debug("cate:: %s", cate) logger.debug("cate:: %s", cate)
page = request.form["page"] page = request.form["page"]
data = self.get_anime_info(cate, page) data = self.get_anime_info(cate, page)
return jsonify( return jsonify({"ret": "success", "cate": cate, "page": page, "data": data})
{"ret": "success", "cate": cate, "page": page, "data": data}
)
elif sub == "search": elif sub == "search":
query = request.form["query"] query = request.form["query"]
@@ -230,9 +224,7 @@ class LogicOhli24(PluginModuleBase):
print(sub) print(sub)
return {"test"} return {"test"}
elif sub == "queue_command": elif sub == "queue_command":
ret = self.queue.command( ret = self.queue.command(req.form["command"], int(req.form["entity_id"]))
req.form["command"], int(req.form["entity_id"])
)
return jsonify(ret) return jsonify(ret)
elif sub == "add_queue_checked_list": elif sub == "add_queue_checked_list":
data = json.loads(request.form["data"]) data = json.loads(request.form["data"])
@@ -248,9 +240,7 @@ class LogicOhli24(PluginModuleBase):
"type": "success", "type": "success",
"msg": "%s 개의 에피소드를 큐에 추가 하였습니다." % count, "msg": "%s 개의 에피소드를 큐에 추가 하였습니다." % count,
} }
socketio.emit( socketio.emit("notify", notify, namespace="/framework", broadcast=True)
"notify", notify, namespace="/framework", broadcast=True
)
thread = threading.Thread(target=func, args=()) thread = threading.Thread(target=func, args=())
thread.daemon = True thread.daemon = True
@@ -304,27 +294,25 @@ class LogicOhli24(PluginModuleBase):
if command == "queue_list": if command == "queue_list":
logger.debug("queue_list") logger.debug("queue_list")
logger.debug( logger.debug(f"self.queue.get_entity_list():: {self.queue.get_entity_list()}")
f"self.queue.get_entity_list():: {self.queue.get_entity_list()}"
)
ret = [x for x in self.queue.get_entity_list()] ret = [x for x in self.queue.get_entity_list()]
return ret return ret
elif command == "download_program": elif command == "download_program":
_pass = arg2 _pass = arg2
db_item = ModelOhli24Program.get(arg1) db_item = ModelOhli24Program.get(arg1)
if _pass == "false" and db_item != None: if _pass == "false" and db_item is not None:
ret["ret"] = "warning" ret["ret"] = "warning"
ret["msg"] = "이미 DB에 있는 항목 입니다." ret["msg"] = "이미 DB에 있는 항목 입니다."
elif ( elif (
_pass == "true" _pass == "true"
and db_item != None and db_item is not None
and ModelOhli24Program.get_by_id_in_queue(db_item.id) != None and ModelOhli24Program.get_by_id_in_queue(db_item.id) is not None
): ):
ret["ret"] = "warning" ret["ret"] = "warning"
ret["msg"] = "이미 큐에 있는 항목 입니다." ret["msg"] = "이미 큐에 있는 항목 입니다."
else: else:
if db_item == None: if db_item is None:
db_item = ModelOhli24Program(arg1, self.get_episode(arg1)) db_item = ModelOhli24Program(arg1, self.get_episode(arg1))
db_item.save() db_item.save()
db_item.init_for_queue() db_item.init_for_queue()
@@ -370,21 +358,14 @@ class LogicOhli24(PluginModuleBase):
# str(x.strip().replace(" ", "")) # str(x.strip().replace(" ", ""))
# for x in whitelist_program.replace("\n", "|").split("|") # for x in whitelist_program.replace("\n", "|").split("|")
# ] # ]
whitelist_programs = [ whitelist_programs = [str(x.strip()) for x in whitelist_program.replace("\n", "|").split("|")]
str(x.strip()) for x in whitelist_program.replace("\n", "|").split("|")
]
if code not in whitelist_programs: if code not in whitelist_programs:
whitelist_programs.append(code) whitelist_programs.append(code)
whitelist_programs = filter( whitelist_programs = filter(lambda x: x != "", whitelist_programs) # remove blank code
lambda x: x != "", whitelist_programs
) # remove blank code
whitelist_program = "|".join(whitelist_programs) whitelist_program = "|".join(whitelist_programs)
entity = ( entity = (
db.session.query(P.ModelSetting) db.session.query(P.ModelSetting).filter_by(key="ohli24_auto_code_list").with_for_update().first()
.filter_by(key="ohli24_auto_code_list")
.with_for_update()
.first()
) )
entity.value = whitelist_program entity.value = whitelist_program
db.session.commit() db.session.commit()
@@ -405,12 +386,8 @@ class LogicOhli24(PluginModuleBase):
return ret return ret
def setting_save_after(self, change_list): def setting_save_after(self, change_list):
if self.queue.get_max_ffmpeg_count() != P.ModelSetting.get_int( if self.queue.get_max_ffmpeg_count() != P.ModelSetting.get_int("ohli24_max_ffmpeg_process_count"):
"ohli24_max_ffmpeg_process_count" self.queue.set_max_ffmpeg_count(P.ModelSetting.get_int("ohli24_max_ffmpeg_process_count"))
):
self.queue.set_max_ffmpeg_count(
P.ModelSetting.get_int("ohli24_max_ffmpeg_process_count")
)
def scheduler_function(self): def scheduler_function(self):
# Todo: 스케쥴링 함수 미구현 # Todo: 스케쥴링 함수 미구현
@@ -488,11 +465,7 @@ class LogicOhli24(PluginModuleBase):
code = urllib.parse.quote(code) code = urllib.parse.quote(code)
try: try:
if ( if self.current_data is not None and "code" in self.current_data and self.current_data["code"] == code:
self.current_data is not None
and "code" in self.current_data
and self.current_data["code"] == code
):
return self.current_data return self.current_data
if code.startswith("http"): if code.startswith("http"):
@@ -522,13 +495,7 @@ class LogicOhli24(PluginModuleBase):
if wr_id is not None: if wr_id is not None:
# print(len(wr_id)) # print(len(wr_id))
if len(wr_id) > 0: if len(wr_id) > 0:
url = ( url = P.ModelSetting.get("ohli24_url") + "/bbs/board.php?bo_table=" + bo_table + "&wr_id=" + wr_id
P.ModelSetting.get("ohli24_url")
+ "/bbs/board.php?bo_table="
+ bo_table
+ "&wr_id="
+ wr_id
)
else: else:
pass pass
@@ -586,10 +553,7 @@ class LogicOhli24(PluginModuleBase):
title = li.xpath(".//a/text()")[0].strip() title = li.xpath(".//a/text()")[0].strip()
thumbnail = image thumbnail = image
# logger.info(li.xpath('//a[@class="item-subject"]/@href')) # logger.info(li.xpath('//a[@class="item-subject"]/@href'))
link = ( link = P.ModelSetting.get("ohli24_url") + li.xpath('.//a[@class="item-subject"]/@href')[0]
P.ModelSetting.get("ohli24_url")
+ li.xpath('.//a[@class="item-subject"]/@href')[0]
)
# logger.debug(f"link:: {link}") # logger.debug(f"link:: {link}")
_date = li.xpath('.//div[@class="wr-date"]/text()')[0] _date = li.xpath('.//div[@class="wr-date"]/text()')[0]
m = hashlib.md5(title.encode("utf-8")) m = hashlib.md5(title.encode("utf-8"))
@@ -626,9 +590,7 @@ class LogicOhli24(PluginModuleBase):
# logger.info("images:: %s", image) # logger.info("images:: %s", image)
logger.info("title:: %s", title) logger.info("title:: %s", title)
ser_description = tree.xpath( ser_description = tree.xpath('//div[@class="view-stocon"]/div[@class="c"]/text()')
'//div[@class="view-stocon"]/div[@class="c"]/text()'
)
data = { data = {
"title": title, "title": title,
@@ -655,29 +617,11 @@ class LogicOhli24(PluginModuleBase):
print(cate, page) print(cate, page)
try: try:
if cate == "ing": if cate == "ing":
url = ( url = P.ModelSetting.get("ohli24_url") + "/bbs/board.php?bo_table=" + cate + "&page=" + page
P.ModelSetting.get("ohli24_url")
+ "/bbs/board.php?bo_table="
+ cate
+ "&page="
+ page
)
elif cate == "movie": elif cate == "movie":
url = ( url = P.ModelSetting.get("ohli24_url") + "/bbs/board.php?bo_table=" + cate + "&page=" + page
P.ModelSetting.get("ohli24_url")
+ "/bbs/board.php?bo_table="
+ cate
+ "&page="
+ page
)
else: else:
url = ( url = P.ModelSetting.get("ohli24_url") + "/bbs/board.php?bo_table=" + cate + "&page=" + page
P.ModelSetting.get("ohli24_url")
+ "/bbs/board.php?bo_table="
+ cate
+ "&page="
+ page
)
# cate == "complete": # cate == "complete":
logger.info("url:::> %s", url) logger.info("url:::> %s", url)
data = {} data = {}
@@ -691,9 +635,7 @@ class LogicOhli24(PluginModuleBase):
entity = {} entity = {}
entity["link"] = item.xpath(".//a/@href")[0] entity["link"] = item.xpath(".//a/@href")[0]
entity["code"] = entity["link"].split("/")[-1] entity["code"] = entity["link"].split("/")[-1]
entity["title"] = item.xpath(".//div[@class='post-title']/text()")[ entity["title"] = item.xpath(".//div[@class='post-title']/text()")[0].strip()
0
].strip()
# logger.debug(item.xpath(".//div[@class='img-item']/img/@src")[0]) # logger.debug(item.xpath(".//div[@class='img-item']/img/@src")[0])
# logger.debug(item.xpath(".//div[@class='img-item']/img/@data-ezsrc")[0]) # logger.debug(item.xpath(".//div[@class='img-item']/img/@data-ezsrc")[0])
# entity["image_link"] = item.xpath(".//div[@class='img-item']/img/@src")[ # entity["image_link"] = item.xpath(".//div[@class='img-item']/img/@src")[
@@ -701,13 +643,11 @@ class LogicOhli24(PluginModuleBase):
# ].replace("..", P.ModelSetting.get("ohli24_url")) # ].replace("..", P.ModelSetting.get("ohli24_url"))
if len(item.xpath(".//div[@class='img-item']/img/@src")) > 0: if len(item.xpath(".//div[@class='img-item']/img/@src")) > 0:
entity["image_link"] = item.xpath( entity["image_link"] = item.xpath(".//div[@class='img-item']/img/@src")[0].replace(
".//div[@class='img-item']/img/@src" "..", P.ModelSetting.get("ohli24_url")
)[0].replace("..", P.ModelSetting.get("ohli24_url")) )
else: else:
entity["image_link"] = item.xpath( entity["image_link"] = item.xpath(".//div[@class='img-item']/img/@data-ezsrc")[0]
".//div[@class='img-item']/img/@data-ezsrc"
)[0]
data["ret"] = "success" data["ret"] = "success"
data["anime_list"].append(entity) data["anime_list"].append(entity)
@@ -733,12 +673,10 @@ class LogicOhli24(PluginModuleBase):
entity = {} entity = {}
entity["link"] = item.xpath(".//a/@href")[0] entity["link"] = item.xpath(".//a/@href")[0]
entity["code"] = entity["link"].split("/")[-1] entity["code"] = entity["link"].split("/")[-1]
entity["title"] = item.xpath(".//div[@class='post-title']/text()")[ entity["title"] = item.xpath(".//div[@class='post-title']/text()")[0].strip()
0 entity["image_link"] = item.xpath(".//div[@class='img-item']/img/@src")[0].replace(
].strip() "..", P.ModelSetting.get("ohli24_url")
entity["image_link"] = item.xpath(".//div[@class='img-item']/img/@src")[ )
0
].replace("..", P.ModelSetting.get("ohli24_url"))
data["ret"] = "success" data["ret"] = "success"
data["anime_list"].append(entity) data["anime_list"].append(entity)
@@ -774,12 +712,10 @@ class LogicOhli24(PluginModuleBase):
# entity["code"] = entity["link"].split("/")[-1] # entity["code"] = entity["link"].split("/")[-1]
entity["wr_id"] = entity["link"].split("=")[-1] entity["wr_id"] = entity["link"].split("=")[-1]
# logger.debug(item.xpath(".//div[@class='post-title']/text()").join()) # logger.debug(item.xpath(".//div[@class='post-title']/text()").join())
entity["title"] = "".join( entity["title"] = "".join(item.xpath(".//div[@class='post-title']/text()")).strip()
item.xpath(".//div[@class='post-title']/text()") entity["image_link"] = item.xpath(".//div[@class='img-item']/img/@src")[0].replace(
).strip() "..", P.ModelSetting.get("ohli24_url")
entity["image_link"] = item.xpath(".//div[@class='img-item']/img/@src")[ )
0
].replace("..", P.ModelSetting.get("ohli24_url"))
entity["code"] = item.xpath(".//div[@class='img-item']/img/@alt")[0] entity["code"] = item.xpath(".//div[@class='img-item']/img/@alt")[0]
@@ -839,9 +775,7 @@ class LogicOhli24(PluginModuleBase):
return True return True
@staticmethod @staticmethod
def get_html( def get_html(url, headers=None, referer=None, stream=False, timeout=10, stealth=False):
url, headers=None, referer=None, stream=False, timeout=10, stealth=False
):
data = "" data = ""
if headers is None: if headers is None:
headers = { headers = {
@@ -878,9 +812,7 @@ class LogicOhli24(PluginModuleBase):
"https": "http://192.168.0.2:3138", "https": "http://192.168.0.2:3138",
} }
page_content = LogicOhli24.session.get( page_content = LogicOhli24.session.get(url, headers=LogicOhli24.headers, timeout=timeout, proxies=proxies)
url, headers=LogicOhli24.headers, timeout=timeout, proxies=proxies
)
response_data = page_content.text response_data = page_content.text
# logger.debug(response_data) # logger.debug(response_data)
return response_data return response_data
@@ -977,10 +909,7 @@ class LogicOhli24(PluginModuleBase):
} }
socketio.emit("notify", data, namespace="/framework", broadcast=True) socketio.emit("notify", data, namespace="/framework", broadcast=True)
refresh_type = "add" refresh_type = "add"
elif ( elif args["status"] == SupportFfmpeg.Status.ERROR or args["status"] == SupportFfmpeg.Status.EXCEPTION:
args["status"] == SupportFfmpeg.Status.ERROR
or args["status"] == SupportFfmpeg.Status.EXCEPTION
):
data = { data = {
"type": "warning", "type": "warning",
"msg": "다운로드 시작 실패.<br>" + args["data"]["save_fullpath"], "msg": "다운로드 시작 실패.<br>" + args["data"]["save_fullpath"],
@@ -1115,9 +1044,7 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
logger.info(f"self.info:::> {self.info}") logger.info(f"self.info:::> {self.info}")
# text = requests.get(url, headers=headers).text # text = requests.get(url, headers=headers).text
text = LogicOhli24.get_html( text = LogicOhli24.get_html(url, headers=headers, referer=f"{ourls.scheme}://{ourls.netloc}")
url, headers=headers, referer=f"{ourls.scheme}://{ourls.netloc}"
)
# logger.debug(text) # logger.debug(text)
soup1 = BeautifulSoup(text, "lxml") soup1 = BeautifulSoup(text, "lxml")
pattern = re.compile(r"url : \"\.\.(.*)\"") pattern = re.compile(r"url : \"\.\.(.*)\"")
@@ -1157,9 +1084,7 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
soup3 = BeautifulSoup(resp1, "lxml") soup3 = BeautifulSoup(resp1, "lxml")
# packed_pattern = re.compile(r'\\{*(eval.+)*\\}', re.MULTILINE | re.DOTALL) # packed_pattern = re.compile(r'\\{*(eval.+)*\\}', re.MULTILINE | re.DOTALL)
s_pattern = re.compile(r"(eval.+)", re.MULTILINE | re.DOTALL) s_pattern = re.compile(r"(eval.+)", re.MULTILINE | re.DOTALL)
packed_pattern = re.compile( packed_pattern = re.compile(r"if?.([^{}]+)\{.*(eval.+)\}.+else?.{.(eval.+)\}", re.DOTALL)
r"if?.([^{}]+)\{.*(eval.+)\}.+else?.{.(eval.+)\}", re.DOTALL
)
packed_script = soup3.find("script", text=s_pattern) packed_script = soup3.find("script", text=s_pattern)
# packed_script = soup3.find('script') # packed_script = soup3.find('script')
# logger.info("packed_script>>> %s", packed_script.text) # logger.info("packed_script>>> %s", packed_script.text)
@@ -1213,9 +1138,7 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
payload = { payload = {
"hash": video_hash[-1], "hash": video_hash[-1],
} }
resp2 = requests.post( resp2 = requests.post(video_info_url, headers=headers, data=payload, timeout=20).json()
video_info_url, headers=headers, data=payload, timeout=20
).json()
logger.debug("resp2::> %s", resp2) logger.debug("resp2::> %s", resp2)
@@ -1249,10 +1172,9 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
self.quality = match.group("quality") self.quality = match.group("quality")
logger.info(self.quality) logger.info(self.quality)
match = re.compile( match = re.compile(r"(?P<title>.*?)\s*((?P<season>\d+)%s)?\s*((?P<epi_no>\d+)%s)" % ("", "")).search(
r"(?P<title>.*?)\s*((?P<season>\d+)%s)?\s*((?P<epi_no>\d+)%s)" self.info["title"]
% ("", "") )
).search(self.info["title"])
# epi_no 초기값 # epi_no 초기값
epi_no = 1 epi_no = 1
@@ -1293,24 +1215,16 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
folder_name = Util.change_text_for_use_filename(folder_name.strip()) folder_name = Util.change_text_for_use_filename(folder_name.strip())
self.savepath = os.path.join(self.savepath, folder_name) self.savepath = os.path.join(self.savepath, folder_name)
if P.ModelSetting.get_bool("ohli24_auto_make_season_folder"): if P.ModelSetting.get_bool("ohli24_auto_make_season_folder"):
self.savepath = os.path.join( self.savepath = os.path.join(self.savepath, "Season %s" % int(self.season))
self.savepath, "Season %s" % int(self.season)
)
self.filepath = os.path.join(self.savepath, self.filename) self.filepath = os.path.join(self.savepath, self.filename)
if not os.path.exists(self.savepath): if not os.path.exists(self.savepath):
os.makedirs(self.savepath) os.makedirs(self.savepath)
from framework.common.util import write_file, convert_vtt_to_srt from framework.common.util import write_file, convert_vtt_to_srt
srt_filepath = os.path.join( srt_filepath = os.path.join(self.savepath, self.filename.replace(".mp4", ".ko.srt"))
self.savepath, self.filename.replace(".mp4", ".ko.srt")
)
if ( if self.srt_url is not None and not os.path.exists(srt_filepath) and not ("thumbnails.vtt" in self.srt_url):
self.srt_url is not None
and not os.path.exists(srt_filepath)
and not ("thumbnails.vtt" in self.srt_url)
):
if requests.get(self.srt_url, headers=headers).status_code == 200: if requests.get(self.srt_url, headers=headers).status_code == 200:
srt_data = requests.get(self.srt_url, headers=headers).text srt_data = requests.get(self.srt_url, headers=headers).text
Util.write_file(srt_data, srt_filepath) Util.write_file(srt_data, srt_filepath)
@@ -1433,9 +1347,7 @@ class ModelOhli24Item(ModelBase):
ret = {x.name: getattr(self, x.name) for x in self.__table__.columns} ret = {x.name: getattr(self, x.name) for x in self.__table__.columns}
ret["created_time"] = self.created_time.strftime("%Y-%m-%d %H:%M:%S") ret["created_time"] = self.created_time.strftime("%Y-%m-%d %H:%M:%S")
ret["completed_time"] = ( ret["completed_time"] = (
self.completed_time.strftime("%Y-%m-%d %H:%M:%S") self.completed_time.strftime("%Y-%m-%d %H:%M:%S") if self.completed_time is not None else None
if self.completed_time is not None
else None
) )
return ret return ret
@@ -1511,9 +1423,7 @@ class ModelOhli24Item(ModelBase):
if option == "completed": if option == "completed":
query = query.filter(cls.status == "completed") query = query.filter(cls.status == "completed")
query = ( query = query.order_by(desc(cls.id)) if order == "desc" else query.order_by(cls.id)
query.order_by(desc(cls.id)) if order == "desc" else query.order_by(cls.id)
)
return query return query
@classmethod @classmethod
@@ -1586,7 +1496,7 @@ class ModelOhli24Program(ModelBase):
@classmethod @classmethod
def is_duplicate(cls, clip_id): def is_duplicate(cls, clip_id):
return cls.get(clip_id) != None return cls.get(clip_id) is not None
# 오버라이딩 # 오버라이딩
@classmethod @classmethod
@@ -1620,11 +1530,12 @@ class ModelOhli24Program(ModelBase):
with F.app.app_context(): with F.app.app_context():
return db.session.query(cls).filter_by(completed=False).all() return db.session.query(cls).filter_by(completed=False).all()
### only for queue # only for queue
@classmethod @classmethod
def get_by_id_in_queue(cls, id): def get_by_id_in_queue(cls, id):
for _ in cls.queue_list: for _ in cls.queue_list:
if _.id == int(id): if _.id == int(id):
return _ return _
### only for queue END # only for queue END

2
nest_api/anime-api/.env Normal file
View File

@@ -0,0 +1,2 @@
NODE_ENV=development
NODE_SERVER_PORT=7070

View File

@@ -0,0 +1,25 @@
module.exports = {
parser: '@typescript-eslint/parser',
parserOptions: {
project: 'tsconfig.json',
tsconfigRootDir : __dirname,
sourceType: 'module',
},
plugins: ['@typescript-eslint/eslint-plugin'],
extends: [
'plugin:@typescript-eslint/recommended',
'plugin:prettier/recommended',
],
root: true,
env: {
node: true,
jest: true,
},
ignorePatterns: ['.eslintrc.js'],
rules: {
'@typescript-eslint/interface-name-prefix': 'off',
'@typescript-eslint/explicit-function-return-type': 'off',
'@typescript-eslint/explicit-module-boundary-types': 'off',
'@typescript-eslint/no-explicit-any': 'off',
},
};

35
nest_api/anime-api/.gitignore vendored Normal file
View File

@@ -0,0 +1,35 @@
# compiled output
/dist
/node_modules
# Logs
logs
*.log
npm-debug.log*
pnpm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
# OS
.DS_Store
# Tests
/coverage
/.nyc_output
# IDEs and editors
/.idea
.project
.classpath
.c9/
*.launch
.settings/
*.sublime-workspace
# IDE - VSCode
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json

View File

@@ -0,0 +1 @@
18.12.0

View File

@@ -0,0 +1,4 @@
{
"singleQuote": true,
"trailingComma": "all"
}

View File

@@ -0,0 +1,73 @@
<p align="center">
<a href="http://nestjs.com/" target="blank"><img src="https://nestjs.com/img/logo-small.svg" width="200" alt="Nest Logo" /></a>
</p>
[circleci-image]: https://img.shields.io/circleci/build/github/nestjs/nest/master?token=abc123def456
[circleci-url]: https://circleci.com/gh/nestjs/nest
<p align="center">A progressive <a href="http://nodejs.org" target="_blank">Node.js</a> framework for building efficient and scalable server-side applications.</p>
<p align="center">
<a href="https://www.npmjs.com/~nestjscore" target="_blank"><img src="https://img.shields.io/npm/v/@nestjs/core.svg" alt="NPM Version" /></a>
<a href="https://www.npmjs.com/~nestjscore" target="_blank"><img src="https://img.shields.io/npm/l/@nestjs/core.svg" alt="Package License" /></a>
<a href="https://www.npmjs.com/~nestjscore" target="_blank"><img src="https://img.shields.io/npm/dm/@nestjs/common.svg" alt="NPM Downloads" /></a>
<a href="https://circleci.com/gh/nestjs/nest" target="_blank"><img src="https://img.shields.io/circleci/build/github/nestjs/nest/master" alt="CircleCI" /></a>
<a href="https://coveralls.io/github/nestjs/nest?branch=master" target="_blank"><img src="https://coveralls.io/repos/github/nestjs/nest/badge.svg?branch=master#9" alt="Coverage" /></a>
<a href="https://discord.gg/G7Qnnhy" target="_blank"><img src="https://img.shields.io/badge/discord-online-brightgreen.svg" alt="Discord"/></a>
<a href="https://opencollective.com/nest#backer" target="_blank"><img src="https://opencollective.com/nest/backers/badge.svg" alt="Backers on Open Collective" /></a>
<a href="https://opencollective.com/nest#sponsor" target="_blank"><img src="https://opencollective.com/nest/sponsors/badge.svg" alt="Sponsors on Open Collective" /></a>
<a href="https://paypal.me/kamilmysliwiec" target="_blank"><img src="https://img.shields.io/badge/Donate-PayPal-ff3f59.svg"/></a>
<a href="https://opencollective.com/nest#sponsor" target="_blank"><img src="https://img.shields.io/badge/Support%20us-Open%20Collective-41B883.svg" alt="Support us"></a>
<a href="https://twitter.com/nestframework" target="_blank"><img src="https://img.shields.io/twitter/follow/nestframework.svg?style=social&label=Follow"></a>
</p>
<!--[![Backers on Open Collective](https://opencollective.com/nest/backers/badge.svg)](https://opencollective.com/nest#backer)
[![Sponsors on Open Collective](https://opencollective.com/nest/sponsors/badge.svg)](https://opencollective.com/nest#sponsor)-->
## Description
[Nest](https://github.com/nestjs/nest) framework TypeScript starter repository.
## Installation
```bash
$ npm install
```
## Running the app
```bash
# development
$ npm run start
# watch mode
$ npm run start:dev
# production mode
$ npm run start:prod
```
## Test
```bash
# unit tests
$ npm run test
# e2e tests
$ npm run test:e2e
# test coverage
$ npm run test:cov
```
## Support
Nest is an MIT-licensed open source project. It can grow thanks to the sponsors and support by the amazing backers. If you'd like to join them, please [read more here](https://docs.nestjs.com/support).
## Stay in touch
- Author - [Kamil Myśliwiec](https://kamilmysliwiec.com)
- Website - [https://nestjs.com](https://nestjs.com/)
- Twitter - [@nestframework](https://twitter.com/nestframework)
## License
Nest is [MIT licensed](LICENSE).

View File

@@ -0,0 +1,5 @@
{
"$schema": "https://json.schemastore.org/nest-cli",
"collection": "@nestjs/schematics",
"sourceRoot": "src"
}

17944
nest_api/anime-api/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,77 @@
{
"name": "anime-api",
"version": "0.0.1",
"description": "",
"author": "",
"private": true,
"license": "UNLICENSED",
"scripts": {
"copy-files": "cpx \"src/config/*.yaml\" dist/config/",
"dev": "ne",
"prebuild": "rimraf dist",
"build": "nest build",
"format": "prettier --write \"src/**/*.ts\" \"test/**/*.ts\"",
"start": "nest start",
"start:dev": "npm run copy-files && nest start --watch",
"start:debug": "nest start --debug --watch",
"start:prod": "node dist/main",
"lint": "eslint \"{src,apps,libs,test}/**/*.ts\" --fix",
"test": "jest",
"test:watch": "jest --watch",
"test:cov": "jest --coverage",
"test:debug": "node --inspect-brk -r tsconfig-paths/register -r ts-node/register node_modules/.bin/jest --runInBand",
"test:e2e": "jest --config ./test/jest-e2e.json"
},
"dependencies": {
"@nestjs/common": "^9.0.0",
"@nestjs/config": "^2.2.0",
"@nestjs/core": "^9.0.0",
"@nestjs/platform-express": "^9.0.0",
"@types/js-yaml": "^4.0.5",
"cpx": "^1.5.0",
"js-yaml": "^4.1.0",
"reflect-metadata": "^0.1.13",
"rimraf": "^3.0.2",
"rxjs": "^7.2.0"
},
"devDependencies": {
"@nestjs/cli": "^9.0.0",
"@nestjs/schematics": "^9.0.0",
"@nestjs/testing": "^9.0.0",
"@types/express": "^4.17.13",
"@types/jest": "28.1.8",
"@types/node": "^16.0.0",
"@types/supertest": "^2.0.11",
"@typescript-eslint/eslint-plugin": "^5.0.0",
"@typescript-eslint/parser": "^5.0.0",
"eslint": "^8.0.1",
"eslint-config-prettier": "^8.3.0",
"eslint-plugin-prettier": "^4.0.0",
"jest": "28.1.3",
"prettier": "^2.3.2",
"source-map-support": "^0.5.20",
"supertest": "^6.1.3",
"ts-jest": "28.0.8",
"ts-loader": "^9.2.3",
"ts-node": "^10.0.0",
"tsconfig-paths": "4.1.0",
"typescript": "^4.7.4"
},
"jest": {
"moduleFileExtensions": [
"js",
"json",
"ts"
],
"rootDir": "src",
"testRegex": ".*\\.spec\\.ts$",
"transform": {
"^.+\\.(t|j)s$": "ts-jest"
},
"collectCoverageFrom": [
"**/*.(t|j)s"
],
"coverageDirectory": "../coverage",
"testEnvironment": "node"
}
}

View File

@@ -0,0 +1,22 @@
import { Test, TestingModule } from '@nestjs/testing';
import { AppController } from './app.controller';
import { AppService } from './app.service';
describe('AppController', () => {
let appController: AppController;
beforeEach(async () => {
const app: TestingModule = await Test.createTestingModule({
controllers: [AppController],
providers: [AppService],
}).compile();
appController = app.get<AppController>(AppController);
});
describe('root', () => {
it('should return "Hello World!"', () => {
expect(appController.getHello()).toBe('Hello World!');
});
});
});

View File

@@ -0,0 +1,12 @@
import { Controller, Get } from '@nestjs/common';
import { AppService } from './app.service';
@Controller()
export class AppController {
constructor(private readonly appService: AppService) {}
@Get()
getHello(): string {
return this.appService.getHello();
}
}

View File

@@ -0,0 +1,17 @@
import { Module } from '@nestjs/common';
import { AppController } from './app.controller';
import { AppService } from './app.service';
import { ConfigModule } from '@nestjs/config';
import config from './config/config';
@Module({
imports: [
ConfigModule.forRoot({
load: [config],
isGlobal: true,
}),
],
controllers: [AppController],
providers: [AppService],
})
export class AppModule {}

View File

@@ -0,0 +1,8 @@
import { Injectable } from '@nestjs/common';
@Injectable()
export class AppService {
  /** Return the canonical greeting served by the root endpoint. */
  getHello(): string {
    const greeting = 'Hello World!';
    return greeting;
  }
}

View File

@@ -0,0 +1,14 @@
import { readFileSync } from 'fs';
import * as yaml from 'js-yaml';
import { join } from 'path';
const YAML_CONFIG_PROD = 'production.yaml';
const YAML_CONFIG_DEV = 'development.yaml';
export default () => {
  // Choose the YAML file matching the runtime environment, then parse it
  // into a plain object for @nestjs/config to load.
  const fileName =
    process.env.NODE_ENV === 'production' ? YAML_CONFIG_PROD : YAML_CONFIG_DEV;
  const raw = readFileSync(join(__dirname, fileName), 'utf8');
  return yaml.load(raw) as Record<string, any>;
};

View File

@@ -0,0 +1,2 @@
server:
port: 7070

View File

@@ -0,0 +1,2 @@
server:
port: 3003

View File

@@ -0,0 +1,14 @@
import { Logger } from '@nestjs/common';
import { NestFactory } from '@nestjs/core';
import { AppModule } from './app.module';
import { ConfigService } from '@nestjs/config';
// const port = process.env.NODE_SERVER_PORT;
// Application entry point: create the Nest app, resolve the HTTP port from
// configuration, and start listening.
async function bootstrap() {
  const app = await NestFactory.create(AppModule);
  const configService = app.get(ConfigService);
  // NOTE(review): 'NODE_SERVER_PORT' is read through ConfigService —
  // presumably supplied by the .env file rather than the YAML config
  // (which exposes server.port); confirm which source wins.
  const port = configService.get('NODE_SERVER_PORT');
  await app.listen(port);
  Logger.log(`Application listening on port ${port}`);
}
bootstrap();

View File

@@ -0,0 +1,24 @@
import { Test, TestingModule } from '@nestjs/testing';
import { INestApplication } from '@nestjs/common';
import * as request from 'supertest';
import { AppModule } from './../src/app.module';
describe('AppController (e2e)', () => {
let app: INestApplication;
beforeEach(async () => {
const moduleFixture: TestingModule = await Test.createTestingModule({
imports: [AppModule],
}).compile();
app = moduleFixture.createNestApplication();
await app.init();
});
it('/ (GET)', () => {
return request(app.getHttpServer())
.get('/')
.expect(200)
.expect('Hello World!');
});
});

View File

@@ -0,0 +1,9 @@
{
"moduleFileExtensions": ["js", "json", "ts"],
"rootDir": ".",
"testEnvironment": "node",
"testRegex": ".e2e-spec.ts$",
"transform": {
"^.+\\.(t|j)s$": "ts-jest"
}
}

View File

@@ -0,0 +1,4 @@
{
"extends": "./tsconfig.json",
"exclude": ["node_modules", "test", "dist", "**/*spec.ts"]
}

View File

@@ -0,0 +1,21 @@
{
"compilerOptions": {
"module": "commonjs",
"declaration": true,
"removeComments": true,
"emitDecoratorMetadata": true,
"experimentalDecorators": true,
"allowSyntheticDefaultImports": true,
"target": "es2017",
"sourceMap": true,
"outDir": "./dist",
"baseUrl": "./",
"incremental": true,
"skipLibCheck": true,
"strictNullChecks": false,
"noImplicitAny": false,
"strictBindCallApply": false,
"forceConsistentCasingInFileNames": false,
"noFallthroughCasesInSwitch": false
}
}

View File

@@ -1,6 +1,6 @@
{% extends "base.html" %} {% block content %} {% extends "base.html" %} {% block content %}
<!--<div id="preloader"></div>--> <!--<div id="preloader"></div>-->
<div id="anime_downloader_wrapper"> <div id="anime_downloader_wrapper">
<div id="preloader" class="loader"> <div id="preloader" class="loader">
<div class="loader-inner"> <div class="loader-inner">
<div class="loader-line-wrap"> <div class="loader-line-wrap">
@@ -35,7 +35,6 @@
</button> </button>
</div> </div>
<div <div
id="anime_category" id="anime_category"
class="btn-group" class="btn-group"
@@ -44,7 +43,9 @@
> >
<button id="ing" type="button" class="btn btn-success">방영중</button> <button id="ing" type="button" class="btn btn-success">방영중</button>
<button id="movie" type="button" class="btn btn-primary">극장판</button> <button id="movie" type="button" class="btn btn-primary">극장판</button>
<button id="complete_anilist" type="button" class="btn btn-dark">완결</button> <button id="complete_anilist" type="button" class="btn btn-dark">
완결
</button>
<button id="top_view" type="button" class="btn btn-yellow">Top</button> <button id="top_view" type="button" class="btn btn-yellow">Top</button>
</div> </div>
<form id="airing_list_form"> <form id="airing_list_form">
@@ -53,28 +54,34 @@
<form id="screen_movie_list_form"> <form id="screen_movie_list_form">
<div id="screen_movie_list" class="container"></div> <div id="screen_movie_list" class="container"></div>
</form> </form>
{# <div class="text-center">#} {#
{# <div id="spinner" class="spinner-border" role="status">#} <div class="text-center">
{# <span class="sr-only">Loading...</span>#} #} {#
{# </div>#} <div id="spinner" class="spinner-border" role="status">
{# </div>#} #} {# <span class="sr-only">Loading...</span>#} {#
</div>
#} {#
</div>
#}
<form id="program_auto_form"> <form id="program_auto_form">
<div id="episode_list"></div> <div id="episode_list"></div>
</form> </form>
</div> </div>
</div> </div>
<!--전체--> <!--전체-->
<script <script
type="text/javascript" type="text/javascript"
src="https://cdn.jsdelivr.net/npm/lozad/dist/lozad.min.js" src="https://cdn.jsdelivr.net/npm/lozad/dist/lozad.min.js"
></script> ></script>
<script src="{{ url_for('.static', filename='js/sjva_ui14.js') }}"></script> <script src="{{ url_for('.static', filename='js/sjva_ui14.js') }}"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery.lazyload/1.9.1/jquery.lazyload.min.js" <script
src="https://cdnjs.cloudflare.com/ajax/libs/jquery.lazyload/1.9.1/jquery.lazyload.min.js"
integrity="sha512-jNDtFf7qgU0eH/+Z42FG4fw3w7DM/9zbgNPe3wfJlCylVDTT3IgKW5r92Vy9IHa6U50vyMz5gRByIu4YIXFtaQ==" integrity="sha512-jNDtFf7qgU0eH/+Z42FG4fw3w7DM/9zbgNPe3wfJlCylVDTT3IgKW5r92Vy9IHa6U50vyMz5gRByIu4YIXFtaQ=="
crossorigin="anonymous" referrerpolicy="no-referrer"></script> crossorigin="anonymous"
<script type="text/javascript"> referrerpolicy="no-referrer"
></script>
<script type="text/javascript">
const package_name = "{{arg['package_name'] }}"; const package_name = "{{arg['package_name'] }}";
const sub = "{{arg['sub'] }}"; const sub = "{{arg['sub'] }}";
const anilife_url = "{{arg['anilife_url']}}"; const anilife_url = "{{arg['anilife_url']}}";
@@ -316,7 +323,8 @@
let new_style = '' let new_style = ''
console.log('page a: ', page) console.log('page a: ', page)
console.log(data) console.log(data)
console.log(data.episode) console.log(data.data)
//console.log(data.episode)
let page_elem = ""; let page_elem = "";
if (page === undefined) { if (page === undefined) {
@@ -332,8 +340,10 @@
"</button>"; "</button>";
str += "</div>"; str += "</div>";
str += '<div id="inner_screen_movie" class="row infinite-scroll">'; str += '<div id="inner_screen_movie" class="row infinite-scroll">';
for (let i in data.episode) { for (let i in data.data) {
if (data.episode[i].code === data.latest_anime_code) { console.log(i)
console.log(data.data[i])
if (data.data[i].postid === data.latest_anime_code) {
new_anime = false new_anime = false
} }
@@ -351,30 +361,30 @@
// '<img class="card-img-top lazyload" src="./static/img_loader_x200.svg" data-original="' + data.episode[i].image_link + '" />'; // '<img class="card-img-top lazyload" src="./static/img_loader_x200.svg" data-original="' + data.episode[i].image_link + '" />';
tmp += tmp +=
'<img class="card-img-top lazy" src="{{ url_for('.static', filename='img_loader_x200.svg') }}" data-lazy-src="' + '<img class="card-img-top lazy" src="{{ url_for('.static', filename='img_loader_x200.svg') }}" data-lazy-src="' +
data.episode[i].image_link + data.data[i].postthum +
'" style="cursor: pointer" onclick="location.href=\'./request?code=' + '" style="cursor: pointer" onclick="location.href=\'./request?code=' +
data.episode[i].code + data.data[i].postid +
"'\"/>"; "'\"/>";
if (current_cate === "ing") { if (current_cate === "ing") {
tmp += tmp +=
'<span class="badge badge-danger badge-on-image">' + '<span class="badge badge-danger badge-on-image">' +
data.episode[i].chapter + data.data[i].postnote +
"</span>"; "</span>";
} }
// tmp += '<div class="card-body '+ new_anime ? 'new-anime' : '' +'">'; // tmp += '<div class="card-body '+ new_anime ? 'new-anime' : '' +'">';
tmp += '<div class="card-body">'; tmp += '<div class="card-body">';
tmp += '<h5 class="card-title">' + data.episode[i].title + "</h5>"; tmp += '<h5 class="card-title">' + data.data[i].postname + "</h5>";
tmp += tmp +=
'<button id="add_whitelist" name="add_whitelist" class="btn btn-sm btn-favorite mb-1" data-code="' + '<button id="add_whitelist" name="add_whitelist" class="btn btn-sm btn-favorite mb-1" data-code="' +
data.episode[i].code + data.data[i].postid +
'"><p class="card-text">' + '"><p class="card-text">' +
data.episode[i].code + data.data[i].postid +
" <i class=\"bi bi-heart-fill\"></i></p></button>"; " <i class=\"bi bi-heart-fill\"></i></p></button>";
tmp += tmp +=
'<a href="./request?code=' + '<a href="./request?code=' +
data.episode[i].code + data.data[i].postid +
'" class="btn btn-primary cut-text">' + '" class="btn btn-primary cut-text">' +
data.episode[i].title + data.data[i].postname +
"</a>"; "</a>";
tmp += "</div>"; tmp += "</div>";
tmp += "</div>"; tmp += "</div>";
@@ -770,20 +780,19 @@
}, },
!1 !1
); );
</script>
</script> <script
<script
src="https://cdnjs.cloudflare.com/ajax/libs/jquery.lazyload/1.9.1/jquery.lazyload.min.js" src="https://cdnjs.cloudflare.com/ajax/libs/jquery.lazyload/1.9.1/jquery.lazyload.min.js"
integrity="sha512-jNDtFf7qgU0eH/+Z42FG4fw3w7DM/9zbgNPe3wfJlCylVDTT3IgKW5r92Vy9IHa6U50vyMz5gRByIu4YIXFtaQ==" integrity="sha512-jNDtFf7qgU0eH/+Z42FG4fw3w7DM/9zbgNPe3wfJlCylVDTT3IgKW5r92Vy9IHa6U50vyMz5gRByIu4YIXFtaQ=="
crossorigin="anonymous" crossorigin="anonymous"
referrerpolicy="no-referrer" referrerpolicy="no-referrer"
></script> ></script>
<script <script
async async
src="https://cdn.jsdelivr.net/npm/vanilla-lazyload@17.7.0/dist/lazyload.min.js" src="https://cdn.jsdelivr.net/npm/vanilla-lazyload@17.7.0/dist/lazyload.min.js"
></script> ></script>
<style> <style>
#anime_downloader_wrapper { #anime_downloader_wrapper {
font-family: NanumSquareNeo, system-ui, -apple-system, Segoe UI, Roboto, Helvetica Neue, Noto Sans, Liberation Sans, Arial, sans-serif, Apple Color Emoji, Segoe UI Emoji, Segoe UI Symbol, Noto Color Emoji; font-family: NanumSquareNeo, system-ui, -apple-system, Segoe UI, Roboto, Helvetica Neue, Noto Sans, Liberation Sans, Arial, sans-serif, Apple Color Emoji, Segoe UI Emoji, Segoe UI Symbol, Noto Color Emoji;
} }
@@ -1170,12 +1179,14 @@
body { body {
background-image: linear-gradient(90deg, #233f48, #6c6fa2, #768dae); background-image: linear-gradient(90deg, #233f48, #6c6fa2, #768dae);
</style>
</style> <link
<link
href="{{ url_for('.static', filename='css/bootstrap.min.css') }}" href="{{ url_for('.static', filename='css/bootstrap.min.css') }}"
type="text/css" type="text/css"
rel="stylesheet" rel="stylesheet"
/> />
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootstrap-icons@1.7.1/font/bootstrap-icons.css"> <link
rel="stylesheet"
href="https://cdn.jsdelivr.net/npm/bootstrap-icons@1.7.1/font/bootstrap-icons.css"
/>
{% endblock %} {% endblock %}

View File

@@ -208,6 +208,132 @@
"metadata": { "metadata": {
"collapsed": false "collapsed": false
} }
},
{
"cell_type": "code",
"execution_count": 3,
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"<module 'select' from '/Users/yommi/.pyenv/versions/3.10.7/lib/python3.10/lib-dynload/select.cpython-310-darwin.so'>\n"
]
}
],
"source": [
"import select\n",
"\n",
"print(select)"
],
"metadata": {
"collapsed": false
}
},
{
"cell_type": "code",
"execution_count": 4,
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"['KQ_EV_ADD', 'KQ_EV_CLEAR', 'KQ_EV_DELETE', 'KQ_EV_DISABLE', 'KQ_EV_ENABLE', 'KQ_EV_EOF', 'KQ_EV_ERROR', 'KQ_EV_FLAG1', 'KQ_EV_ONESHOT', 'KQ_EV_SYSFLAGS', 'KQ_FILTER_AIO', 'KQ_FILTER_PROC', 'KQ_FILTER_READ', 'KQ_FILTER_SIGNAL', 'KQ_FILTER_TIMER', 'KQ_FILTER_VNODE', 'KQ_FILTER_WRITE', 'KQ_NOTE_ATTRIB', 'KQ_NOTE_CHILD', 'KQ_NOTE_DELETE', 'KQ_NOTE_EXEC', 'KQ_NOTE_EXIT', 'KQ_NOTE_EXTEND', 'KQ_NOTE_FORK', 'KQ_NOTE_LINK', 'KQ_NOTE_LOWAT', 'KQ_NOTE_PCTRLMASK', 'KQ_NOTE_PDATAMASK', 'KQ_NOTE_RENAME', 'KQ_NOTE_REVOKE', 'KQ_NOTE_TRACK', 'KQ_NOTE_TRACKERR', 'KQ_NOTE_WRITE', 'PIPE_BUF', 'POLLERR', 'POLLHUP', 'POLLIN', 'POLLNVAL', 'POLLOUT', 'POLLPRI', 'POLLRDBAND', 'POLLRDNORM', 'POLLWRBAND', 'POLLWRNORM', '__doc__', '__file__', '__loader__', '__name__', '__package__', '__spec__', 'error', 'kevent', 'kqueue', 'poll', 'select']\n"
]
}
],
"source": [
"print(dir(select))"
],
"metadata": {
"collapsed": false
}
},
{
"cell_type": "code",
"execution_count": 6,
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"_CountingAttr(counter=23, _default=Factory(factory=<class 'select.kqueue'>, takes_self=False), repr=True, eq=True, order=True, hash=None, init=True, on_setattr=None, metadata={})\n"
]
}
],
"source": [
"import attr\n",
"\n",
"_kqueue = attr.ib(factory=select.kqueue)\n",
"\n",
"print(_kqueue)"
],
"metadata": {
"collapsed": false
}
},
{
"cell_type": "code",
"execution_count": null,
"outputs": [],
"source": [],
"metadata": {
"collapsed": false
}
},
{
"cell_type": "code",
"execution_count": null,
"outputs": [],
"source": [],
"metadata": {
"collapsed": false
}
},
{
"cell_type": "code",
"execution_count": 1,
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"용사 파티에서 추방된 비스트테이머, 최강종의 고양이귀 소녀와 만나다.S01E06.1080p-OHNI24.mp4\n"
]
}
],
"source": [
"import os\n",
"\n",
"t_str = \"/data/anime_downloader/ohli24/용사 파티에서 추방된 비스트테이머, 최강종의 고양이귀 소녀와 만나다/Season 1/용사 파티에서 추방된 비스트테이머, 최강종의 고양이귀 소녀와 만나다.S01E06.1080p-OHNI24.mp4\"\n",
"\n",
"filename = os.path.basename(t_str)\n",
"print(filename)"
],
"metadata": {
"collapsed": false
}
},
{
"cell_type": "code",
"execution_count": 2,
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"thumbnails.vtt\n"
]
}
],
"source": [
"srt_url = \"https://cdn1-nydoodle.xyz/cdn/down/e2dbe79957ca1ba094c80cfd4c7604cb/thumbnails.vtt\"\n",
"\n",
"print(srt_url.split('/')[-1])\n"
],
"metadata": {
"collapsed": false
}
} }
], ],
"metadata": { "metadata": {

30
test_01.py Normal file
View File

@@ -0,0 +1,30 @@
import os
import shutil


def organize_by_extension(folder_path):
    """Sort every regular file in *folder_path* into a subfolder named
    after its extension (without the leading dot).

    Files with no extension are left in place; this includes dotfiles
    such as ``.DS_Store``, for which ``os.path.splitext`` reports an
    empty extension.  Directories are skipped entirely (the original
    script would have moved a directory whose name contained a dot).
    Destination subfolders are created on demand and reused.

    Args:
        folder_path: Directory whose files should be organized.

    Returns:
        Dict mapping each extension seen (with leading dot, e.g. ".txt")
        to the destination folder that was created/used for it.
    """
    # extension (".txt") -> destination folder path, built lazily
    extension_folders = {}

    # os.listdir returns a snapshot, so creating new subfolders inside
    # folder_path while iterating is safe.
    for file_name in os.listdir(folder_path):
        src_path = os.path.join(folder_path, file_name)
        # Skip directories — including the extension folders made below.
        if not os.path.isfile(src_path):
            continue
        extension = os.path.splitext(file_name)[1]
        # No extension -> leave the file where it is.
        if not extension:
            continue
        if extension not in extension_folders:
            # First file with this extension: create its folder.
            extension_folder = os.path.join(folder_path, extension[1:])
            os.makedirs(extension_folder, exist_ok=True)
            extension_folders[extension] = extension_folder
        shutil.move(src_path, os.path.join(extension_folders[extension], file_name))
    return extension_folders


if __name__ == "__main__":
    # Hard-coded path preserved from the original script; guarded so that
    # importing this module no longer moves files as a side effect.
    organize_by_extension("/Users/yommi/Downloads")

529
yommi_api/main.py Normal file
View File

@@ -0,0 +1,529 @@
import json
from fastapi import FastAPI
import asyncio
import traceback
from typing import Optional, List
from playwright_har_tracer import HarTracer
from pydantic import BaseModel
import sys
import subprocess
import importlib
import uvicorn
from playwright.sync_api import sync_playwright
from playwright.async_api import async_playwright
# pkgs = ["playwright", "playwright_stealth", "playwright_har_tracer", "loguru"]
# Third-party packages this service needs at runtime; any that are missing
# are installed on the fly by the bootstrap loop below.
pkgs = ["playwright", "playwright_stealth", "playwright_har_tracer", "loguru"]

# Best-effort dependency bootstrap: try to import each package and, on
# ImportError, upgrade pip and install the package before retrying.
# NOTE(review): running pip at import time is a heavy side effect —
# consider moving this into a deployment/setup step.
for pkg in pkgs:
    try:
        importlib.import_module(pkg)
    # except ImportError:
    except ImportError:
        # pip itself is upgraded first so the subsequent install works on
        # older environments.
        subprocess.check_call(
            [sys.executable, "-m", "pip", "install", "--upgrade", "pip"]
        )
        # main(["install", pkg])
        subprocess.check_call([sys.executable, "-m", "pip", "install", pkg])
        # Re-import; still raises if the freshly installed package is broken.
        importlib.import_module(pkg)
from loguru import logger
# try:
# from playwright_stealth import stealth_async
# except:
# pip install playwright_stealth
#
# try:
# import html_to_json
# except:
# pip install html_to_json
# from playwright_har_tracer import HarTracer
import time
import os
user_dir = "tmp/playwright"
user_dir = os.path.join(os.getcwd(), user_dir)
app = FastAPI()
# headers = {
# # ":authority": "anilife.live",
# "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/104.0.0.0 Safari/537.36",
# "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
# "Accept-Language": "ko-KR,ko;q=0.9,en-US;q=0.8,en;q=0.7",
#
# # "Cookie": ""
# "Cookie": "SL_G_WPT_TO=ko; SL_GWPT_Show_Hide_tmp=1; SL_wptGlobTipTmp=1; DSR=WQYVukjkxKVYEbpgM0pgMs+awM/br6JyMtbfB4OGMC0XEA+UxUxR1RUgOi1mNMoQB16xIEuqk64iex+/ahi72A==; DCSS=FEC4550B310816E1CA91CBE4A0069C43E04F108; SPSI=c9a8435ac1577631126a68a61da5d240; SPSE=aV099+8sLURR7w5MAL1ABihQFpGsh5188ml5NIaMjHbnknx+C/y1qITA7nLCZOTsE67VWb+oacReiz56F3CswA==; anilife_csrf=6e19420853df91fc05732b8be6db4201; UTGv2=h4a5ce301324340f0b03d9e61e42bc6c0416; spcsrf=84aa5294e8eef0a1b2ddac94d3128f29; sp_lit=fggbJYfuR2dVL/kk5POeFA==; PRLST=tw; adOtr=4E9Ccaac551",
# }
headers = {
# 'authority': 'anilife.live',
"accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8",
"accept-language": "ko-KR,ko;q=0.8",
"cache-control": "no-cache",
# 'cookie': '_ga=GA1.1.578607927.1660813724; __gads=ID=10abb8b98b6828ae-2281c943a9d500fd:T=1660813741:RT=1660813741:S=ALNI_MYU_iB2lBgSrEQUBwhKpNsToaqQ8A; SPSI=5f044d5c641270640d82deeea4c7904a; SPSE=6ysw8BS2tk+H8nN0bo8LOyavaI+InS3i9YuPEzBuEHjrd9GFUl8T3Gd4lg0Wwx/5+zwOrEnqeApQGjdDhqKQiQ==; anilife_csrf=d629470ba1b8a2b81426114a0fd933bb; UTGv2=h46b326af644f4ac5d0eb1502881136b3750; SL_G_WPT_TO=ko; __gpi=UID=000008ba227e99e0:T=1660813741:RT=1668300534:S=ALNI_MaJHIVJIGpQ5nTE9lvypKQxJnn10A; SL_GWPT_Show_Hide_tmp=1; SL_wptGlobTipTmp=1; spcsrf=324bb1134a2ffaeffba5a6d90d4b170d; sp_lit=56vk5DIus4k4khwHctc+NQ==; PRLST=ZY; _ga_56VYJJ7FTM=GS1.1.1668304234.38.1.1668304574.0.0.0; adOtr=44fd5c0Y514',
# 'pragma': 'no-cache',
"referer": "https://anilife.live/g/l?id=65bd6132-e480-4599-bfee-37e0e1eb20e9",
# 'sec-fetch-dest': 'document',
# 'sec-fetch-mode': 'navigate',
# 'sec-fetch-site': 'same-origin',
# 'sec-fetch-user': '?1',
# 'sec-gpc': '1',
# 'upgrade-insecure-requests': '1',
"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36",
}
useragent = {
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, "
"like Gecko) Chrome/96.0.4664.110 Whale/3.12.129.46 Safari/537.36"
}
origin_url = None
class PlParam(BaseModel):
    """Request body shared by the Playwright endpoints.

    Attributes:
        url: Target page URL to load.
        headless: Run the browser headless (default False).
        referer: Optional referer override (currently unused by the endpoints).
        engine: Browser engine: "chrome", "chromium", "webkit", anything else
            falls back to Firefox.
        stealth: Enable stealth mode (currently unused by the endpoints).
        reload: Reload the page once after the initial load.
    """

    url: str
    headless: Optional[bool] = False
    referer: Optional[str] = None
    engine: Optional[str] = "chrome"
    # BUG FIX: these defaults were the one-element tuples ``(False,)``
    # (stray trailing commas), which are truthy and do not match the
    # declared Optional[bool] type.
    stealth: Optional[bool] = False
    reload: Optional[bool] = False
@app.get("/")
async def root():
    """Health-check endpoint: returns a static greeting payload."""
    payload = {"message": "Hello World"}
    return payload
@app.get("/hello/{name}")
async def say_hello(name: str):
    """Echo a greeting for the *name* path parameter."""
    greeting = f"Hello {name}"
    return {"message": greeting}
@app.get("/get_html")
async def get_html():
    # TODO: unimplemented stub — always returns None (serialized as JSON null).
    pass
def intercept_response(response):
    """Debug hook for Playwright responses.

    Prints the cookie header of background XHR requests and passes the
    response through unchanged.
    """
    is_xhr = response.request.resource_type == "xhr"
    if is_xhr:
        print(response.headers.get("cookie"))
    return response
async def request_event_handler(response):
    """Debug hook: dump the cookie header of *response* to stdout.

    Raises KeyError when the response carries no "cookie" header (same as
    the original behaviour).
    """
    print("HTTP Cookie")
    all_headers = await response.all_headers()
    print(all_headers["cookie"])
@app.post("/get_html_by_playwright")
async def get_html_by_playwright(p_param: PlParam):
    """Render ``p_param.url`` with Playwright and return its HTML.

    Returns ``{"success": "ok", "url": <final url>, "html": <page content>}``
    on success; on failure the exception is logged and ``None`` is returned.

    Side effects: publishes the page's cookies and final URL into the
    module-level ``headers`` dict ("cookie"/"Cookie", "referer"/"Referer")
    and updates ``origin_url`` so subsequent requests reuse the session.
    """
    global headers, origin_url
    logger.debug(headers)
    pl_dict = p_param.dict()
    logger.debug(pl_dict["engine"])
    try:
        from playwright.async_api import async_playwright
        import time

        print("** playwright ==========================================")
        browser_args = [
            "--window-size=1300,570",
            "--window-position=000,000",
            "--disable-dev-shm-usage",
            "--no-sandbox",
            "--disable-web-security",
            "--disable-features=site-per-process",
            "--disable-setuid-sandbox",
            "--disable-accelerated-2d-canvas",
            "--no-first-run",
            "--no-zygote",
            # '--single-process',
            "--disable-gpu",
            "--use-gl=egl",
            "--disable-blink-features=AutomationControlled",
            "--disable-background-networking",
            "--enable-features=NetworkService,NetworkServiceInProcess",
            "--disable-background-timer-throttling",
            "--disable-backgrounding-occluded-windows",
            "--disable-breakpad",
            "--disable-client-side-phishing-detection",
            "--disable-component-extensions-with-background-pages",
            "--disable-default-apps",
            "--disable-extensions",
            "--disable-features=Translate",
            "--disable-hang-monitor",
            "--disable-ipc-flooding-protection",
            "--disable-popup-blocking",
            "--disable-prompt-on-repost",
            "--disable-renderer-backgrounding",
            "--disable-sync",
            "--force-color-profile=srgb",
            "--metrics-recording-only",
            "--enable-automation",
            "--password-store=basic",
            "--use-mock-keychain",
            "--hide-scrollbars",
            "--mute-audio",
        ]
        # NOTE(review): the tuned argument list above is deliberately
        # discarded here — browsers currently launch with default args.
        # Remove this override to re-enable the flags.
        browser_args = []
        start = time.time()
        async with async_playwright() as p:
            try:
                if pl_dict["engine"] == "chrome":
                    # launch_persistent_context() returns a BrowserContext
                    # (cookies survive runs via the module-level user_dir),
                    # so it is used as the context directly instead of
                    # calling new_context() on it.
                    context = await p.chromium.launch_persistent_context(
                        channel="chrome",
                        args=browser_args,
                        headless=pl_dict["headless"],
                        user_data_dir=user_dir,
                    )
                    print(pl_dict["engine"])
                else:
                    # BUG FIX: this dispatch previously used ``if`` (not
                    # ``elif``) for "chromium", so engine == "chrome" fell
                    # through to the final ``else`` and the Chrome context
                    # was discarded in favour of a fresh Firefox browser.
                    if pl_dict["engine"] == "chromium":
                        browser = await p.chromium.launch(
                            channel="chromium",
                            args=browser_args,
                            headless=pl_dict["headless"],
                        )
                        print(pl_dict["engine"])
                    elif pl_dict["engine"] == "webkit":
                        browser = await p.webkit.launch(
                            headless=pl_dict["headless"],
                            args=browser_args,
                        )
                    else:
                        print("firefox")
                        browser = await p.firefox.launch(
                            headless=pl_dict["headless"],
                            args=browser_args,
                        )
                    # Create a fresh incognito context on the launched browser.
                    context = await browser.new_context()
                logger.info(headers)
                page = await context.new_page()
                await page.set_extra_http_headers(headers)
                logger.info(f'referer:: {headers["referer"]}')
                logger.info(headers)
                await page.goto(pl_dict["url"], wait_until="load")
                await asyncio.sleep(2)
                logger.debug(pl_dict["reload"])
                if pl_dict["reload"]:
                    await page.reload()
                    await asyncio.sleep(1)
                cookies = await context.cookies()
                logger.debug(len(cookies))
                # Serialize the context cookies into a Cookie header string
                # and publish it under both key spellings, keeping readers
                # of either "cookie" or "Cookie" in sync.
                tmp = ""
                for c in cookies:
                    tmp += f'{c["name"]}={c["value"]}; '
                logger.debug(tmp)
                headers["cookie"] = tmp
                headers["Cookie"] = tmp
                await asyncio.sleep(0.5)
                logger.info(f"page.url:: {page.url}")
                _url = page.url
                origin_url = page.url
                headers["referer"] = origin_url
                headers["Referer"] = origin_url
                ret_data = await page.content()
                logger.info(f"run at {time.time() - start} sec")
                await page.close()
                return {"success": "ok", "url": _url, "html": ret_data}
            except Exception as e:
                logger.error(f"Exception: {str(e)}")
                logger.error(traceback.format_exc())
    except Exception as e:
        logger.error(f"Exception: {str(e)}")
        logger.error(traceback.format_exc())
@app.post("/get_vod_url")
async def get_vod_url(p_param: PlParam):
    """Resolve the VOD (m3u8) URL for a page by replaying it with Playwright.

    The page is loaded in headless Chromium while a HarTracer records all
    network traffic; the last request URL containing "m3u8" is returned.
    Returns ``None`` when tracing failed or no m3u8 request was observed.
    """
    pl_dict = p_param.dict()
    logger.debug(pl_dict["engine"])
    har = None
    try:
        browser_args = [
            "--window-size=1300,570",
            "--window-position=000,000",
            "--disable-dev-shm-usage",
            "--no-sandbox",
            "--disable-web-security",
            "--disable-features=site-per-process",
            "--disable-setuid-sandbox",
            "--disable-accelerated-2d-canvas",
            "--no-first-run",
            "--no-zygote",
            # "--single-process",
            "--disable-gpu",
            # "--use-gl=egl",
            "--disable-blink-features=AutomationControlled",
            "--disable-background-networking",
            "--enable-features=NetworkService,NetworkServiceInProcess",
            "--disable-background-timer-throttling",
            "--disable-backgrounding-occluded-windows",
            "--disable-breakpad",
            "--disable-client-side-phishing-detection",
            "--disable-component-extensions-with-background-pages",
            "--disable-default-apps",
            "--disable-extensions",
            "--disable-features=Translate",
            "--disable-hang-monitor",
            "--disable-ipc-flooding-protection",
            "--disable-popup-blocking",
            "--disable-prompt-on-repost",
            "--disable-renderer-backgrounding",
            "--disable-sync",
            "--force-color-profile=srgb",
            "--metrics-recording-only",
            "--enable-automation",
            "--password-store=basic",
            "--use-mock-keychain",
            "--hide-scrollbars",
            "--mute-audio",
        ]
        # NOTE(review): relies on module-level imports of time,
        # async_playwright and HarTracer — confirm they exist at file top.
        start = time.time()
        async with async_playwright() as p:
            try:
                browser = await p.chromium.launch(
                    headless=pl_dict["headless"], args=browser_args
                )
                logger.info(f"headers : {headers}")
                context = await browser.new_context()
                await context.set_extra_http_headers(headers)
                # Record every request/response of this context into a HAR.
                tracer = HarTracer(context=context, browser_name=p.chromium.name)
                page = await context.new_page()
                await page.goto(pl_dict["url"], wait_until="domcontentloaded")
                har = await tracer.flush()
                await context.close()
                await browser.close()
                await asyncio.sleep(1)
            except Exception as e:
                logger.error("Exception:%s", e)
                logger.error(traceback.format_exc())
        # BUG FIX: previously har was dereferenced unconditionally, so a
        # failure above guaranteed an AttributeError on None.
        if har is None:
            logger.error("HAR tracing failed; no vod url available")
            return None
        result_har_dict = har.to_dict()
        logger.debug(result_har_dict)
        tmp_video_url = []
        for elem in result_har_dict["log"]["entries"]:
            if "m3u8" in elem["request"]["url"]:
                logger.debug(elem["request"]["url"])
                tmp_video_url.append(elem["request"]["url"])
        # BUG FIX: guard the empty case — [-1] raised IndexError before.
        if not tmp_video_url:
            logger.error("no m3u8 request found in HAR")
            return None
        # The last matching request is assumed to be the playable stream.
        vod_url = tmp_video_url[-1]
        logger.debug(f"vod_url:: {vod_url}")
        logger.debug(f"run at {time.time() - start} sec")
        return vod_url
    except Exception as e:
        logger.error("Exception:%s", e)
        logger.error(traceback.format_exc())
# if __name__ == "__main__":
# uvicorn.run("main:app", host="0.0.0.0", port=7070, reload=True)