anilife update 2022.10.23(01.)
Changed file: logic_anilife.py (275 changed lines)
@@ -270,10 +270,13 @@ class LogicAniLife(LogicModuleBase):
             return await page.content()
 
     @staticmethod
-    def get_vod_url(url, headless=False):
+    async def get_vod_url_v1(
+        url, headless=False, referer=None, engine="chrome", stealth=False
+    ):
         from playwright.sync_api import sync_playwright
-        from playwright_stealth import stealth_sync
-        import html_to_json
+        from playwright.async_api import async_playwright
+        from playwright_har_tracer import HarTracer
+        from playwright_stealth import stealth_sync, stealth_async
 
         import time
 
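
The old synchronous get_vod_url entry point becomes an async coroutine, renamed get_vod_url_v1, that additionally accepts a referer, a browser engine, and a stealth toggle. A hypothetical caller sketch (not part of the commit) showing how synchronous plugin code could drive the new coroutine; the URL and argument values are placeholders:

```python
import asyncio

# asyncio.run() bridges synchronous plugin code and the new async helper.
# All values below are illustrative, not taken from the commit.
html = asyncio.run(
    LogicAniLife.get_vod_url_v1(
        "https://anilife.live/g/l?id=example",  # placeholder episode URL
        headless=True,
        referer="https://anilife.live",
        engine="chrome",   # anything else falls back to webkit/firefox inside the helper
        stealth=True,
    )
)
```
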
@@ -295,10 +298,111 @@ class LogicAniLife(LogicModuleBase):
             "Chrome/69.0.3497.100 Safari/537.36"
         )
         # from playwright_stealth import stealth_sync
+        cookie = None
 
-        with sync_playwright() as p:
-            browser = p.chromium.launch(headless=headless)
-            # browser = p.webkit.launch(headless=headless)
+        def set_cookie(req):
+            nonlocal cookie
+            if "cookie" in req.headers:
+                cookie = req.headers["cookie"]
+
+        async with async_playwright() as p:
+            if engine == "chrome":
+                browser = await p.chromium.launch(channel="chrome", headless=headless)
+            elif engine == "webkit":
+                browser = await p.webkit.launch(headless=headless)
+            else:
+                browser = await p.firefox.launch(headless=headless)
+            # context = browser.new_context(
+            #     user_agent=ua,
+            # )
+
+            LogicAniLife.headers["Referer"] = "https://anilife.live/detail/id/471"
+            # print(LogicAniLife.headers)
+
+            LogicAniLife.headers["Referer"] = LogicAniLife.episode_url
+
+            if referer is not None:
+                LogicAniLife.headers["Referer"] = referer
+
+            logger.debug(f"LogicAniLife.headers::: {LogicAniLife.headers}")
+            context = await browser.new_context(extra_http_headers=LogicAniLife.headers)
+            await context.add_cookies(LogicAniLife.cookies)
+
+            # LogicAniLife.headers["Cookie"] = cookie_value
+
+            # context.set_extra_http_headers(LogicAniLife.headers)
+            tracer = HarTracer(context=context, browser_name=p.webkit.name)
+
+            page = await context.new_page()
+
+            # page.set_extra_http_headers(LogicAniLife.headers)
+
+            if stealth:
+                await stealth_async(page)
+
+            # page.on("request", set_cookie)
+            # stealth_sync(page)
+            print(LogicAniLife.headers["Referer"])
+
+            page.on("request", set_cookie)
+
+            print(f'Referer:: {LogicAniLife.headers["Referer"]}')
+            # await page.set_extra_http_headers(LogicAniLife.headers)
+
+            await page.goto(
+                url, wait_until="load", referer=LogicAniLife.headers["Referer"]
+            )
+
+            har = await tracer.flush()
+            # page.wait_for_timeout(10000)
+            await asyncio.sleep(10)
+
+            # await page.reload()
+
+            # time.sleep(10)
+            # cookies = context.cookies
+            # print(cookies)
+
+            print(f"page.url:: {page.url}")
+            LogicAniLife.origin_url = page.url
+
+            # print(page.content())
+
+            print(f"run at {time.time() - start} sec")
+
+            return await page.content()
+
+    @staticmethod
+    async def get_vod_url(url, headless=False):
+        from playwright.sync_api import sync_playwright
+        from playwright.async_api import async_playwright
+        from playwright_stealth import stealth_async
+        import html_to_json
+        from playwright_har_tracer import HarTracer
+        import time
+
+        # scraper = cloudscraper.create_scraper(
+        #     browser={"browser": "chrome", "platform": "windows", "desktop": True},
+        #     debug=False,
+        #     # sess=LogicAniLife.session,
+        #     delay=10,
+        # )
+        #
+        # cookie_value, user_agent = scraper.get_cookie_string(url)
+        #
+        # logger.debug(f"cookie_value:: {cookie_value}")
+
+        start = time.time()
+        ua = (
+            "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
+            "AppleWebKit/537.36 (KHTML, like Gecko) "
+            "Chrome/69.0.3497.100 Safari/537.36"
+        )
+        # from playwright_stealth import stealth_sync
+
+        async with async_playwright() as p:
+            # browser = await p.chromium.launch(headless=headless)
+            browser = await p.webkit.launch(headless=headless)
             # context = browser.new_context(
             #     user_agent=ua,
             # )
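
The rewritten helpers share one pattern: open an async Playwright context with the plugin's headers and cookies, attach a playwright-har-tracer HarTracer to the context, navigate, and flush the tracer to get a HAR record of every request the page made. A condensed, self-contained sketch of that pattern (standalone code with placeholder URL and headers, not the plugin method itself):

```python
import asyncio

from playwright.async_api import async_playwright
from playwright_har_tracer import HarTracer


async def capture_har(url: str, headers: dict) -> dict:
    """Load a page and return the HAR of all requests it issued (sketch)."""
    async with async_playwright() as p:
        browser = await p.webkit.launch(headless=True)
        context = await browser.new_context(extra_http_headers=headers)
        tracer = HarTracer(context=context, browser_name=p.webkit.name)

        page = await context.new_page()
        await page.goto(url, wait_until="domcontentloaded")

        har = await tracer.flush()  # playwright-har-tracer Har object
        await browser.close()
        return har.to_dict()        # {"log": {"entries": [...]}}


# Example with placeholder values:
# har = asyncio.run(capture_har("https://example.com", {"Referer": "https://example.com"}))
```
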
@@ -306,38 +410,46 @@ class LogicAniLife(LogicModuleBase):
             LogicAniLife.headers[
                 "Referer"
             ] = "https://anilife.live/g/l?id=14344143-040a-4e40-9399-a7d22d94554b"
-            print(LogicAniLife.headers)
+            # print(LogicAniLife.headers)
 
-            context = browser.new_context(extra_http_headers=LogicAniLife.headers)
+            # context = await browser.new_context(extra_http_headers=LogicAniLife.headers)
+            context = await browser.new_context()
+            await context.set_extra_http_headers(LogicAniLife.headers)
 
-            context.add_cookies(LogicAniLife.cookies)
+            # await context.add_cookies(LogicAniLife.cookies)
+
+            # tracer = HarTracer(context=context, browser_name=p.chromium.name)
+            tracer = HarTracer(context=context, browser_name=p.webkit.name)
 
             # LogicAniLife.headers["Cookie"] = cookie_value
 
             # context.set_extra_http_headers(LogicAniLife.headers)
 
-            page = context.new_page()
+            page = await context.new_page()
 
-            # page.set_extra_http_headers(LogicAniLife.headers)
+            # await page.set_extra_http_headers(LogicAniLife.headers)
 
-            stealth_sync(page)
+            # await stealth_async(page)
+            # logger.debug(url)
 
-            def set_cookie(req):
-                if "cookie" in req.headers:
-                    print(req.headers["cookie"])
-                    cookie = req.headers["cookie"]
-
             # page.on("request", set_cookie)
             # stealth_sync(page)
-            page.goto(
-                url, wait_until="networkidle", referer=LogicAniLife.headers["Referer"]
-            )
+            # await page.goto(
+            #     url, wait_until="load", referer=LogicAniLife.headers["Referer"]
+            # )
+            # await page.goto(url, wait_until="load")
+            await page.goto(url, wait_until="domcontentloaded")
+
+            har = await tracer.flush()
 
             # page.wait_for_timeout(10000)
-            time.sleep(1)
+            await asyncio.sleep(2)
+
+            # logger.debug(har)
             # page.reload()
 
             # time.sleep(10)
-            cookies = context.cookies
+            # cookies = context.cookies
             # print(cookies)
 
             # print(page.content())
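
get_vod_url switches from the sync Playwright calls (page.goto, time.sleep) to their awaited counterparts; a blocking call such as time.sleep would stall the event loop the coroutine runs on. A small illustrative contrast, not from the commit:

```python
import asyncio
import time


async def bad_wait():
    time.sleep(2)           # blocks the whole event loop for 2 seconds
    return "done"


async def good_wait():
    await asyncio.sleep(2)  # yields control; other tasks keep running
    return "done"


# asyncio.run(good_wait())
```
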
@@ -345,23 +457,35 @@ class LogicAniLife(LogicModuleBase):
             # """() => {
             #     return console.log(vodUrl_1080p) }"""
             # )
-            vod_url = page.evaluate(
-                """async () =>{
-                return _0x55265f(0x99) + alJson[_0x55265f(0x91)]
-                }"""
-            )
+            # vod_url = page.evaluate(
+            #     """async () =>{
+            #     return _0x55265f(0x99) + alJson[_0x55265f(0x91)]
+            #     }"""
+            # )
+
+            result_har_json = har.to_json()
+            result_har_dict = har.to_dict()
+            # logger.debug(result_har_dict)
+
+            tmp_video_url = []
+            for i, elem in enumerate(result_har_dict["log"]["entries"]):
+                if "m3u8" in elem["request"]["url"]:
+                    logger.debug(elem["request"]["url"])
+                    tmp_video_url.append(elem["request"]["url"])
+
+            vod_url = tmp_video_url[-1]
 
             logger.debug(f"vod_url:: {vod_url}")
 
-            print(f"run at {time.time() - start} sec")
+            logger.debug(f"run at {time.time() - start} sec")
 
             # html_content = LogicAniLife.get_html_selenium(
             #     vod_url, "https://anilife.live"
             # )
 
-            html_content = LogicAniLife.get_html_playwright(
-                vod_url, False, referer="https://anilife.live"
-            )
+            # html_content = LogicAniLife.get_html_playwright(
+            #     vod_url, False, referer="https://anilife.live"
+            # )
 
             # html_content = LogicAniLife.get_html(
             #     vod_url, referer="https://anilife.live"
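
Instead of evaluating the obfuscated player script (the _0x55265f call), the VOD URL is now read from the HAR: every entry whose request URL contains "m3u8" is collected and the last one wins. A small helper sketch of that filter, assuming a HAR dict in the playwright-har-tracer layout shown above:

```python
from typing import Optional


def pick_m3u8_url(har_dict: dict) -> Optional[str]:
    """Return the last .m3u8 request URL recorded in a HAR dict (sketch)."""
    candidates = [
        entry["request"]["url"]
        for entry in har_dict.get("log", {}).get("entries", [])
        if "m3u8" in entry["request"]["url"]
    ]
    return candidates[-1] if candidates else None


# vod_url = pick_m3u8_url(har.to_dict())  # mirrors the loop added in this commit
```
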
@@ -370,10 +494,10 @@ class LogicAniLife(LogicModuleBase):
             #     vod_url, referer="https://anilife.live"
             # )
 
-            print(f"html_content:: {html_content}")
-            output_json = html_to_json.convert(html_content)
-            resolution = output_json["html"][0]["body"][0]["_value"]
-            logger.debug(f"output_json:: {resolution}")
+            # print(f"html_content:: {html_content}")
+            # output_json = html_to_json.convert(html_content)
+            # resolution = output_json["html"][0]["body"][0]["_value"]
+            # logger.debug(f"output_json:: {resolution}")
 
             return vod_url
 
@@ -571,7 +695,7 @@ class LogicAniLife(LogicModuleBase):
 
                 # logger.info("code::: %s", code)
                 P.ModelSetting.set("anilife_current_code", code)
-                data = self.get_series_info(code, wr_id, bo_table)
+                data = self.get_series_info(code)
                 self.current_data = data
                 return jsonify({"ret": "success", "data": data, "code": code})
             elif sub == "anime_list":
@@ -627,7 +751,7 @@ class LogicAniLife(LogicModuleBase):
         return True
 
     # 시리즈 정보를 가져오는 함수
-    def get_series_info(self, code, wr_id, bo_table):
+    def get_series_info(self, code):
         try:
             if code.isdigit():
                 url = P.ModelSetting.get("anilife_url") + "/detail/id/" + code
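
get_series_info drops the wr_id and bo_table parameters, so callers now pass only the AniLife content code. A hedged usage sketch; the instance name and code value are placeholders:

```python
# "logic" stands in for the LogicAniLife instance handling the request.
data = logic.get_series_info("471")  # numeric code -> {anilife_url}/detail/id/471
```
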
@@ -711,7 +835,7 @@ class LogicAniLife(LogicModuleBase):
                 episodes.append(
                     {
                         "ep_num": ep_num,
-                        "title": title,
+                        "title": f"{main_title} {ep_num}화 - {title}",
                         "link": link,
                         "thumbnail": image,
                         "date": date,
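
Episode entries now carry a composed title: the series main_title, the episode number with the 화 (episode) suffix, and the per-episode title. A quick illustration with placeholder values:

```python
# Placeholder values purely for illustration.
main_title = "예시 애니메이션"
ep_num = 3
title = "세 번째 이야기"

composed = f"{main_title} {ep_num}화 - {title}"
# -> "예시 애니메이션 3화 - 세 번째 이야기"
```
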
@@ -815,7 +939,6 @@ class LogicAniLife(LogicModuleBase):
 
             print(p.match(entity["link"]) != None)
             if p.match(entity["link"]) is None:
-
                 entity["link"] = P.ModelSetting.get("anilife_url") + entity["link"]
                 # real_url = LogicAniLife.get_real_link(url=entity["link"])
 
@@ -919,10 +1042,11 @@ class AniLifeQueueEntity(FfmpegQueueEntity):
 
             ourls = parse.urlparse(url)
 
-            headers = {
+            self.headers = {
                 "Referer": f"{ourls.scheme}://{ourls.netloc}",
                 "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.110 Whale/3.12.129.46 Safari/537.36",
             }
 
             headers["Referer"] = "https://anilife.live/detail/id/471"
             headers["Referer"] = LogicAniLife.episode_url
 
@@ -944,13 +1068,15 @@ class AniLifeQueueEntity(FfmpegQueueEntity):
             text = asyncio.run(
                 LogicAniLife.get_html_playwright(
                     url,
-                    headless=False,
+                    headless=True,
                     referer=referer_url,
                     engine="chrome",
                     stealth=True,
                 )
             )
 
+            # vod_1080p_url = text
+
             # logger.debug(text)
             soup = BeautifulSoup(text, "lxml")
 
@@ -965,10 +1091,73 @@ class AniLifeQueueEntity(FfmpegQueueEntity):
             if match:
                 jawcloud_url = match.group("jawcloud_url")
 
-            print(f"jawcloud_url:: {jawcloud_url}")
+            logger.debug(f"jawcloud_url:: {jawcloud_url}")
 
-            vod_1080p_url = LogicAniLife.get_vod_url(jawcloud_url)
+            # loop = asyncio.new_event_loop()
+            # asyncio.set_event_loop(loop)
+            #
+            logger.info(self.info)
+
+            match = re.compile(
+                r"(?P<title>.*?)\s*((?P<season>\d+)%s)?\s*((?P<epi_no>\d+)%s)"
+                % ("기", "화")
+            ).search(self.info["title"])
+
+            # epi_no 초기값
+            epi_no = 1
+            self.quality = "1080P"
+
+            if match:
+                self.content_title = match.group("title").strip()
+                if "season" in match.groupdict() and match.group("season") is not None:
+                    self.season = int(match.group("season"))
+
+                # epi_no = 1
+                epi_no = int(match.group("epi_no"))
+                ret = "%s.S%sE%s.%s-AL.mp4" % (
+                    self.content_title,
+                    "0%s" % self.season if self.season < 10 else self.season,
+                    "0%s" % epi_no if epi_no < 10 else epi_no,
+                    self.quality,
+                )
+            else:
+                self.content_title = self.info["title"]
+                P.logger.debug("NOT MATCH")
+                ret = "%s.720p-AL.mp4" % self.info["title"]
+
+            # logger.info('self.content_title:: %s', self.content_title)
+            self.epi_queue = epi_no
+
+            self.filename = Util.change_text_for_use_filename(ret)
+            logger.info(f"self.filename::> {self.filename}")
+            self.savepath = P.ModelSetting.get("ohli24_download_path")
+            logger.info(f"self.savepath::> {self.savepath}")
+
+            if P.ModelSetting.get_bool("ohli24_auto_make_folder"):
+                if self.info["day"].find("완결") != -1:
+                    folder_name = "%s %s" % (
+                        P.ModelSetting.get("ohli24_finished_insert"),
+                        self.content_title,
+                    )
+                else:
+                    folder_name = self.content_title
+                folder_name = Util.change_text_for_use_filename(folder_name.strip())
+                self.savepath = os.path.join(self.savepath, folder_name)
+                if P.ModelSetting.get_bool("ohli24_auto_make_season_folder"):
+                    self.savepath = os.path.join(
+                        self.savepath, "Season %s" % int(self.season)
+                    )
+            self.filepath = os.path.join(self.savepath, self.filename)
+            if not os.path.exists(self.savepath):
+                os.makedirs(self.savepath)
+
+            vod_1080p_url = asyncio.run(
+                LogicAniLife.get_vod_url(jawcloud_url, headless=False)
+            )
             print(f"vod_1080p_url:: {vod_1080p_url}")
+            self.url = vod_1080p_url
+
+            logger.info(self.url)
         except Exception as e:
             P.logger.error("Exception:%s", e)
             P.logger.error(traceback.format_exc())
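
The download entity now derives its save name itself: a regex splits the queue title into series title, optional season ("N기"), and episode number ("N화"), and the result is formatted as Title.SxxEyy.1080P-AL.mp4 before the jawcloud URL is resolved via the async get_vod_url. A standalone sketch of that parsing step; the sample title and the default values are illustrative only:

```python
import re

# Sample queue title, purely illustrative.
info_title = "예시 애니메이션 2기 7화"

pattern = re.compile(
    r"(?P<title>.*?)\s*((?P<season>\d+)%s)?\s*((?P<epi_no>\d+)%s)" % ("기", "화")
)
match = pattern.search(info_title)

season, epi_no, quality = 1, 1, "1080P"  # assumed defaults for this sketch
if match:
    content_title = match.group("title").strip()
    if match.group("season") is not None:
        season = int(match.group("season"))
    epi_no = int(match.group("epi_no"))
    filename = "%s.S%sE%s.%s-AL.mp4" % (
        content_title,
        "0%s" % season if season < 10 else season,
        "0%s" % epi_no if epi_no < 10 else epi_no,
        quality,
    )
else:
    filename = "%s.720p-AL.mp4" % info_title

# filename -> "예시 애니메이션.S02E07.1080P-AL.mp4"
```
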