Add a method for checking the validity of the cookies

PapaTutuWawa 2023-12-16 18:26:41 +01:00
parent b32243a67d
commit 588c9105cc
2 changed files with 48 additions and 4 deletions


@@ -1,2 +1,5 @@
-# Base URL of the Akibapass service
+# Base URL of the Akibapass service.
 BASE_URL = "https://akibapasstv.vhx.tv"
+
+# URL of Akibapass' login page.
+LOGIN_URL = f"{BASE_URL}/login"


@@ -1,13 +1,17 @@
+import json
+import json.decoder
+import re
+
 import requests
 from bs4 import BeautifulSoup
 
-from akibapass_downloader.const import BASE_URL
+from akibapass_downloader.const import LOGIN_URL
 
 
 def login(email: str, password: str) -> dict[str, str] | None:
     """Performs a login on the Akibapass site and returns a set of cookies
     to use while performing other requests."""
-    login_page_req = requests.get(f"{BASE_URL}/login")
+    login_page_req = requests.get(LOGIN_URL)
     if login_page_req.status_code != 200:
         return None
 
@@ -18,7 +22,7 @@ def login(email: str, password: str) -> dict[str, str] | None:
     csrf_param = page.find("meta", attrs={"name": "csrf-param"}).attrs["content"]
 
     r = requests.post(
-        f"{BASE_URL}/login",
+        LOGIN_URL,
         cookies={
             # Use the session we received on the login page.
             "_session": login_page_req.cookies["_session"],
@@ -36,3 +40,40 @@ def login(email: str, password: str) -> dict[str, str] | None:
     return {
         "_session": r.cookies["_session"],
     }
+
+
+def is_logged_in(cookies: dict[str, str]) -> bool:
+    """Checks if the cookies in @cookies are still valid."""
+    r = requests.get(
+        LOGIN_URL,
+        cookies=cookies,
+    )
+    if r.status_code != 200:
+        return False
+
+    soup = BeautifulSoup(r.text, "html.parser")
+    scripts = soup.find_all("script")
+    user_data_matcher = re.compile(r".*window._current_user\s*=\s*(\{.*?\}).*")
+    for script in scripts:
+        raw_content = "".join(script.contents)
+        content = "".join([line.strip() for line in raw_content.split("\n")])
+        if "window._current_user" not in content:
+            continue
+
+        match = user_data_matcher.match(content)
+        if match:
+            try:
+                # Use a try to guard against Akibapass doing something like
+                # `window._current_user = null;`, which is obviously not really JSON.
+                # (Or maybe it is?).
+                data = json.loads(match.group(1))
+            except json.decoder.JSONDecodeError:
+                continue
+
+            # Check that Akibapass doesn't do something like
+            # `window._current_user = {}`. Also, the variable is set even when
+            # not logged in, but then the "id" attribute of the user is set to
+            # None.
+            return data != {} and data.get("id") is not None
+
+    return False
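
Not part of the commit, but for context: a minimal usage sketch of login() together with the new is_logged_in() check. The import path and the credentials below are assumptions (the changed file's name is not shown in this diff); only the two function signatures come from the code above.

# Usage sketch -- the module path is an assumption, credentials are placeholders.
from akibapass_downloader.login import is_logged_in, login

cookies = login("user@example.com", "hunter2")
if cookies is None:
    raise SystemExit("Login failed.")

# Before reusing cookies persisted from an earlier run, verify that the
# session is still accepted by the site; log in again if it is not.
if not is_logged_in(cookies):
    cookies = login("user@example.com", "hunter2")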