Compare commits

..

No commits in common. "6a0427f9752d631af14f93f806f7b5e19fc2a9f8" and "588c9105cca0314ebb24ebf03681db1ee3f3bfce" have entirely different histories.

3 changed files with 48 additions and 66 deletions

1
.gitignore vendored
View File

@@ -1,4 +1,3 @@
venv/ venv/
*.egg-info *.egg-info
**/__pycache__/ **/__pycache__/
build/

View File

@@ -10,12 +10,7 @@ class Download:
quality: str quality: str
filename: str filename: str
def download( def download(self, cookies: dict[str, str], destination_dir: Path):
self,
cookies: dict[str, str],
destination_dir: Path,
filename: str | None = None,
):
"""Downloads the episode into the directory @destination_dir. The cookies from @cookies are used """Downloads the episode into the directory @destination_dir. The cookies from @cookies are used
during the request.""" during the request."""
with requests.get( with requests.get(
@@ -28,7 +23,6 @@ class Download:
) as r: ) as r:
r.raise_for_status() r.raise_for_status()
fname = filename or self.filename with open(destination_dir / self.filename, "wb") as f:
with open(destination_dir / fname, "wb") as f:
for chunk in r.iter_content(chunk_size=8196): for chunk in r.iter_content(chunk_size=8196):
f.write(chunk) f.write(chunk)

View File

@@ -5,10 +5,53 @@ import re
import requests import requests
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from akibapass_downloader.const import LOGIN_URL, BASE_URL from akibapass_downloader.const import LOGIN_URL
def _check_login(soup: BeautifulSoup) -> bool: def login(email: str, password: str) -> dict[str, str] | None:
"""Performs a login on the Akibapass site and returns a set of cookies
to use while performing other requests."""
login_page_req = requests.get(LOGIN_URL)
if login_page_req.status_code != 200:
return None
page = BeautifulSoup(login_page_req.text, "html.parser")
# Find the CSRF token's value.
csrf_token = page.find("meta", attrs={"name": "csrf-token"}).attrs["content"]
# Find the CSRF token's name.
csrf_param = page.find("meta", attrs={"name": "csrf-param"}).attrs["content"]
r = requests.post(
LOGIN_URL,
cookies={
# Use the session we received on the login page.
"_session": login_page_req.cookies["_session"],
},
data={
"email": email,
"password": password,
csrf_param: csrf_token,
"utf8": "",
},
)
if r.status_code != 200:
return None
return {
"_session": r.cookies["_session"],
}
def is_logged_in(cookies: dict[str, str]) -> bool:
"""Checks if the cookies in @cookies are still valid."""
r = requests.get(
LOGIN_URL,
cookies=cookies,
)
if r.status_code != 200:
return False
soup = BeautifulSoup(r.text, "html.parser")
scripts = soup.find_all("script") scripts = soup.find_all("script")
user_data_matcher = re.compile(r".*window._current_user\s*=\s*(\{.*?\}).*") user_data_matcher = re.compile(r".*window._current_user\s*=\s*(\{.*?\}).*")
for script in scripts: for script in scripts:
@@ -34,57 +77,3 @@ def _check_login(soup: BeautifulSoup) -> bool:
return data != {} and data.get("id") is not None return data != {} and data.get("id") is not None
return False return False
def login(email: str, password: str) -> dict[str, str] | None:
"""Performs a login on the Akibapass site and returns a set of cookies
to use while performing other requests."""
s = requests.Session()
login_page_req = s.get(LOGIN_URL)
if login_page_req.status_code != 200:
return None
page = BeautifulSoup(login_page_req.text, "html.parser")
# Find the CSRF token's value.
csrf_token = page.find("meta", attrs={"name": "csrf-token"}).attrs["content"]
# Find the CSRF token's name.
csrf_param = page.find("meta", attrs={"name": "csrf-param"}).attrs["content"]
r = s.post(
LOGIN_URL,
headers={
"Referer": LOGIN_URL,
"Content-Type": "application/x-www-form-urlencoded",
"Origin": BASE_URL,
"DNT": "1",
},
data={
"email": email,
"password": password,
csrf_param: csrf_token,
"utf8": "",
},
)
if r.status_code != 200:
return None
if not _check_login(BeautifulSoup(r.text, "html.parser")):
return None
return {
"_session": r.cookies["_session"],
}
def is_logged_in(cookies: dict[str, str]) -> dict[str, str]:
"""Checks if the cookies in @cookies are still valid."""
r = requests.get(
LOGIN_URL,
cookies=cookies,
headers={
"Referer": LOGIN_URL,
},
)
if r.status_code != 200:
return False
return _check_login(BeautifulSoup(r.text, "html.parser"))