Initial commit
commit b32243a67d
3 .gitignore vendored Normal file
@@ -0,0 +1,3 @@
venv/
*.egg-info
**/__pycache__/
9 README.md Normal file
@@ -0,0 +1,9 @@
# akibapass_downloader

A library that handles logging in to and parsing the Akibapass site so you can
download your library. It is only tested on (and probably only works with) anime
series you have bought (not rented).

## License

See [`LICENSE`](./LICENSE).
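For orientation, a minimal usage sketch built only from the modules added in this commit; the e-mail address, password, series URL, and output directory are placeholders:

```python
from pathlib import Path

from akibapass_downloader.login import login
from akibapass_downloader.episode import list_episodes

# Log in first; login() returns the session cookies, or None on failure.
cookies = login("user@example.com", "hunter2")
assert cookies is not None

# List the episodes of a series page (placeholder URL).
episodes = list_episodes("https://akibapasstv.vhx.tv/some-series")

for episode in episodes:
    # Fetch the available downloads for each episode and save them
    # into the current directory.
    for download in episode.get_downloads(cookies):
        download.download(cookies, Path("."))
```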
0 akibapass_downloader/__init__.py Normal file
2 akibapass_downloader/const.py Normal file
@@ -0,0 +1,2 @@
# Base URL of the Akibapass service
BASE_URL = "https://akibapasstv.vhx.tv"
28 akibapass_downloader/download.py Normal file
@@ -0,0 +1,28 @@
from dataclasses import dataclass
from pathlib import Path

import requests


@dataclass
class Download:
    url: str
    quality: str
    filename: str

    def download(self, cookies: dict[str, str], destination_dir: Path):
        """Downloads the episode into the directory @destination_dir. The cookies from @cookies are used
        during the request."""
        with requests.get(
            self.url,
            cookies=cookies,
            headers={
                "Referrer": self.url,
            },
            stream=True,
        ) as r:
            r.raise_for_status()

            with open(destination_dir / self.filename, "wb") as f:
                for chunk in r.iter_content(chunk_size=8196):
                    f.write(chunk)
102 akibapass_downloader/episode.py Normal file
@@ -0,0 +1,102 @@
from enum import Enum
from dataclasses import dataclass
import re

from bs4 import BeautifulSoup
import requests

from akibapass_downloader.const import BASE_URL
from akibapass_downloader.download import Download


class Quality(Enum):
    SD_240P = "240p"
    SD_360P = "360p"
    SD_540P = "540p"
    HD_720P = "720p"
    HD_1080P = "1080p"
    UHD_1440P = "1440p"


@dataclass
class Episode:
    # The URL of the episode.
    url: str

    # The title of the episode.
    name: str

    # The episode number.
    episode_nr: int

    def get_downloads(
        self, cookies: dict[str, str], filter_quality: Quality | None = None
    ) -> list[Download]:
        """Requests the page and fetches the download links for the episode. The cookies from @cookies are
        sent with the request."""
        r = requests.get(
            self.url,
            cookies=cookies,
            headers={
                "Referrer": self.url,
            },
        )
        assert r.status_code == 200

        soup = BeautifulSoup(r.text, "html.parser")

        # Find the episode title
        title = soup.find_all("h1", class_="video-title")[0].strong.string

        # Find the different downloads
        dropdown = soup.find_all("div", class_="dropdown-list")[0]
        scroll = dropdown.ul
        assert scroll.attrs["class"] == ["scrolling"]
        downloads = []
        for download in scroll.find_all("li"):
            a = download.a

            quality = (
                a.string.strip()
                .split("—")[0]
                .rstrip()
                .replace("SD ", "")
                .replace("HD ", "")
            )
            if filter_quality is not None:
                if quality != filter_quality.value:
                    continue

            downloads.append(
                Download(
                    url=f"{BASE_URL}{a.attrs['href']}",
                    quality=quality,
                    filename=f"{title} ({quality}).mp4",
                )
            )

        return downloads


def list_episodes(base_url: str) -> list[Episode]:
    """Fetches episodes for @base_url."""
    r = requests.get(base_url)
    assert r.status_code == 200

    soup = BeautifulSoup(r.text, "html.parser")

    episode_nr_matcher = re.compile(r".*S[0-9]+E([0-9]+).*")
    episodes = []
    for li in soup.find_all("li", class_="item-type-video"):
        url = li.find_all("a", class_="browse-item-link")[0]
        img = url.find_all("img")[0]
        name_raw = img.attrs["alt"]
        match = episode_nr_matcher.match(name_raw)

        episodes.append(
            Episode(
                url=f"{BASE_URL}{url.attrs['href']}",
                name=name_raw,
                episode_nr=int(match.group(1)),
            )
        )
    return episodes
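The quality string in `get_downloads` is derived from the dropdown link text by cutting off the size suffix and the SD/HD prefix. A small illustration of that chain, using a hypothetical label of the form the site appears to produce:

```python
# Hypothetical dropdown link label from the download page.
label = "HD 1080p — 1.4 GB"

quality = (
    label.strip()
    .split("—")[0]        # "HD 1080p "
    .rstrip()             # "HD 1080p"
    .replace("SD ", "")   # no-op here
    .replace("HD ", "")   # "1080p"
)
print(quality)  # -> "1080p"
```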
38 akibapass_downloader/login.py Normal file
@@ -0,0 +1,38 @@
import requests
from bs4 import BeautifulSoup

from akibapass_downloader.const import BASE_URL


def login(email: str, password: str) -> dict[str, str] | None:
    """Performs a login on the Akibapass site and returns a set of cookies
    to use while performing other requests."""
    login_page_req = requests.get(f"{BASE_URL}/login")
    if login_page_req.status_code != 200:
        return None

    page = BeautifulSoup(login_page_req.text, "html.parser")
    # Find the CSRF token's value.
    csrf_token = page.find("meta", attrs={"name": "csrf-token"}).attrs["content"]
    # Find the CSRF token's name.
    csrf_param = page.find("meta", attrs={"name": "csrf-param"}).attrs["content"]

    r = requests.post(
        f"{BASE_URL}/login",
        cookies={
            # Use the session we received on the login page.
            "_session": login_page_req.cookies["_session"],
        },
        data={
            "email": email,
            "password": password,
            csrf_param: csrf_token,
            "utf8": "✓",
        },
    )
    if r.status_code != 200:
        return None

    return {
        "_session": r.cookies["_session"],
    }
17 pyproject.toml Normal file
@@ -0,0 +1,17 @@
[project]
name = "akibapass_downloader"
version = "0.1.0"
dependencies = [
    "beautifulsoup4",
    "requests"
]

[project.optional-dependencies]
dev = [
    "black",
    "pylint"
]

[tool.pylint."MESSAGES CONTROL"]
max-line-length=120
disable = "missing-class-docstring,missing-module-docstring,missing-function-docstring"