chore: Format using black

PapaTutuWawa 2023-08-20 19:55:43 +02:00
parent d0ca73d8c6
commit 801d7d73d9
2 changed files with 88 additions and 65 deletions
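
The commit records no command line, so the following is only a sketch of how a reformat like this is typically produced and verified, assuming the module path pubcached/pubcached.py implied by the entry point in setup.py:

    pip install black
    # Rewrite both files in place
    black pubcached/pubcached.py setup.py
    # Verify: exits non-zero and prints a diff if anything would still change
    black --check --diff .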

pubcached/pubcached.py

@@ -13,6 +13,7 @@ import time
 from optparse import OptionParser
 import sys
 
+
 class Config:
     config: dict[str, object] = None
@@ -37,6 +38,7 @@ class Config:
         # Default: 1 day
         return self.config.get("api_ttl", 1 * 24 * 60 * 60)
 
+
 class Database:
     __db = None
@@ -45,15 +47,18 @@ class Database:
     def initialize(self):
         cur = self.__db.cursor()
-        cur.execute('''
+        cur.execute(
+            """
         CREATE TABLE ApiCache (
             package TEXT NOT NULL PRIMARY KEY,
             payload TEXT NOT NULL,
             time INTEGER NOT NULL,
             success INTEGER NOT NULL
         )
-        ''')
-        cur.execute('''
+        """
+        )
+        cur.execute(
+            """
         CREATE TABLE PackageCache (
             package TEXT NOT NULL,
             version TEXT NOT NULL,
@@ -61,30 +66,32 @@ class Database:
             path TEXT NOT NULL,
             PRIMARY KEY (package, version)
         )
-        ''')
+        """
+        )
 
     def get_api_cache(self, package: str):
         cur = self.__db.cursor()
-        api = cur.execute('SELECT package, payload, time, success FROM ApiCache WHERE package = ?', (package,)).fetchone()
+        api = cur.execute(
+            "SELECT package, payload, time, success FROM ApiCache WHERE package = ?",
+            (package,),
+        ).fetchone()
         if api:
-            return ApiCacheEntry(
-                api[0],
-                api[1],
-                api[2],
-                True if api[3] == 1 else False
-            )
+            return ApiCacheEntry(api[0], api[1], api[2], True if api[3] == 1 else False)
 
     def persist_api_cache(self, item):
         cur = self.__db.cursor()
         cur.execute(
-            'INSERT OR REPLACE INTO ApiCache VALUES (?, ?, ?, ?)',
-            (item.package, item.payload, item.request_time, 1 if item.success else 0)
+            "INSERT OR REPLACE INTO ApiCache VALUES (?, ?, ?, ?)",
+            (item.package, item.payload, item.request_time, 1 if item.success else 0),
         )
         self.__db.commit()
 
     def get_package_cache(self, package: str, version: str):
         cur = self.__db.cursor()
-        api = cur.execute('SELECT package, version, time, path FROM PackageCache WHERE package = ? AND version = ?', (package, version)).fetchone()
+        api = cur.execute(
+            "SELECT package, version, time, path FROM PackageCache WHERE package = ? AND version = ?",
+            (package, version),
+        ).fetchone()
         if api:
             return PackageCacheEntry(
                 api[0],
@@ -96,27 +103,31 @@ class Database:
     def persist_package_cache(self, item):
         cur = self.__db.cursor()
         cur.execute(
-            'INSERT INTO PackageCache VALUES (?, ?, ?, ?)',
-            (item.package, item.version, item.request_time, item.path)
+            "INSERT INTO PackageCache VALUES (?, ?, ?, ?)",
+            (item.package, item.version, item.request_time, item.path),
        )
         self.__db.commit()
 
 def patch_pubdev_api_response(resp: dict[str, object], package: str, config: Config):
     # Patch the latest version
-    if 'latest' in resp:
-        version = resp['latest']['version']
-        resp['latest']['archive_url'] = f'{config.server_url}/api/archives/{package}/{version}'
+    if "latest" in resp:
+        version = resp["latest"]["version"]
+        resp["latest"][
+            "archive_url"
+        ] = f"{config.server_url}/api/archives/{package}/{version}"
 
     # Patch all other versions
     new_versions = []
-    for release in resp['versions']:
-        version = release['version']
-        release['archive_url'] = f'{config.server_url}/api/archives/{package}/{version}'
+    for release in resp["versions"]:
+        version = release["version"]
+        release["archive_url"] = f"{config.server_url}/api/archives/{package}/{version}"
         new_versions.append(release)
-    resp['versions'] = new_versions
+    resp["versions"] = new_versions
 
     return resp
 
 class PackageCacheEntry:
     package: str = None
     version: str = None
@@ -129,6 +140,7 @@ class PackageCacheEntry:
         self.request_time = request_time
         self.path = path
 
+
 class ApiCacheEntry:
     package: str = None
     payload = None
@@ -144,6 +156,7 @@ class ApiCacheEntry:
     def is_valid(self, ttl):
         return time.time() <= self.request_time + ttl
 
+
 class PubApiCacheResource:
     __db: Database = None
     __config: Config = None
@@ -154,29 +167,32 @@ class PubApiCacheResource:
     @logger.catch
     async def on_get(self, req, resp, package):
-        #breakpoint()
+        # breakpoint()
         cache = self.__db.get_api_cache(package)
         if not cache or not cache.is_valid(self.__config.api_ttl):
-            logger.info(f'API response for {package} not in cache...')
+            logger.info(f"API response for {package} not in cache...")
             r = requests.get(
-                f'https://pub.dev/api/packages/{package}',
+                f"https://pub.dev/api/packages/{package}",
                 headers={
-                    'Accept': 'application/vnd.pub.v2+json',
-                }
+                    "Accept": "application/vnd.pub.v2+json",
+                },
             )
             if r.status_code == 200:
                 payload = patch_pubdev_api_response(r.json(), package, self.__config)
             else:
                 payload = r.json()
-            cache = ApiCacheEntry(package, json.dumps(payload), time.time(), r.status_code == 200)
+            cache = ApiCacheEntry(
+                package, json.dumps(payload), time.time(), r.status_code == 200
+            )
             self.__db.persist_api_cache(cache)
-        logger.debug(f'Payload: {cache.payload}')
-        resp.append_header('Content-Type', 'application/vnd.pub.v2+json')
+        logger.debug(f"Payload: {cache.payload}")
+        resp.append_header("Content-Type", "application/vnd.pub.v2+json")
         resp.text = cache.payload
         resp.status = falcon.HTTP_200 if cache.success else falcon.HTTP_404
 
+
 class PubPackageCacheResource:
     __db: Database = None
     __config: Config = None
@@ -187,19 +203,22 @@ class PubPackageCacheResource:
     @logger.catch
     async def on_get(self, req, resp, package, version):
-        #breakpoint()
+        # breakpoint()
         cache = self.__db.get_package_cache(package, version)
         if not cache or not os.path.exists(cache.path):
-            logger.info(f'{package}:{version} not in cache. Querying...')
+            logger.info(f"{package}:{version} not in cache. Querying...")
             package_path = os.path.join(self.__config.package_path, package)
             if not os.path.exists(package_path):
                 os.mkdir(package_path)
-            path = os.path.join(package_path, f'{version}.tar.gz')
-            with requests.get(f'https://pub.dartlang.org/packages/{package}/versions/{version}.tar.gz', stream=True) as r:
+            path = os.path.join(package_path, f"{version}.tar.gz")
+            with requests.get(
+                f"https://pub.dartlang.org/packages/{package}/versions/{version}.tar.gz",
+                stream=True,
+            ) as r:
                 r.raise_for_status()
-                with open(path, 'wb') as f:
+                with open(path, "wb") as f:
                     for chunk in r.iter_content(chunk_size=8196):
                         f.write(chunk)
             cache = PackageCacheEntry(
@@ -211,10 +230,11 @@ class PubPackageCacheResource:
             self.__db.persist_package_cache(cache)
 
         resp.status = falcon.HTTP_200
-        resp.content_type = 'application/octet-stream'
-        resp.stream = await aiofiles.open(cache.path, 'rb')
+        resp.content_type = "application/octet-stream"
+        resp.stream = await aiofiles.open(cache.path, "rb")
         resp.content_length = os.path.getsize(cache.path)
 
+
 def main():
     parser = OptionParser()
     parser.add_option(
@@ -242,17 +262,24 @@ def main():
     if should_initialize:
         db.initialize()
 
-    logger.info(f'API calls have a TTL of {config.api_ttl} seconds')
+    logger.info(f"API calls have a TTL of {config.api_ttl} seconds")
 
     if not os.path.exists(config.package_path):
-        logger.info('Creating packages directory...')
+        logger.info("Creating packages directory...")
        os.makedirs(config.package_path)
 
     app = falcon.asgi.App()
-    app.add_route('/api/packages/{package}', PubApiCacheResource(db, config))
-    app.add_route('/api/archives/{package}/{version}', PubPackageCacheResource(db, config))
+    app.add_route("/api/packages/{package}", PubApiCacheResource(db, config))
+    app.add_route(
+        "/api/archives/{package}/{version}", PubPackageCacheResource(db, config)
+    )
 
-    uvicorn.run(app, host=config.config.get('host', '127.0.0.1'), port=config.config.get('port', 8000))
+    uvicorn.run(
+        app,
+        host=config.config.get("host", "127.0.0.1"),
+        port=config.config.get("port", 8000),
+    )
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     main()
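
Every hunk above is mechanical; black changes no behavior. The patterns visible in this file: single-quoted strings become double-quoted, `#breakpoint()` gains a space after the hash, calls longer than black's default 88-column limit are exploded into one argument per line with a trailing comma, manually wrapped calls that fit are collapsed back onto one line, and top-level definitions are separated by two blank lines. For example, from the hunks above:

    # Before: one call over 88 columns
    cache = ApiCacheEntry(package, json.dumps(payload), time.time(), r.status_code == 200)

    # After: exploded by black because the line exceeds the limit
    cache = ApiCacheEntry(
        package, json.dumps(payload), time.time(), r.status_code == 200
    )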

setup.py

@@ -1,25 +1,21 @@
 from setuptools import setup, find_packages
 
 setup(
-    name='pubcached',
-    version='0.1',
-    description='Caching server for pub.dev packages',
-    author='Alexander \"PapaTutuWawa\"',
-    author_email='papatutuwawa [at] polynom.me',
+    name="pubcached",
+    version="0.1",
+    description="Caching server for pub.dev packages",
+    author='Alexander "PapaTutuWawa"',
+    author_email="papatutuwawa [at] polynom.me",
     install_requires=[
-        'aiofiles>=23.1.0',
-        'requests>=2.29.0',
-        'falcon>=3.1.1',
-        'loguru>=0.7.0',
-        'toml>=0.10.2',
-        'uvicorn>=0.20.0'
+        "aiofiles>=23.1.0",
+        "requests>=2.29.0",
+        "falcon>=3.1.1",
+        "loguru>=0.7.0",
+        "toml>=0.10.2",
+        "uvicorn>=0.20.0",
     ],
     packages=find_packages(),
-    license='MIT',
+    license="MIT",
     zip_safe=True,
-    entry_points={
-        "console_scripts": [
-            "pubcached = pubcached.pubcached:main"
-        ]
-    }
+    entry_points={"console_scripts": ["pubcached = pubcached.pubcached:main"]},
 )
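
For context, the collapsed entry_points line above is what exposes the server as a console command, so the package installs and runs the same way after this commit. A minimal sketch, assuming installation from the repository root (main() defines its options via OptionParser, but the exact flag names are not shown in this diff):

    pip install .
    pubcached  # serves on the configured host/port, defaulting to 127.0.0.1:8000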