implemented basic cache

Hazel Noack 2025-06-11 11:46:42 +02:00
parent 36829906d0
commit 8196da840e
3 changed files with 38 additions and 1 deletion


@@ -36,7 +36,7 @@ def cli():
    c.generate_headers()
    print(c.session.headers)
    print(c.get("http://ip.org/"))

if __name__ == "__main__":

python_requests/cache.py (new file, 29 lines)

@@ -0,0 +1,29 @@
from codecs import encode
from hashlib import sha1
from pathlib import Path
import requests
import pickle

from . import CACHE_DIRECTORY


def get_url_hash(url: str) -> str:
    return sha1(encode(url.strip(), "utf-8")).hexdigest()


def get_url_file(url: str) -> Path:
    return Path(CACHE_DIRECTORY, f"{get_url_hash(url)}.request")


def has_cache(url: str) -> bool:
    return get_url_file(url).exists()


def get_cache(url: str) -> requests.Response:
    with get_url_file(url).open("rb") as cache_file:
        return pickle.load(cache_file)


def write_cache(url: str, resp: requests.Response):
    with get_url_file(url).open("wb") as url_file:
        pickle.dump(resp, url_file)
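
A rough usage sketch for the new cache module (not part of this commit): it assumes the package is importable as python_requests, as the file path above suggests, and that CACHE_DIRECTORY points at an existing, writable directory.

import requests

from python_requests import cache  # package name assumed from the file path above

url = "https://example.com/"
resp = requests.get(url)

cache.write_cache(url, resp)   # pickles the Response to CACHE_DIRECTORY/<sha1(url)>.request
assert cache.has_cache(url)    # the cache file now exists
cached = cache.get_cache(url)  # unpickles and returns the stored Response
print(cached.status_code)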


@@ -3,6 +3,7 @@ from typing import Optional
import requests
from urllib.parse import urlparse, urlunsplit, ParseResult
from . import cache


class Connection:
    def __init__(self, session: Optional[requests.Session] = None) -> None:
@@ -20,3 +21,10 @@ class Connection:
        self.session.headers.update(**headers)

    def get(self, url: str):
        if cache.has_cache(url):
            return cache.get_cache(url)

        resp = self.session.get(url)
        cache.write_cache(url, resp)
        return resp
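
A sketch of the resulting behaviour of Connection.get (again not part of the diff; the import path is an assumption, since the file name of the changed module is not shown above):

from python_requests.connection import Connection  # module path assumed for illustration

c = Connection()
c.generate_headers()

first = c.get("http://ip.org/")   # cache miss: real HTTP request, response pickled to disk
second = c.get("http://ip.org/")  # cache hit: the stored Response is unpickled, no network access
assert first.text == second.text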