diff --git a/python_requests/__main__.py b/python_requests/__main__.py
index 692402f..be9a0f0 100644
--- a/python_requests/__main__.py
+++ b/python_requests/__main__.py
@@ -2,6 +2,15 @@ import argparse
 import logging
 
 from .connections import Connection
+from . import cache
+
+
+def main():
+    c = Connection()
+    c.generate_headers()
+    print(c.session.headers)
+
+    print(c.get("http://ip.org/"))
 
 
 def cli():
@@ -16,6 +25,22 @@ def cli():
         help="Sets the logging level to debug."
     )
 
+    # Cache management subcommands
+    subparsers = parser.add_subparsers(dest='cache_command', help='Cache management commands')
+
+    # Show cache stats
+    show_parser = subparsers.add_parser('show-cache', help='Show cache statistics')
+    show_parser.set_defaults(func=handle_show_cache)
+
+    # Clean cache (expired entries)
+    clean_parser = subparsers.add_parser('clean-cache', help='Clean expired cache entries')
+    clean_parser.set_defaults(func=handle_clean_cache)
+
+    # Clear cache (all entries)
+    clear_parser = subparsers.add_parser('clear-cache', help='Clear ALL cache entries')
+    clear_parser.set_defaults(func=handle_clear_cache)
+
+
     args = parser.parse_args()
 
     # Configure logging based on the debug flag
@@ -32,11 +57,46 @@ def cli():
 
     )
 
-    c = Connection()
-    c.generate_headers()
-    print(c.session.headers)
+    if hasattr(args, 'func'):
+        args.func(args)
+    else:
+        main()
 
-    print(c.get("http://ip.org/"))
+
+def handle_show_cache(args):
+    """Handle the show-cache command"""
+    try:
+        file_count, db_count = cache.get_cache_stats()
+        logging.info(f"Cache Statistics:")
+        logging.info(f"  - Files in cache: {file_count}")
+        logging.info(f"  - Database entries: {db_count}")
+    except Exception as e:
+        logging.error(f"Failed to get cache statistics: {str(e)}")
+
+def handle_clean_cache(args):
+    """Handle the clean-cache command"""
+    try:
+        files_deleted, entries_deleted = cache.clean_cache()
+        logging.info(f"Cleaned cache:")
+        logging.info(f"  - Files deleted: {files_deleted}")
+        logging.info(f"  - Database entries removed: {entries_deleted}")
+    except Exception as e:
+        logging.error(f"Failed to clean cache: {str(e)}")
+
+def handle_clear_cache(args):
+    """Handle the clear-cache command"""
+    try:
+        # Confirm before clearing all cache
+        confirm = input("Are you sure you want to clear ALL cache? This cannot be undone. [y/N]: ")
+        if confirm.lower() == 'y':
+            files_deleted, entries_deleted = cache.clear_cache()
+            logging.info(f"Cleared ALL cache:")
+            logging.info(f"  - Files deleted: {files_deleted}")
+            logging.info(f"  - Database entries removed: {entries_deleted}")
+        else:
+            logging.info("Cache clearing cancelled")
+    except Exception as e:
+        logging.error(f"Failed to clear cache: {str(e)}")
 
 
 if __name__ == "__main__":
diff --git a/python_requests/cache.py b/python_requests/cache.py
index b40df91..fb0af97 100644
--- a/python_requests/cache.py
+++ b/python_requests/cache.py
@@ -1,4 +1,4 @@
-from typing import Optional
+from typing import Optional, Tuple
 from codecs import encode
 from hashlib import sha1
 from pathlib import Path
@@ -99,3 +99,84 @@ def write_cache(
         (url_hash, expires_at.isoformat())
     )
     conn.commit()
+
+
+
+def clean_cache() -> Tuple[int, int]:
+    """
+    Clean up expired cache entries.
+    Returns tuple of (files_deleted, db_entries_deleted)
+    """
+    now = datetime.now()
+    files_deleted = 0
+    db_entries_deleted = 0
+
+    with sqlite3.connect(DB_FILE) as conn:
+        # Get all expired entries
+        cursor = conn.cursor()
+        cursor.execute(
+            "SELECT url_hash FROM url_cache WHERE expires_at < ?",
+            (now.isoformat(),)
+        )
+        expired_hashes = [row[0] for row in cursor.fetchall()]
+
+        # Delete the files and count deletions
+        for url_hash in expired_hashes:
+            cache_file = Path(CACHE_DIRECTORY, f"{url_hash}.request")
+            try:
+                if cache_file.exists():
+                    cache_file.unlink()
+                    files_deleted += 1
+            except OSError:
+                continue
+
+        # Delete database records and count deletions
+        cursor.execute(
+            "DELETE FROM url_cache WHERE expires_at < ?",
+            (now.isoformat(),)
+        )
+        db_entries_deleted = cursor.rowcount
+        conn.commit()
+
+    return (files_deleted, db_entries_deleted)
+
+def clear_cache() -> Tuple[int, int]:
+    """
+    Clear ALL cache entries regardless of expiration.
+    Returns tuple of (files_deleted, db_entries_deleted)
+    """
+    files_deleted = 0
+    db_entries_deleted = 0
+
+    # Delete all cache files
+    for cache_file in Path(CACHE_DIRECTORY).glob("*.request"):
+        try:
+            cache_file.unlink()
+            files_deleted += 1
+        except OSError:
+            continue
+
+    # Delete all database entries
+    with sqlite3.connect(DB_FILE) as conn:
+        cursor = conn.cursor()
+        cursor.execute("DELETE FROM url_cache")
+        db_entries_deleted = cursor.rowcount
+        conn.commit()
+
+    return (files_deleted, db_entries_deleted)
+
+def get_cache_stats() -> Tuple[int, int]:
+    """
+    Get cache statistics.
+    Returns tuple of (total_files, total_db_entries)
+    """
+    # Count cache files
+    total_files = len(list(Path(CACHE_DIRECTORY).glob("*.request")))
+
+    # Count database entries
+    with sqlite3.connect(DB_FILE) as conn:
+        cursor = conn.cursor()
+        cursor.execute("SELECT COUNT(*) FROM url_cache")
+        total_db_entries = cursor.fetchone()[0]
+
+    return (total_files, total_db_entries)
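A quick way to exercise the new cache helpers outside the CLI. This is a minimal sketch, not part of the patch; it assumes the package is importable as python_requests and that the cache directory and SQLite database (CACHE_DIRECTORY / DB_FILE, defined elsewhere in cache.py) already exist from a previous cached request:

    # Minimal usage sketch, not part of the patch above.
    # Assumes python_requests is importable and the cache directory/database
    # (CACHE_DIRECTORY / DB_FILE in cache.py) have already been created.
    from python_requests import cache

    # Inspect current cache state
    total_files, total_db_entries = cache.get_cache_stats()
    print(f"cache files: {total_files}, db entries: {total_db_entries}")

    # Remove only expired entries; clear_cache() would remove everything.
    files_deleted, entries_deleted = cache.clean_cache()
    print(f"removed {files_deleted} files, {entries_deleted} db rows")

The same operations should also be reachable from the command line via the new subcommands, e.g. python -m python_requests show-cache, clean-cache, or clear-cache, assuming __main__.py still ends by calling cli().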