From 6f412348fe32d307502f2e683268b5cb8bd519fd Mon Sep 17 00:00:00 2001
From: Nick Heppler
Date: Tue, 15 Apr 2025 17:53:02 -0400
Subject: [PATCH] feat(logging): add dynamic log filenames and support for
 PURGE_DAYS cleanup.

---
 app.py | 42 ++++++++++++++++++++++++++++++++++++++++--
 1 file changed, 40 insertions(+), 2 deletions(-)

diff --git a/app.py b/app.py
index 0a8e844..04bd288 100644
--- a/app.py
+++ b/app.py
@@ -17,6 +17,10 @@ load_dotenv("753DataSync.env")
 BASE_URL = "{}/{}/{}"
 log_level = os.getenv('LOG_LEVEL', 'INFO').upper()
 
+# Get the current date for dynamic log file naming
+current_date = datetime.now().strftime("%Y-%m-%d")
+log_filename = f"753DataSync_{current_date}.log"
+
 # Setup logging
 logger = logging.getLogger()
 
@@ -34,8 +38,8 @@ elif log_level == 'CRITICAL':
 else:
     logger.setLevel(logging.INFO)
 
-# File handler
-file_handler = logging.FileHandler('753DataSync.log')
+# File handler for dynamic log file
+file_handler = logging.FileHandler(log_filename)
 file_handler.setLevel(getattr(logging, log_level))
 
 # Stream handler (console output)
@@ -51,6 +55,35 @@ stream_handler.setFormatter(formatter)
 logger.addHandler(file_handler)
 logger.addHandler(stream_handler)
 
+def purge_old_files(purge_days):
+    """Purge log and data files older than PURGE_DAYS from the 'data' folder."""
+    data_folder = 'data'
+    log_folder = '.'  # Log files are in the current directory
+
+    if not os.path.exists(data_folder):
+        logger.warning(f"The '{data_folder}' folder does not exist.")
+        return
+
+    purge_threshold = datetime.now() - timedelta(days=purge_days)
+
+    # Delete old log files
+    for filename in os.listdir(log_folder):
+        if filename.endswith(".log"):
+            file_path = os.path.join(log_folder, filename)
+            file_modified_time = datetime.fromtimestamp(os.path.getmtime(file_path))
+            if file_modified_time < purge_threshold:
+                logger.info(f"Deleting old log file: {file_path}")
+                os.remove(file_path)
+
+    # Delete old data files
+    for filename in os.listdir(data_folder):
+        file_path = os.path.join(data_folder, filename)
+        if filename.endswith(".json"):
+            file_modified_time = datetime.fromtimestamp(os.path.getmtime(file_path))
+            if file_modified_time < purge_threshold:
+                logger.info(f"Deleting old data file: {file_path}")
+                os.remove(file_path)
+
 def fetch_data(api_url, page_number, results_per_page):
     """Fetches data from the API and returns the response."""
     url = BASE_URL.format(api_url, page_number, results_per_page)
@@ -266,6 +299,11 @@ def main():
     try:
         logger.info("Starting script execution.")
 
+        # Check and purge old files before processing
+        purge_days = int(os.getenv("PURGE_DAYS", 30))  # Default to 30 days if not set
+        logger.info(f"Purging files older than {purge_days} days.")
+        purge_old_files(purge_days)
+
         # Parse command-line arguments
         results_per_page = parse_arguments()
         logger.info(f"Parsed arguments: results_per_page={results_per_page}")