Compare commits

..

No commits in common. "0ebb27729dc17d3c2f29758064dd256fe19f6427" and "3765c78de0637a0c90a17ec433dbbec88df15adf" have entirely different histories.

2 changed files with 7 additions and 14 deletions

View File

@@ -1,8 +1,7 @@
![753 Data Sync logo](https://git.nickhepler.cloud/nick/753-Data-Sync/raw/branch/master/logo.png)
# 753 Data Sync
![Gitea Release](https://img.shields.io/gitea/v/release/nick/753-Data-Sync?gitea_url=https%3A%2F%2Fgit.nickhepler.cloud%2F&style=for-the-badge&logo=Python)
![Gitea Issues](https://img.shields.io/gitea/issues/open/nick/753-Data-Sync?gitea_url=https%3A%2F%2Fgit.nickhepler.cloud%2F&labels=enhancement&style=for-the-badge&logo=Gitea&label=Enhancements)
![Gitea Issues](https://img.shields.io/gitea/issues/open/nick/753-Data-Sync?gitea_url=https%3A%2F%2Fgit.nickhepler.cloud%2F&labels=bug&style=for-the-badge&logo=Gitea&label=Defects)
![Gitea Release](https://img.shields.io/gitea/v/release/nick/753-Data-Sync?gitea_url=https%3A%2F%2Fgit.nickhepler.cloud)
![Gitea Open Issues](https://img.shields.io/gitea/issues/open/nick/753-Data-Sync?gitea_url=https%3A%2F%2Fgit.nickhepler.cloud)
This script fetches enforcement data from an external API, truncates a specified feature layer in ArcGIS, and adds the fetched data as features to the layer. The script performs the following tasks:

16
app.py
View File

@@ -4,11 +4,9 @@ import sys
import os
import json
from datetime import datetime
from datetime import timedelta
import argparse
import urllib.parse
from dotenv import load_dotenv
import time
# Load environment variables from .env file
load_dotenv("753DataSync.env")
@@ -261,8 +259,6 @@ def add_features(token, hostname, instance, fs, layer, aggregated_data, secure=T
def main():
"""Main entry point for the script."""
start_time = time.time()
try:
logger.info("Starting script execution.")
@@ -309,11 +305,11 @@ def main():
timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
page_filename = f"data/enforcement_page_{page_number}_results_{results_per_page}_{timestamp}.json"
# Save individual page data if in DEBUG mode
# Save individual page data
if log_level == 'DEBUG':
save_json(data, page_filename)
# Stop if last page
# Check if the number of records is less than the results_per_page, indicating last page
if len(data) < results_per_page:
logger.info("No more data to fetch, stopping pagination.")
break
@@ -324,22 +320,20 @@ def main():
break
# Prepare aggregated data
aggregated_data = all_data
aggregated_data = all_data # Just use the collected features directly
# Save aggregated data
# Save aggregated data to a single JSON file
aggregated_filename = f"data/aggregated_enforcement_results_{timestamp}.json"
logger.info(f"Saving aggregated data to {aggregated_filename}.")
save_json(aggregated_data, aggregated_filename)
# Add the features to the feature layer
response = add_features(token, hostname, instance, fs, layer, aggregated_data)
except Exception as e:
logger.error(f"An unexpected error occurred: {e}", exc_info=True)
return
finally:
elapsed_time = timedelta(seconds=time.time() - start_time)
logger.info(f"Script execution completed in {str(elapsed_time)}.")
logger.info("Script execution completed.")
if __name__ == "__main__":
main()