# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------------
# Name:        lis_grib2_v2.py
# Purpose:     Refresh the LIS mosaic dataset: purge rasters older than the
#              configured retention window, then download new imagery and load
#              it into the existing mosaic datasets.
#              Originally provided by Esri; simplified and revised since based
#              on lessons learned.
#
# Author:      Esri (v1), jshute (v2)
#
# Created:     Revised on 09/14/2018
# Copyright:   (c) NASA 2018
# ---------------------------------------------------------------------------------

# Standard-library modules
# -------------------------
import datetime
import os
import socket
import sys

# Third-party modules
# --------------------
import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning

import arcpy

now = datetime.datetime.now()

# Script parameters
# ------------------
root_directory = "N:\\projects\\arcshare\\public\\disaster_response\\nrt_products\\lis_grib2\\"
# "C:\\att\\project\\arcshare\\public\\disaster_response\\nrt_products\\lis_grib2\\"

# Each entry is [product_name, output_directory]
product_list = [
    ["lis_grib2", root_directory + "rasters_lis_grib2\\"]
]

url = "https://nssrgeo.ndc.nasa.gov/SPoRT/modeling/lis/conus3km/geotiff/rsm_0-10cm/"
url_suffix = "_0000_sport_lis_rsm0-10cm_conus3km_float_wgs84.tif"
age_of_imagery = 30  # Imagery retention window, in days
mosaic_dataset_fields = ["Name", "Date"]

# Script class files live under the project root; make them importable
# ---------------------------------------------------------------------
sys.path.insert(0, root_directory + "classes\\")
import UtilitiesGeneral
import UtilitiesRaster

# Silence the certificate warnings triggered by verify=False downloads
# ---------------------------------------------------------------------
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)

# Global logger settings
# -----------------------
logger_timestamp = now.strftime("%Y%m%d_%H%M_%S")
log_directory = root_directory + "logs" + os.sep
log_file = "{0}lis_grib2_log_{1}_{2}.log".format(
    log_directory, logger_timestamp, socket.gethostname())
# Main body of the script
# ---------------------------------------------------------------------------------


def _download_image(logger, file_url, out_tif):
    """Download one GeoTIFF from file_url and write it to out_tif.

    Returns True on success, False otherwise. The HTTP status is checked
    before anything is written, so a 404/error page is never saved to disk
    with a .tif extension (the previous version wrote response.content
    unconditionally). The body is streamed in chunks to keep memory flat
    for large rasters.
    """
    response = requests.get(file_url, verify=False, stream=True)
    if not response.ok:
        logger.error("Download failed (HTTP {0}) for ==> {1}".format(
            response.status_code, file_url))
        return False
    with open(out_tif, "wb") as f:
        for chunk in response.iter_content(chunk_size=1024 * 1024):
            if chunk:
                f.write(chunk)
    return True


def _add_rasters_to_mosaic(logger, mosaic_dataset_full_path, source,
                           success_message, error_prefix):
    """Add source (a single raster or a whole directory) to the mosaic dataset.

    success_message / error_prefix carry the caller's exact log text so the
    messages remain identical to the original inline calls. Failures are
    logged, not raised, so one bad raster does not abort the product loop.
    """
    try:
        arcpy.AddRastersToMosaicDataset_management(mosaic_dataset_full_path,
                                                   "Raster Dataset",
                                                   source,
                                                   "UPDATE_CELL_SIZES",
                                                   "NO_BOUNDARY",
                                                   "NO_OVERVIEWS",
                                                   duplicate_items_action="EXCLUDE_DUPLICATES",
                                                   build_pyramids="BUILD_PYRAMIDS",
                                                   calculate_statistics="CALCULATE_STATISTICS",
                                                   estimate_statistics="ESTIMATE_STATISTICS")
        logger.info(success_message)
    except Exception as e:
        logger.error(error_prefix + str(e))


# Defines the entry point into the script
def main():
    """Refresh each product's raster directory and mosaic dataset.

    For every product in product_list: download any missing imagery for the
    last age_of_imagery days, load it into the mosaic dataset, populate the
    Date field from the raster names, then purge files and mosaic records
    older than the retention window.
    """
    # Set up the logger
    # ------------------
    logger = UtilitiesGeneral.configure_logging(log_file)
    logger.info("Logger configuration complete")
    logger.info("log_directory ==> " + log_directory)
    logger.info("log_file location ==> " + log_file)
    logger.info("Script start time ==> " + now.strftime("%I:%M:%S %p"))

    # Verify class file connectivity is working
    # ------------------------------------------
    utilities_general_test = "Checking connection to UtilitiesGeneral file ... "
    logger.info("File check ==> " + utilities_general_test)
    utilities_general_result = UtilitiesGeneral.connection_test(utilities_general_test)
    logger.info("Result ==> " + utilities_general_result)
    utilities_raster_test = "Checking connection to UtilitiesRaster file ... "
    logger.info("File check ==> " + utilities_raster_test)
    utilities_raster_result = UtilitiesRaster.connection_test(utilities_raster_test)
    logger.info("Result ==> " + utilities_raster_result)

    # Check script parameters
    # ------------------------
    logger.info("Listing script parameters ... ")
    logger.info("Script start time ==> " + now.strftime("%Y%m%d_%H%M_%S"))
    logger.info("root_directory ==> " + root_directory)
    logger.info("product_list ==> " + str(product_list))
    logger.info("url ==> " + url)
    logger.info("url_suffix ==> " + url_suffix)
    logger.info("age_of_imagery ==> " + str(age_of_imagery))
    logger.info("mosaic_dataset_fields ==> " + str(mosaic_dataset_fields))

    # Configure dates
    # ----------------
    logger.info("Configuring dates...")
    # Remove date: today minus the age_of_imagery parameter (YYYYMMDD, so the
    # string comparison against file-name dates below sorts chronologically)
    remove_date = (now - datetime.timedelta(days=age_of_imagery)).strftime("%Y%m%d")
    logger.info("remove_date parameter ==> " + str(remove_date))
    # Same cutoff formatted for the SQL where clause (microseconds stripped)
    remove_date_query = str(now - datetime.timedelta(days=age_of_imagery)).split(".")[0]
    logger.info("remove_date_query parameter ==> " + remove_date_query)
    logger.info("done")

    # Begin product loop
    # -------------------
    logger.info("Beginning product loop...")
    for product in product_list:
        logger.info("================================================================================")
        logger.info("Beginning processing for " + product[0])
        logger.info("================================================================================")

        # Set product_type variables
        # ---------------------------
        product_name = product[0]
        output_directory = product[1]
        logger.info("product_name ==> " + product_name)
        logger.info("output_directory ==> " + output_directory)
        mosaic_dataset_full_path = root_directory + "data_" + product_name + ".gdb\\" + product_name
        logger.info("mosaic_dataset_full_path ==> " + mosaic_dataset_full_path)

        # An empty output directory means this is the initial run: load the
        # whole directory at once instead of raster-by-raster
        # ------------------------------------------------------------------
        initial_run = len(os.listdir(output_directory)) == 0
        if initial_run:
            logger.info("No files in the directory. Adding all files (initial_run = {})".format(str(initial_run)))
        else:
            logger.info("Files already exist in the directory. Adding new files only (initial_run = {})"
                        .format(str(initial_run)))

        # Create list of dates from now back to age_of_imagery
        # -----------------------------------------------------
        logger.info("Creating list of dates from now back to age_of_imagery ({} days)".format(age_of_imagery))
        date_list = [now - datetime.timedelta(days=x) for x in range(0, age_of_imagery)]
        logger.info("done")

        for unique_date in date_list:
            unique_date = unique_date.strftime("%Y%m%d")
            logger.info("Beginning processing for date ==> " + unique_date)

            # Name and path of the file to be downloaded
            # -------------------------------------------
            file_name = unique_date + url_suffix
            out_tif = output_directory + file_name

            # Skip dates whose raster is already on disk
            # -------------------------------------------
            if not os.path.isfile(out_tif):
                logger.info("Downloading and writing file to output directory ==> " + out_tif)
                downloaded = _download_image(logger, url + file_name, out_tif)
                logger.info("done")

                # On non-initial runs, load each new raster individually;
                # failed downloads are skipped so bad files never reach the mosaic
                if downloaded and not initial_run:
                    logger.info("Adding image file to the mosaic dataset ==> " + mosaic_dataset_full_path)
                    _add_rasters_to_mosaic(
                        logger, mosaic_dataset_full_path, out_tif,
                        "Image file added to the mosaic dataset",
                        "Unable to add " + out_tif + " image file to the mosaic dataset - ")

        # Add all files to the mosaic if this is the initial run
        # -------------------------------------------------------
        if initial_run:
            logger.info("Adding all image files to the mosaic dataset ==> " + mosaic_dataset_full_path)
            _add_rasters_to_mosaic(
                logger, mosaic_dataset_full_path, output_directory,
                "All image files from the directory added to the mosaic dataset",
                "Unable to add " + output_directory + " image files to the mosaic dataset - ")

        logger.info("Image file parsing and processing done ({})".format(product_name))

        # Update attributes in mosaic dataset table: derive Date (YYYY/MM/DD)
        # from the leading YYYYMMDD of each raster Name
        # --------------------------------------------------------------------
        logger.info("Updating attributes in the mosaic dataset table for " + product_name)
        with arcpy.da.UpdateCursor(mosaic_dataset_full_path, mosaic_dataset_fields) as cursor:
            for row in cursor:
                image_date = row[0][0:4] + "/" + row[0][4:6] + "/" + row[0][6:8]
                row[1] = image_date
                cursor.updateRow(row)
                # Per-row logging intentionally omitted: it spams the log file
        logger.info("done")

        # Remove directory images older than the age_of_imagery parameter
        # ----------------------------------------------------------------
        logger.info("Removing directory images older than " + str(age_of_imagery) +
                    " days for " + product_name)
        for root, dirs, files in os.walk(output_directory):
            for directory_file in files:
                file_date = directory_file.split(".")[0][0:8]
                if file_date < remove_date:
                    # os.path.join: os.walk roots below the top level carry no
                    # trailing separator, so plain concatenation built bad paths
                    full_path = os.path.join(root, directory_file)
                    logger.warning("Removing file " + full_path)
                    os.remove(full_path)
        logger.info("done")

        # Set the where clause for mosaic dataset deletions
        # --------------------------------------------------
        where_clause = "Date < date '" + remove_date_query + "' OR Date IS NULL"
        logger.info("where clause for mosaic dataset deletions ==> " + where_clause)

        # Remove mosaic dataset images older than the age_of_imagery parameter
        # ---------------------------------------------------------------------
        logger.info("Removing mosaic dataset images older than " + str(age_of_imagery) +
                    " days for " + product_name)
        try:
            arcpy.RemoveRastersFromMosaicDataset_management(mosaic_dataset_full_path, where_clause,
                                                            update_boundary="UPDATE_BOUNDARY",
                                                            mark_overviews_items="MARK_OVERVIEW_ITEMS",
                                                            delete_overview_images="DELETE_OVERVIEW_IMAGES",
                                                            delete_item_cache="DELETE_ITEM_CACHE",
                                                            remove_items="REMOVE_MOSAICDATASET_ITEMS",
                                                            update_cellsize_ranges="UPDATE_CELL_SIZES")
            logger.info("done")
        except Exception as e:
            logger.error("Unable to remove mosaic dataset images with the following where clause for " +
                         product_name + ": " + str(where_clause) + " - " + str(e))

        # Repair mosaic dataset paths
        # ----------------------------
        logger.info("Repairing mosaic dataset paths for " + product_name)
        UtilitiesRaster.repair_mosaic_paths(logger, mosaic_dataset_full_path, output_directory)

        # Set mosaic dataset NoData values
        # ---------------------------------
        logger.info("Setting mosaic dataset NoData values")
        try:
            arcpy.DefineMosaicDatasetNoData_management(mosaic_dataset_full_path, "1",
                                                       "BAND_1 '0 9999'", "", "",
                                                       "NO_COMPOSITE_NODATA")
            logger.info("done")
        except Exception as e:
            # Message corrected: this is the LIS mosaic, not GPM (copy-paste residue)
            logger.error("Unable to set LIS mosaic dataset NoData values - " + str(e))

        logger.info("Product loop done for {}".format(product_name))

    logger.info("Script complete")


# Script start
# -------------
if __name__ == "__main__":
    sys.exit(main())