# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------------
# Name:        MODIS_Flood_v4.py
# Purpose:     This script will update the MODIS Flood mosaic datasets by removing
#              all rasters older than a specified time period. New images will be
#              downloaded and loaded into existing mosaic datasets.
#              The script was originally provided by Esri but has been modified
#              based on lessons learned and an effort to simplify the code.
#
# Author:      Esri (v1), jshute (v2), gwlayne (v3), jclarke (v4)
#
# Created:     Revised on 07/20/21
# Copyright:   (c) NASA 2018
#
# ---------------------------------------------------------------------------------

# Import modules
# ---------------
import os
import sys
import socket
import datetime
import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning
import arcpy

# Single timestamp captured at import time; all date math and log file names key off this.
now = datetime.datetime.now()

# Script parameters
# ------------------
# root_directory = "C:\\att\\project\\arcshare\\public\\disaster_response\\nrt_products\\modis_flood\\"
root_directory = "C:\\att\\project\\arcshare\\disasters_public\\nrt\\modis_flood\\"
coordinate_file_name = "coordinate_list3.csv"

# TOKEN** There is not currently a variable with the token in it.
# The Earthdata bearer token is hard-coded at the requests.get() call in main();
# it must be directly changed there after 90 days, when it expires.
# product_list pairs: [mosaic dataset base name, NRT product prefix used in file names/URLs]
product_list = [
    ["modis_flood_1_day", "MCDWD_L3_F1_NRT"],
    ["modis_flood_2_day", "MCDWD_L3_F2_NRT"],
    ["modis_flood_3_day", "MCDWD_L3_F3_NRT"]
]
# url = "https://nrt3.modaps.eosdis.nasa.gov/archive/allData/61/"
url = "https://nrt3.modaps.eosdis.nasa.gov/api/v2/content/archives/allData/61/"
# alternate url = "https://nrt3.modaps.eosdis.nasa.gov/api/v2/content/archives/allData/61/"
age_of_imagery = 30  # Determines retention time (days) for imagery
mosaic_dataset_fields = ["name", "Date", "dateTXT", "tile", "Product"]  # Just an identifier (keep)
http_request_timeout = 6.05  # Specifies how long the request will wait before timing out

# Import script class files
# --------------------------
sys.path.insert(0, root_directory + "classes\\")
import UtilitiesGeneral
import UtilitiesRaster

# Disable url request warnings
# -----------------------------
# Downloads use verify=False, so suppress the resulting InsecureRequestWarning spam.
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)

# Refresh Publishing Tool Package import
# --------------------------------------
# The refresh service requires a connection made from ArcCatalog to the server which
# hosts the web service. If this connection is interrupted the time bar won't be
# refreshed, and could cause the script to not run.
# Make sure the connection is established in ArcCatalog.
# ---Directions for ArcCatalog-----
# Open ArcCatalog and navigate to the folder this file is in.
# To update the password, right click on the ags04 file -> properties -> fill in password -> apply
# To create a new connection right click in the file -> new -> ArcGIS Server connection -> Admin ->
# put in the url: https://maps.disasters.nasa.gov/ags04/admin and fill in your credentials.
# below is a current connection.
# ****NOTE that "; System/PublishingTools" is needed at the end of the file name
arcpy.ImportToolbox('C:\\att\\project\\arcshare\\disasters_public\\nrt\\modis_flood\\ags04 on maps.disasters.nasa.gov (admin).ags; System/PublishingTools')

# Global logger settings
# -----------------------
logger_timestamp = now.strftime("%Y%m%d_%H%M_%S")
log_directory = root_directory + "logs" + os.sep
log_file = log_directory + "modis_flood_log_" + logger_timestamp + "_" + socket.gethostname() + ".log"


# Main body of the script
# -------------------------------------------------------------------------------
# Defines the entry point into the script
def main():
    """Refresh the MODIS Flood mosaic datasets.

    For each product in product_list:
      1. Download any missing NRT flood rasters for the last age_of_imagery days.
      2. Load the new rasters into the product's mosaic dataset.
      3. Populate the Date/dateTXT/tile/Product attribute fields from file names.
      4. Purge rasters older than age_of_imagery days from disk and the mosaic.
      5. Repair mosaic paths and refresh the published image services.

    Returns None; all outcomes are reported through the log file.
    """
    # Set up the logger
    # ------------------
    logger = UtilitiesGeneral.configure_logging(log_file)
    logger.info("Logger configuration complete")
    logger.info("log_directory ==> " + log_directory)
    logger.info("log_file location ==> " + log_file)
    logger.info("Script start time ==> " + now.strftime("%I:%M:%S %p"))

    # Verify class file connectivity is working
    # ------------------------------------------
    utilities_general_test = "Checking connection to UtilitiesGeneral file ... "
    logger.info("File check ==> " + utilities_general_test)
    utilities_general_result = UtilitiesGeneral.connection_test(utilities_general_test)
    logger.info("Result ==> " + utilities_general_result)
    utilities_raster_test = "Checking connection to UtilitiesRaster file ... "
    logger.info("File check ==> " + utilities_raster_test)
    utilities_raster_result = UtilitiesRaster.connection_test(utilities_raster_test)
    logger.info("Result ==> " + utilities_raster_result)

    # Check script parameters
    # ------------------------
    logger.info("Listing script parameters ... ")
    logger.info("Script start time ==> " + now.strftime("%Y%m%d_%H%M_%S"))
    logger.info("root_directory ==> " + root_directory)
    logger.info("coordinate_file_name ==> " + coordinate_file_name)
    logger.info("product_list ==> " + str(product_list))
    logger.info("url ==> " + url)
    logger.info("age_of_imagery ==> " + str(age_of_imagery))
    logger.info("mosaic_dataset_fields ==> " + str(mosaic_dataset_fields))
    logger.info("http_request_timeout ==> " + str(http_request_timeout))

    # Configure dates
    # ----------------
    logger.info("Configuring dates...")

    # Remove date is today minus the age_of_imagery parameter
    # --------------------------------------------------------
    remove_date = (now - datetime.timedelta(days=age_of_imagery)).strftime("%Y%m%d")
    logger.info("remove_date parameter ==> " + str(remove_date))
    # Same cutoff in "YYYY-MM-DD" form for the mosaic dataset SQL where clause
    remove_date_query = str(now - datetime.timedelta(days=age_of_imagery)).split(" ")[0]
    logger.info("remove_date_query parameter ==> " + remove_date_query)
    logger.info("done")

    # Import coordinates (tile ids) from coordinate file
    # ---------------------------------------------------
    coordinate_list = UtilitiesGeneral.import_from_csv(logger, root_directory + coordinate_file_name)

    # Create list of dates from yesterday back to age_of_imagery
    # -----------------------------------------------------------
    logger.info("Creating list of dates from now back to age_of_imagery ({} days)".format(age_of_imagery))
    date_list = [(now - datetime.timedelta(days=1)) - datetime.timedelta(days=x) for x in range(0, age_of_imagery)]
    logger.info("done")

    # Begin product loop
    # -------------------
    logger.info("Beginning product loop...")
    for product in product_list:
        logger.info("================================================================================")
        logger.info("Beginning processing for " + product[0])
        logger.info("================================================================================")

        # Set product_type variables
        # ---------------------------
        product_name = product[0]
        product_prefix = product[1]
        logger.info("product_name ==> " + product_name)
        logger.info("product_prefix ==> " + product_prefix)

        # Configure additional product_type variables
        # --------------------------------------------
        output_directory = root_directory + "rasters_" + product_name + "\\"
        logger.info("output_directory ==> " + output_directory)
        mosaic_dataset_full_path = root_directory + "data_" + product_name + ".gdb\\" + product_name
        logger.info("mosaic_dataset_full_path ==> " + mosaic_dataset_full_path)
        loop_counter = 1

        # Find out if the mosaic dataset is empty. If it is empty, set initial_run to True
        # so the whole raster directory (not just new downloads) is loaded afterwards.
        # ---------------------------------------------------------------------------------
        raster_list = []
        with arcpy.da.SearchCursor(mosaic_dataset_full_path, "*") as cur:
            for row in cur:
                raster_list.append(row)
        if len(raster_list) > 0:
            initial_run = False
            logger.info("Files already exist in the mosaic dataset. Adding new files only (initial_run = {})"
                        .format(str(initial_run)))
        else:
            initial_run = True
            logger.info("No files in the mosaic dataset. Adding all files (initial_run = {})".format(str(initial_run)))

        # Paths of newly downloaded images; feeds Add Rasters to Mosaic when not initial run
        # -----------------------------------------------------------------------------------
        new_tif_list = []

        for unique_date in date_list:
            unique_date_value = unique_date.strftime("%Y%m%d")
            logger.info("Beginning processing for date ==> " + unique_date_value)

            # Configure year variable
            # ------------------------
            # BUG FIX: use the year of the date being processed, not the current year;
            # otherwise file names/URLs are wrong when the window crosses a year boundary.
            year = str(unique_date.year)
            logger.info("Year value ==> " + year)

            # Configure day of year variable (zero-padded to 3 digits, e.g. "007")
            # ---------------------------------------------------------------------
            day_of_year = unique_date.strftime("%j")
            logger.info("Current day of year value ==> " + day_of_year)

            logger.info("Beginning coordinate loop")
            for coordinate in coordinate_list:
                # Set success_failure flag
                # -------------------------
                success_flag = False

                # Set file name, e.g. MCDWD_L3_F1_NRT.A2021201.h09v05.061.tif
                # ------------------------------------------------------------
                file_name = product_prefix + "." + "A" + year + day_of_year + "." + coordinate + "." + "061.tif"

                # Set download file full path
                # ----------------------------
                out_tif = output_directory + file_name

                # Skip files already downloaded on a previous run
                # ------------------------------------------------
                if not os.path.isfile(out_tif):
                    # Set full URL for query
                    # -----------------------
                    full_url = url + "/" + product_prefix + "/" + year + "/" + day_of_year + "/" + file_name
                    try:
                        # Make URL request.
                        # NOTE(security): the Earthdata bearer token is hard-coded here and
                        # expires after 90 days — replace it directly when downloads begin
                        # returning non-200 responses. Consider moving it to secure storage.
                        response = requests.get(
                            full_url, verify=False, stream=True, timeout=http_request_timeout,
                            headers={'Authorization': 'Bearer Z2FycmV0dC53LmxheW5lOloyRnljbVYwZEM1M0xteGhlVzVsUUc1aGMyRXVaMjkyOjE2Mzc3MDQ1MDM6NDZiNDQ3OGFiYzBiNDk0MDBlYmM4NjViOTY2NmQ2NzQ5MDU0YmRhOA'})
                        if response.status_code == 200:
                            success_flag = True
                    except Exception as e:
                        logger.error("Error connecting to primary URL==> {}".format(full_url) + " - " + str(e))

                    if success_flag:
                        # Write file to output directory
                        # -------------------------------
                        logger.info("Writing image #{} to output directory ==> ".format(loop_counter) + out_tif)
                        with open(out_tif, "wb") as f:
                            f.write(response.content)
                        logger.info("done")
                        new_tif_list.append(out_tif)
                        loop_counter += 1

        if not initial_run:
            # Add only the newly downloaded image files to the mosaic dataset
            # ----------------------------------------------------------------
            logger.info("Adding image file to the mosaic dataset ==> " + mosaic_dataset_full_path)
            try:
                arcpy.management.AddRastersToMosaicDataset(
                    mosaic_dataset_full_path, "Raster Dataset", new_tif_list,
                    filter="*.TIF", duplicate_items_action="EXCLUDE_DUPLICATES",
                    build_pyramids="BUILD_PYRAMIDS")
                logger.info("All image files from the directory added to the mosaic dataset")
            except Exception as e:
                # BUG FIX: the previous message referenced out_tif, which is unbound when
                # no coordinate was processed, masking the real error with a NameError.
                logger.error("Unable to add new image files to the mosaic dataset - " + str(e))

        if initial_run:
            # Add all image files in the directory to the empty mosaic dataset
            # -----------------------------------------------------------------
            logger.info("Adding all image files to the mosaic dataset ==> " + mosaic_dataset_full_path)
            try:
                arcpy.management.AddRastersToMosaicDataset(
                    mosaic_dataset_full_path, "Raster Dataset", output_directory,
                    filter="*.TIF", duplicate_items_action="EXCLUDE_DUPLICATES",
                    build_pyramids="BUILD_PYRAMIDS")
                logger.info("All image files from the directory added to the mosaic dataset")
            except Exception as e:
                logger.error("Unable to add " + output_directory + " image files to the mosaic dataset - " + str(e))

        logger.info("Image file parsing and processing done ({})".format(product_name))

        # Update attributes in mosaic dataset table, parsed from raster names of the form
        # <prefix>.A<year><doy>.<tile>.061.tif
        # --------------------------------------------------------------------------------
        logger.info("Updating attributes in the mosaic dataset table for " + product_name)
        with arcpy.da.UpdateCursor(mosaic_dataset_full_path, mosaic_dataset_fields) as cursor:
            for row in cursor:
                day_of_year = int(row[0].split(".")[1][-3:])
                new_year = int(row[0].split(".")[1][1:5])
                tile = row[0].split(".")[2]
                date = (datetime.datetime(new_year, 1, 1) + datetime.timedelta(day_of_year - 1)).strftime('%m/%d/%Y')
                date_text = (datetime.datetime(new_year, 1, 1) + datetime.timedelta(day_of_year - 1)).strftime('%d-%b-%Y')
                row[1] = date
                row[2] = date_text
                row[3] = tile
                row[4] = product_name
                cursor.updateRow(row)
        logger.info("done")

        # Remove directory images older than the age_of_imagery parameter
        # ----------------------------------------------------------------
        logger.info("Removing directory images older than " + str(age_of_imagery) + " days for " + product_name)
        for root, dirs, files in os.walk(output_directory):
            for directory_file in files:
                file_julian_date = directory_file.split(".")[1][-7:]
                file_date = datetime.datetime.strptime(file_julian_date, '%Y%j').date().strftime('%Y%m%d')
                if file_date < remove_date:
                    # BUG FIX: join root and file name properly; bare concatenation
                    # drops the separator for files found in subdirectories.
                    file_path = os.path.join(root, directory_file)
                    logger.warning("Removing file " + file_path)
                    os.remove(file_path)
                    arcpy.Delete_management(file_path)
        logger.info("done")

        # Set the where clause for mosaic dataset deletions.
        # Note: if age_of_imagery is set to 1 it will delete all of the images inserted.
        # -------------------------------------------------------------------------------
        where_clause = "Product = '" + product_name + "' AND (Date < date '" + remove_date_query + "' OR Date IS NULL)"
        logger.info("where clause for mosaic dataset deletions ==> " + where_clause)

        # Remove mosaic dataset images older than the age_of_imagery parameter
        # ---------------------------------------------------------------------
        logger.info("Removing mosaic dataset images older than " + str(age_of_imagery) + " days for " + product_name)
        try:
            arcpy.RemoveRastersFromMosaicDataset_management(mosaic_dataset_full_path, where_clause)
            logger.info("done")
        except Exception as e:
            logger.error("Unable to remove mosaic dataset images with the following where clause for " +
                         product_name + ": " + str(where_clause) + " - " + str(e))

        # Repair mosaic dataset paths
        # ----------------------------
        logger.info("Repairing mosaic dataset paths for " + product_name)
        UtilitiesRaster.repair_mosaic_paths(logger, mosaic_dataset_full_path, output_directory)
        logger.info("Product loop done for {}".format(product_name))

    for product in product_list:
        # Refresh service so the time slider reflects the new imagery
        # ------------------------------------------------------------
        product_name = product[0]
        logger.info("Updating time slider for web service for " + product_name)
        service_name = product_name
        service_type = "ImageServer"
        # service folder may need to be changed when replacing it with the old MODIS product
        service_folder = "test"
        arcpy.PublishingTools.RefreshService(serviceName=service_name,
                                             serviceType=service_type,
                                             serviceFolder=service_folder)

    logger.info("Script complete")


# Script start
# -------------
if __name__ == "__main__":
    sys.exit(main())