import datetime
import requests
from dateutil.parser import parse as parsedate
import os
import sys
from os import listdir
from os.path import isfile, join
import traceback
import subprocess
import time
import logging
import logging.handlers

# Usage:
# python3 downloadViirs.py;  aws s3 sync FIRMS s3://alertwildfirebackup/FIRMS/
#
# It seems like you need to generate a new key through the website every single day https://nrt3.modaps.eosdis.nasa.gov/profile/app-keys



def download_file(url, local_filename, headers):
    """Stream ``url`` to ``local_filename`` on disk, creating parent dirs.

    Args:
        url: Fully-qualified URL to GET.
        local_filename: Destination path; its parent directory is created
            if missing.
        headers: Dict of HTTP headers (used for the NRT Bearer token).

    Returns:
        The ``local_filename`` that was written.

    Raises:
        requests.HTTPError: If the server responds with a 4xx/5xx status.
    """
    directory = os.path.split(local_filename)[0]
    # exist_ok avoids the check-then-create race; the truthiness guard
    # avoids os.makedirs('') blowing up when the path has no directory part.
    if directory:
        os.makedirs(directory, exist_ok=True)
    # NOTE the stream=True parameter below: download in 8 KiB chunks
    # instead of buffering the whole file in memory.
    with requests.get(url, headers=headers, stream=True) as r:
        r.raise_for_status()
        with open(local_filename, 'wb') as f:
            for chunk in r.iter_content(chunk_size=8192):
                # If you have chunk encoded response uncomment if
                # and set chunk_size parameter to None.
                #if chunk:
                f.write(chunk)
    return local_filename

def processFiles():
    """Prune every downloaded VIIRS txt file that has no processed copy yet.

    Scans the raw download directory for files whose name contains 'txt',
    runs processFile() on each one that is missing from the processed
    directory, and writes the pruned text alongside under the same name.
    """
    src_dir = "./FIRMS/suomi-npp-viirs-c2/USA_contiguous_and_Hawaii/"
    dst_dir = "./FIRMS/suomi-npp-viirs-c2/USA_contiguous_and_Hawaii_processed/"
    if not os.path.exists(dst_dir):
        os.makedirs(dst_dir)
    candidates = [
        name for name in listdir(src_dir)
        if 'txt' in name and isfile(join(src_dir, name))
    ]
    for name in candidates:
        target = join(dst_dir, name)
        if os.path.isfile(target):
            continue  # already processed on an earlier pass
        print('need to make', name)
        pruned = processFile(join(src_dir, name), target)
        with open(target, "w") as out:
            out.write(pruned)

def processFile(inputName, outputName):
    """Reduce one VIIRS detection file to latitude,longitude,frp columns.

    Reads the comma-separated file at ``inputName``, locates the
    latitude/longitude/frp columns by name from the first (header) line,
    and returns the pruned rows joined with newlines. The header row
    itself passes through the filter, so the result starts with
    ``latitude,longitude,frp``. Rows with fewer fields than the header
    (e.g. trailing blanks) are dropped.

    ``outputName`` is only echoed in the progress message; the caller is
    responsible for writing the returned string.

    Raises:
        ValueError: If any of latitude/longitude/confidence/frp is
            missing from the header.
    """
    print('processing', inputName, outputName)
    with open(inputName, "r") as src:
        lines = src.readlines()
        header = lines[0].split(',')
        lat_col = header.index('latitude')
        lon_col = header.index('longitude')
        # Looked up for validation only (the original kept the same check);
        # the confidence column is not included in the output.
        conf_col = header.index('confidence')
        frp_col = header.index('frp')
        rows = [line.split(',') for line in lines]
        complete = [row for row in rows if len(row) >= len(header)]
        kept = (','.join([row[lat_col], row[lon_col], row[frp_col]])
                for row in complete)
        return '\n'.join(kept)

if __name__ == "__main__":
    logger = logging.getLogger()
    logger.addHandler(logging.handlers.SMTPHandler(
        mailhost=("mail.simtable.com", 587),
        fromaddr="script_logger@simtable.com",
        toaddrs="script_logger@simtable.com",
        subject="EXCEPTION",
        credentials=('script_logger@simtable.com', 'script_logger'),
        secure=()))
    while True:
    # url = "https://nrt3.modaps.eosdis.nasa.gov/api/v2/content/archives/FIRMS/suomi-npp-viirs-c2/USA_contiguous_and_Hawaii"
        now = datetime.datetime.now()
        currYear = now.year
        newYearPasses = 0
        # for year in range(2021, currYear+1):
        nowish = now.timetuple() 
        dayOfYear = nowish.tm_yday

        errors = []
        for day in range(max(1,dayOfYear-7), dayOfYear ):
            print(currYear, day)
            url="https://nrt3.modaps.eosdis.nasa.gov/api/v2/content/archives/FIRMS/suomi-npp-viirs-c2/USA_contiguous_and_Hawaii/SUOMI_VIIRS_C2_USA_contiguous_and_Hawaii_VNP14IMGTDL_NRT_" + str(currYear) + str(day).zfill(3) + ".txt"
            localFilename = "./FIRMS/suomi-npp-viirs-c2/USA_contiguous_and_Hawaii/SUOMI_VIIRS_C2_USA_contiguous_and_Hawaii_VNP14IMGTDL_NRT_" + str(currYear) + str(day).zfill(3) + ".txt"
            headers = {'Authorization': "Bearer ZG9vZGVyc29uOlpEQXdaRE55Y3pCdVFHZHRZV2xzTG1OdmJRPT06MTY0MTg0NTA1Nzo5MGM5OTljYjk2MDM5Mjg4NWJkY2U0NWQyZGExNzk3NDYxMGRiMWNk"}
            if not os.path.isfile(localFilename) :
                try:
                    print('downloading', url)
                    download_file(url, localFilename, headers)
                    print('downloaded', url)
                except Exception:
                    errors.append(traceback.format_exc())
                    print(traceback.format_exc())
        if len(errors)>0:
            logging.exception(errors)

        processFiles()
        try:
            subprocess.run(["aws", "s3", "sync", "FIRMS", "s3://alertwildfirebackup/FIRMS/" ])
        except Exception:
            print(traceback.format_exc())
            logging.exception(traceback.format_exc())
        
        print('sleeping')
        time.sleep(60*60*4) # take 4 hour nap


