diff --git a/arke.py b/arke.py index a62420b..3069ffa 100644 --- a/arke.py +++ b/arke.py @@ -14,9 +14,9 @@ logging.basicConfig(format='%(asctime)s %(message)s', datefmt='%Y/%m/%d %I:%M:%S logger = logging.getLogger("arke") -last_round_file = "/shared/state.json" -this_round_file = "/shared/results.json" -alert_file = "/shared/alerts.log" +last_round_file_path = "/Users/jledbetter/shared/state.json" +this_round_file_path = "/Users/jledbetter/shared/results.json" +alert_file_path = "/Users/jledbetter/shared/alerts.log" def monitor_HttpTargets(monitoringtargets): @@ -42,7 +42,7 @@ def monitor_DomainExpiry(targets): expire_year = whois.query(domain).expiration_date.year try: if expire_year - current_year <= 1: - responseTable[domain] = "Domain expiring in < 1 year, please rectify." + responseTable[domain] = "Domain expiring in less than 1 year, please rectify." else: responseTable[domain] = "Domain is healthy." except: @@ -61,7 +61,7 @@ def monitor_TlsExpiry(targets): expiration = datetime.strptime(x509.get_notAfter().decode(), '%Y%m%d%H%M%SZ') try: if expiration - current_year <= 1: - responseTable[site] = "TLS expiring in < 30 days, please rectify." + responseTable[site] = "TLS expiring in less than 30 days, please rectify." else: responseTable[site] = "cert is healthy." 
except: @@ -87,42 +87,46 @@ while is_on: datastore['whois'] = monitor_DomainExpiry(arkevars.domains_to_check) # write new results to file - with open(this_round_file, "a+", encoding="utf-8") as outfile: + with open(this_round_file_path, "a+", encoding="utf-8") as outfile: json.dump(datastore, outfile, ensure_ascii=False, sort_keys=True) # track state - with open(this_round_file, "a+", encoding="utf-8") as file: - if os.path.exists(last_round_file): - stateFile = open(last_round_file, "r") + with open(this_round_file_path, "a+", encoding="utf-8") as new_file: + if os.path.exists(last_round_file_path): + with open(last_round_file_path, "r+", encoding="utf-8") as old_file: + oldData = old_file.read() else: - stateFile = open(last_round_file, "w+") + with open(last_round_file_path, "a+", encoding="utf-8") as old_file: + oldData = old_file.read() + newData = open(this_round_file_path, "r").read() - oldData = stateFile.read() - if oldData != file.read(): + # pdb.set_trace() + if oldData != newData: stateChanged = True else: stateChanged = False - stateFile.close() + # pdb.set_trace() + time.sleep(1) - # delete state.log so I can write to it cleanly - os.remove(last_round_file) + # delete state.json so I can write to it cleanly + os.remove(last_round_file_path) # queue up an alert if stateChanged = True results = [] - with open(this_round_file, "r+", encoding="utf-8") as json_File: + with open(this_round_file_path, "r+", encoding="utf-8") as json_File: json_data = json.load(json_File) for key, value in json_data.items(): for key, value in value.items(): if stateChanged is True: - errorFile = open(alert_file, "a+") + errorFile = open(alert_file_path, "a+") errorText = str(key) + " " + str(value) + "\n" errorFile.write(errorText) # Copy current results to state.log file for next iteration - with open(last_round_file, "a+") as json_File: + with open(last_round_file_path, "a+") as json_File: json_datastore = json.dumps(datastore, ensure_ascii=False, sort_keys=True) 
json_File.write(json_datastore) - os.remove(this_round_file) + os.remove(this_round_file_path) time.sleep(60)