Compare commits

...

2 Commits

@@ -13,10 +13,12 @@ arke is a dumb python monitoring client i am currently working on to monitor htt
 - vars file that points the client at my servers
 ## TODO
-- [ ] Fix loading json from file
-- [ ] Fix state comparison and post to only post the item that has changed since last run
-- [ ] update this readme.
+- [X] Fix loading json from file
+  - alright see "json loading" heading in this file.
+- [X] Fix state comparison and post to only post the item that has changed since last run
+  - i thought I mostly had this fixed but i'm still running into issues.
+  - requires some additional digging.
+- [X] update this readme.
 ## json loading
 confused here. in python, a dict is the key/value store that is used to map to json. all my functions return a dict:
@@ -75,3 +77,6 @@ json.decoder.JSONDecodeError: Extra data: line 1 column 133 (char 132)
 ```
 this is where I get. loading that file doesn't work. Which seems to suggest that how I'm crafting the json string written to the file with `json.dump` is problematic! But I haven't figured out /why/ ugh.
+
+### fixed
+oh my god i wasn't cleaning up the file after last run so previous run characters were fucking up the file!!! aaaah!
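worth noting: the "Extra data" error is easy to reproduce in isolation. a minimal standalone sketch (filenames illustrative, not arke's paths): `json.load` expects exactly one JSON document per file, so leftover characters from a previous run's `json.dump` trip it, and a truncating `"w"` open is the cleanup-free fix.

```python
import json

# two appends leave two JSON documents in the file: {"run": 1}{"run": 2}
for run in (1, 2):
    with open("results.json", "a+", encoding="utf-8") as outfile:
        json.dump({"run": run}, outfile, ensure_ascii=False, sort_keys=True)

with open("results.json", "r", encoding="utf-8") as infile:
    try:
        json.load(infile)  # expects exactly one document per file
    except json.decoder.JSONDecodeError as err:
        print(err)  # Extra data: line 1 column 11 (char 10)

# "w" truncates on open, so each run leaves exactly one document behind
with open("results.json", "w", encoding="utf-8") as outfile:
    json.dump({"run": 3}, outfile, ensure_ascii=False, sort_keys=True)

with open("results.json", "r", encoding="utf-8") as infile:
    print(json.load(infile))  # {'run': 3}
```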

@@ -14,9 +14,9 @@ logging.basicConfig(format='%(asctime)s %(message)s', datefmt='%Y/%m/%d %I:%M:%S
 logger = logging.getLogger("arke")
-last_round_file = "/shared/state.json"
-this_round_file = "/shared/results.json"
-alert_file = "/shared/alerts.log"
+last_round_file_path = "/Users/jledbetter/shared/state.json"
+this_round_file_path = "/Users/jledbetter/shared/results.json"
+alert_file_path = "/Users/jledbetter/shared/alerts.log"
 def monitor_HttpTargets(monitoringtargets):
@@ -42,7 +42,7 @@ def monitor_DomainExpiry(targets):
         expire_year = whois.query(domain).expiration_date.year
         try:
             if expire_year - current_year <= 1:
-                responseTable[domain] = "Domain expiring in < 1 year, please rectify."
+                responseTable[domain] = "Domain expiring in less than 1 year, please rectify."
             else:
                 responseTable[domain] = "Domain is healthy."
         except:
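a side note on the year math this hunk keeps: `expire_year - current_year <= 1` flags a domain expiring in 23 months the same as one expiring next week. a hedged sketch of a date-based check instead, assuming the same `whois` package (the `.expiration_date.year` access above implies `expiration_date` is a `datetime`); `domain_expiring_soon` and `within_days` are illustrative names, not arke's:

```python
from datetime import datetime, timedelta

import whois  # same package the diff already calls via whois.query(...)

def domain_expiring_soon(domain: str, within_days: int = 365) -> bool:
    """Compare full expiry dates instead of bare calendar years."""
    expires = whois.query(domain).expiration_date
    # datetime - datetime yields a timedelta, so the threshold is exact days
    return expires - datetime.now() <= timedelta(days=within_days)
```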
@@ -61,7 +61,7 @@ def monitor_TlsExpiry(targets):
         expiration = datetime.strptime(x509.get_notAfter().decode(), '%Y%m%d%H%M%SZ')
         try:
             if expiration - current_year <= 1:
-                responseTable[site] = "TLS expiring in < 30 days, please rectify."
+                responseTable[site] = "TLS expiring in less than 30 days, please rectify."
             else:
                 responseTable[site] = "cert is healthy."
         except:
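one thing this hunk leaves alone: `expiration` is a `datetime` while `current_year` is presumably an `int`, so `expiration - current_year` raises a `TypeError` that the bare `except:` swallows, and the `<= 1` threshold never matches the 30-day message anyway. a sketch of a day-based check using only the standard library (the `ssl`-based fetch and the helper name `days_until_tls_expiry` are my substitution, not arke's pyOpenSSL path):

```python
import socket
import ssl
from datetime import datetime, timezone

def days_until_tls_expiry(host: str, port: int = 443) -> int:
    """Return how many days remain on host's leaf certificate."""
    ctx = ssl.create_default_context()
    with socket.create_connection((host, port), timeout=10) as sock:
        with ctx.wrap_socket(sock, server_hostname=host) as tls:
            cert = tls.getpeercert()
    # getpeercert() formats notAfter like 'Jun  1 12:00:00 2030 GMT';
    # ssl.cert_time_to_seconds parses that into epoch seconds
    not_after = ssl.cert_time_to_seconds(cert["notAfter"])
    expires = datetime.fromtimestamp(not_after, tz=timezone.utc)
    return (expires - datetime.now(timezone.utc)).days

if days_until_tls_expiry("example.com") <= 30:
    print("TLS expiring in less than 30 days, please rectify.")
```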
@@ -87,42 +87,46 @@ while is_on:
     datastore['whois'] = monitor_DomainExpiry(arkevars.domains_to_check)
     # write new results to file
-    with open(this_round_file, "a+", encoding="utf-8") as outfile:
+    with open(this_round_file_path, "a+", encoding="utf-8") as outfile:
         json.dump(datastore, outfile, ensure_ascii=False, sort_keys=True)
     # track state
-    with open(this_round_file, "a+", encoding="utf-8") as file:
-        if os.path.exists(last_round_file):
-            stateFile = open(last_round_file, "r")
+    with open(this_round_file_path, "a+", encoding="utf-8") as new_file:
+        if os.path.exists(last_round_file_path):
+            with open(last_round_file_path, "r+", encoding="utf-8") as old_file:
+                oldData = old_file.read()
         else:
-            stateFile = open(last_round_file, "w+")
-        oldData = stateFile.read()
-        if oldData != file.read():
+            with open(last_round_file_path, "a+", encoding="utf-8") as old_file:
+                oldData = old_file.read()
+        newData = open(this_round_file_path, "r").read()
+        # pdb.set_trace()
+        if oldData != newData:
             stateChanged = True
         else:
             stateChanged = False
-        stateFile.close()
+        pdb.set_trace()
+        time.sleep(1)
-    # delete state.log so I can write to it cleanly
-    os.remove(last_round_file)
+    # delete state.json so I can write to it cleanly
+    os.remove(last_round_file_path)
     # queue up an alert if stateChanged = True
     results = []
-    with open(this_round_file, "r+", encoding="utf-8") as json_File:
+    with open(this_round_file_path, "r+", encoding="utf-8") as json_File:
         json_data = json.load(json_File)
         for key, value in json_data.items():
             for key, value in value.items():
                 if stateChanged is True:
-                    errorFile = open(alert_file, "a+")
+                    errorFile = open(alert_file_path, "a+")
                     errorText = str(key) + " " + str(value) + "\n"
                     errorFile.write(errorText)
     # Copy current results to state.log file for next iteration
-    with open(last_round_file, "a+") as json_File:
+    with open(last_round_file_path, "a+") as json_File:
         json_datastore = json.dumps(datastore, ensure_ascii=False, sort_keys=True)
         json_File.write(json_datastore)
-    os.remove(this_round_file)
+    os.remove(this_round_file_path)
     time.sleep(60)
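two notes on this state-tracking hunk (the `pdb.set_trace()` and `time.sleep(1)` look like debugging leftovers). first, a Python gotcha that may be behind the "still running into issues" item: an `"a+"` stream starts positioned at end-of-file, so a bare `read()` returns `""` without a `seek(0)` first; harmless in the `else:` branch here (the file is brand new and empty), but it is why the old `file.read()` comparison never saw real data. second, comparing whole file strings flags the entire run when any one target changes, which is the remaining half of the "only post the item that has changed" TODO. a hedged sketch of a dict-level comparison, reusing the diff's `last_round_file_path` and the `datastore` shape implied by the nested loop above (`{monitor: {target: status}}`); `diff_changed_items` is an illustrative helper, not something in arke:

```python
import json
import os

def diff_changed_items(last_round_file_path: str, datastore: dict) -> dict:
    """Return only the targets whose status differs from the previous run."""
    old = {}
    if os.path.exists(last_round_file_path):
        with open(last_round_file_path, "r", encoding="utf-8") as f:
            old = json.load(f)
    changed = {}
    for monitor, results in datastore.items():
        for target, status in results.items():
            # comparing parsed dicts sidesteps string-level noise such as
            # key ordering or stale characters left over in the file
            if old.get(monitor, {}).get(target) != status:
                changed[target] = status
    # "w" truncates on open, so no os.remove() bookkeeping is needed
    with open(last_round_file_path, "w", encoding="utf-8") as f:
        json.dump(datastore, f, ensure_ascii=False, sort_keys=True)
    return changed
```

each alert line could then come from `diff_changed_items(last_round_file_path, datastore).items()` instead of re-walking the whole results file.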
