Compare commits

...

2 Commits

@ -13,10 +13,12 @@ arke is a dumb python monitoring client i am currently working on to monitor htt
- vars file that points the client at my servers
## TODO
- [ ] Fix loading json from file
- [X] Fix loading json from file
- alright see "json loading" heading in this file.
- [ ] Fix state comparison and post to only post the item that has changed since last run
- [ ] update this readme.
- [X] Fix state comparison and post to only post the item that has changed since last run
- I thought I mostly had this fixed but I'm still running into issues.
- requires some additional digging.
- [X] update this readme.
## json loading
confused here. in python, a dict is the key/value store that is used to map to json. all my functions return a dict:
@ -75,3 +77,6 @@ json.decoder.JSONDecodeError: Extra data: line 1 column 133 (char 132)
```
this is where I get stuck. loading that file doesn't work. Which seems to suggest that how I'm crafting the json string written to the file with `json.dump` is problematic! But I haven't figured out /why/ ugh.
### fixed
oh my god i wasn't cleaning up the file after the last run, so characters from the previous run were fucking up the file!!! aaaah!

@ -14,9 +14,9 @@ logging.basicConfig(format='%(asctime)s %(message)s', datefmt='%Y/%m/%d %I:%M:%S
logger = logging.getLogger("arke")
last_round_file = "/shared/state.json"
this_round_file = "/shared/results.json"
alert_file = "/shared/alerts.log"
last_round_file_path = "/Users/jledbetter/shared/state.json"
this_round_file_path = "/Users/jledbetter/shared/results.json"
alert_file_path = "/Users/jledbetter/shared/alerts.log"
def monitor_HttpTargets(monitoringtargets):
@ -42,7 +42,7 @@ def monitor_DomainExpiry(targets):
expire_year = whois.query(domain).expiration_date.year
try:
if expire_year - current_year <= 1:
responseTable[domain] = "Domain expiring in < 1 year, please rectify."
responseTable[domain] = "Domain expiring in less than 1 year, please rectify."
else:
responseTable[domain] = "Domain is healthy."
except:
@ -61,7 +61,7 @@ def monitor_TlsExpiry(targets):
expiration = datetime.strptime(x509.get_notAfter().decode(), '%Y%m%d%H%M%SZ')
try:
if expiration - current_year <= 1:
responseTable[site] = "TLS expiring in < 30 days, please rectify."
responseTable[site] = "TLS expiring in less than 30 days, please rectify."
else:
responseTable[site] = "cert is healthy."
except:
@ -87,42 +87,46 @@ while is_on:
datastore['whois'] = monitor_DomainExpiry(arkevars.domains_to_check)
# write new results to file
with open(this_round_file, "a+", encoding="utf-8") as outfile:
with open(this_round_file_path, "a+", encoding="utf-8") as outfile:
json.dump(datastore, outfile, ensure_ascii=False, sort_keys=True)
# track state
with open(this_round_file, "a+", encoding="utf-8") as file:
if os.path.exists(last_round_file):
stateFile = open(last_round_file, "r")
with open(this_round_file_path, "a+", encoding="utf-8") as new_file:
if os.path.exists(last_round_file_path):
with open(last_round_file_path, "r+", encoding="utf-8") as old_file:
oldData = old_file.read()
else:
stateFile = open(last_round_file, "w+")
with open(last_round_file_path, "a+", encoding="utf-8") as old_file:
oldData = old_file.read()
newData = open(this_round_file_path, "r").read()
oldData = stateFile.read()
if oldData != file.read():
# pdb.set_trace()
if oldData != newData:
stateChanged = True
else:
stateChanged = False
stateFile.close()
pdb.set_trace()
time.sleep(1)
# delete state.log so I can write to it cleanly
os.remove(last_round_file)
# delete state.json so I can write to it cleanly
os.remove(last_round_file_path)
# queue up an alert if stateChanged = True
results = []
with open(this_round_file, "r+", encoding="utf-8") as json_File:
with open(this_round_file_path, "r+", encoding="utf-8") as json_File:
json_data = json.load(json_File)
for key, value in json_data.items():
for key, value in value.items():
if stateChanged is True:
errorFile = open(alert_file, "a+")
errorFile = open(alert_file_path, "a+")
errorText = str(key) + " " + str(value) + "\n"
errorFile.write(errorText)
# Copy current results to state.log file for next iteration
with open(last_round_file, "a+") as json_File:
with open(last_round_file_path, "a+") as json_File:
json_datastore = json.dumps(datastore, ensure_ascii=False, sort_keys=True)
json_File.write(json_datastore)
os.remove(this_round_file)
os.remove(this_round_file_path)
time.sleep(60)

Loading…
Cancel
Save