Compare commits

@@ -14,5 +14,64 @@ arke is a dumb python monitoring client i am currently working on to monitor htt
## TODO
- [ ] Fix loading json from file
  - alright, see the "json loading" heading in this file.
- [ ] Fix state comparison and posting so that only items that have changed since the last run get posted (see the sketch after this list).
- [ ] Update this readme.
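
For the state-comparison item above, a minimal sketch of one way it could work, assuming the previous and current results are both dicts of dicts shaped like the `datastore` described below; the helper name `changed_items` is made up for illustration:
```python
def changed_items(old, new):
    """Return only the entries in `new` that differ from (or are missing in) `old`.

    Both arguments are dicts of dicts, e.g. {"http": {...}, "certs": {...}}.
    """
    changes = {}
    for category, results in new.items():
        old_results = old.get(category, {})
        for key, value in results.items():
            if old_results.get(key) != value:
                changes.setdefault(category, {})[key] = value
    return changes
```
Posting only what `changed_items` returns for the previous and current result dicts, rather than everything, is the behaviour that item is after.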
## json loading
Confused here. In Python, a dict is the key/value store that maps to JSON. All my functions return a dict:
``` python
def monitor_HttpTargets(monitoringtargets):
    responseTable = {}
    ...
    return responseTable

def monitor_DomainExpiry(targets):
    responseTable = {}
    ...
    return responseTable

def monitor_TlsExpiry(targets):
    responseTable = {}
    ...
    return responseTable
```
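For a concrete picture of what one of those dicts might hold (the targets and values here are made up, not arke's actual output), something like:
```python
# hypothetical return value from monitor_HttpTargets:
# one key per target, mapped to whatever the check recorded
responseTable = {
    "https://example.com": 200,
    "https://example.org": 503,
}
```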
Ok, so that part's straightforward. Then I combine all of them into a single keyed dict, so I have a dict of dicts:
``` python
datastore = {}
datastore['http'] = monitor_HttpTargets(arkevars.httpTargets)
datastore['certs'] = monitor_TlsExpiry(arkevars.tlsTargets)
datastore['whois'] = monitor_DomainExpiry(arkevars.domains_to_check)
```
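Continuing the made-up example, the combined `datastore` is just a two-level dict, and `json.dumps` turns the whole thing into a single JSON object whose top-level keys are `http`, `certs`, and `whois`:
```python
import json

# made-up values, same shape as the real datastore
datastore = {
    "http": {"https://example.com": 200},
    "certs": {"example.com": "2025-01-01"},
    "whois": {"example.com": "2026-06-30"},
}

print(json.dumps(datastore, ensure_ascii=False, sort_keys=True))
# {"certs": {"example.com": "2025-01-01"}, "http": {"https://example.com": 200}, "whois": {"example.com": "2026-06-30"}}
```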
OK, again straightforward. Here's where shit gets less obvious to me. I need to write this dict out as a JSON object in a file, which I do like so:
```python
# write new results to file
with open(this_round_file, "a+", encoding="utf-8") as outfile:
    json.dump(datastore, outfile, ensure_ascii=False, sort_keys=True)
```
Ok, so crafting the JSON object makes sense and seems pretty straightforward. What about reading the 'json' object back? WELL THEREIN LIES THE TALE MY GOOD BITCH
```
Traceback (most recent call last):
  File "arke.py", line 114, in <module>
    json_data = json.load(json_File)
  File "/usr/lib/python3.8/json/__init__.py", line 293, in load
    return loads(fp.read(),
  File "/usr/lib/python3.8/json/__init__.py", line 357, in loads
    return _default_decoder.decode(s)
  File "/usr/lib/python3.8/json/decoder.py", line 340, in decode
    raise JSONDecodeError("Extra data", s, end)
json.decoder.JSONDecodeError: Extra data: line 1 column 133 (char 132)
```
This is where I'm stuck. Loading that file doesn't work, which seems to suggest that how I'm crafting the JSON string written to the file with `json.dump` is problematic! But I haven't figured out /why/, ugh.
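
One plausible explanation, going only by the traceback: `Extra data` is what `json.load` raises when a complete JSON value is followed by more characters, which is exactly what you get when more than one JSON document ends up in the same file, for example because the file is opened in `"a+"` (append) mode and written more than once, or because output from an earlier run is still sitting in it. A minimal reproduction of that failure, with a made-up file name:
```python
import json

# two json.dump calls against the same append-mode handle leave
# two JSON objects back to back in the file: {...}{...}
with open("state_demo.json", "a+", encoding="utf-8") as outfile:
    json.dump({"http": {"https://example.com": 200}}, outfile)
    json.dump({"certs": {"example.com": "2025-01-01"}}, outfile)

with open("state_demo.json", "r", encoding="utf-8") as infile:
    json.load(infile)  # json.decoder.JSONDecodeError: Extra data
```
If that is what's happening here, writing with mode `"w"` so each round starts from an empty file, and doing exactly one `json.dump` of the combined `datastore`, keeps the file down to a single JSON document that `json.load` can read back.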

arke.py

@@ -8,6 +8,7 @@ import whois
 import OpenSSL
 import ssl
 import time
+import pdb
 logging.basicConfig(format='%(asctime)s %(message)s', datefmt='%Y/%m/%d %I:%M:%S %p',level=logging.INFO,filename='arke.log')
 logger = logging.getLogger("arke")
@@ -73,45 +74,43 @@ is_on = True
 while is_on:
     today = datetime.today()
+    datastore = {}
     # make sure http targets are /up/
-    datastore = monitor_HttpTargets(arkevars.httpTargets)
-    json_string = json.dumps(datastore)
+    datastore['http'] = monitor_HttpTargets(arkevars.httpTargets)
     # get SSL certs on http targets
-    cert_info = monitor_TlsExpiry(arkevars.tlsTargets)
-    cert_json = json.dumps(cert_info)
+    datastore['certs'] = monitor_TlsExpiry(arkevars.tlsTargets)
     # get whois info on domain targets
-    domain_info = monitor_DomainExpiry(arkevars.domains_to_check)
-    domain_json = json.dumps(domain_info)
+    datastore['whois'] = monitor_DomainExpiry(arkevars.domains_to_check)
     # write new results to file
-    file = open(this_round_file, "a+")
-    file.write(json_string)
-    file.write(cert_json)
-    file.write(domain_json)
-    file.close()
+    with open(this_round_file, "a+", encoding="utf-8") as outfile:
+        json.dump(datastore, outfile, ensure_ascii=False, sort_keys=True)
     # track state
-    file = open(this_round_file, "r")
-    if os.path.exists(last_round_file):
-        stateFile = open(last_round_file, "r")
-    else:
-        stateFile = open(last_round_file, "w+")
-    oldData = stateFile.read()
-    if oldData != file.read():
-        stateChanged = True
-    else:
-        stateChanged = False
+    with open(this_round_file, "a+", encoding="utf-8") as file:
+        if os.path.exists(last_round_file):
+            stateFile = open(last_round_file, "r")
+        else:
+            stateFile = open(last_round_file, "w+")
+        oldData = stateFile.read()
+        if oldData != file.read():
+            stateChanged = True
+        else:
+            stateChanged = False
     stateFile.close()
     # delete state.log so I can write to it cleanly
     os.remove(last_round_file)
     # queue up an alert if stateChanged = True
     results = []
-    with open(this_round_file, "r") as json_File:
+    with open(this_round_file, "r+", encoding="utf-8") as json_File:
+        pdb.set_trace()
         json_data = json.load(json_File)
         for item in json_data:
             results.append(item)
@@ -123,10 +122,9 @@ while is_on:
         errorFile.write(errorText)
     # Copy current results to state.log file for next iteration
-    errorFile = open(last_round_file, "a+")
-    errorFile.write(json_string)
-    errorFile.write(cert_json)
-    errorFile.write(domain_json)
-    errorFile.close()
+    with open(last_round_file, "a+") as json_File:
+        json_datastore = json.dumps(datastore, ensure_ascii=False, sort_keys=True)
+        json_File.write(json_datastore)
     os.remove(this_round_file)
     time.sleep(60)

poetry.lock

@@ -102,14 +102,6 @@ optional = false
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
 version = "1.14.0"
-
-[[package]]
-category = "main"
-description = "SSL wrapper for socket objects (2.3, 2.4, 2.5 compatible)"
-name = "ssl"
-optional = false
-python-versions = "*"
-version = "1.16"
 
 [[package]]
 category = "main"
 description = "HTTP library with thread-safe connection pooling, file post, and more."
@@ -132,7 +124,7 @@ python-versions = "*"
 version = "0.9.5"
 
 [metadata]
-content-hash = "72b13a9691dd1c6c18fce13c79473963749c33fc5c68b9be5ea03747e28c94b6"
+content-hash = "c317d94b6a13f0c579fef558c2ed3bfddd8a0d3fa7f16728f551cd5b193975a1"
 python-versions = "^3.7"
 
 [metadata.files]
@@ -221,9 +213,6 @@ six = [
     {file = "six-1.14.0-py2.py3-none-any.whl", hash = "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c"},
     {file = "six-1.14.0.tar.gz", hash = "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a"},
 ]
-ssl = [
-    {file = "ssl-1.16.tar.gz", hash = "sha256:ac21156fee6aee9eb8d765bbb16f5f49492d81ff4b22f7b8fc001d2251120930"},
-]
 urllib3 = [
     {file = "urllib3-1.25.8-py2.py3-none-any.whl", hash = "sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc"},
     {file = "urllib3-1.25.8.tar.gz", hash = "sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc"},
