Merge pull request #85 from besendorf/main

Change hardcoded paths to make analysis.py usable standalone

Commit 5cc0abcf83
@@ -16,9 +16,8 @@ import os
     containing a capture.pcap file.
 """
 
-if __name__ == "__main__":
-    if len(sys.argv) == 2:
-        capture_directory = sys.argv[1]
+def analyze(capture_directory,frontend=False):
     if os.path.isdir(capture_directory):
 
         manager = Manager()
@@ -29,6 +28,8 @@ if __name__ == "__main__":
         zeek.start_zeek()
         alerts["zeek"] = zeek.retrieve_alerts()
 
+        if not os.path.isdir(os.path.join(capture_directory, "assets")):
+            os.mkdir(os.path.join(capture_directory, "assets"))
         # whitelist.json writing.
         with open(os.path.join(capture_directory, "assets/whitelist.json"), "w") as f:
             f.write(json.dumps(zeek.retrieve_whitelist(),
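
Note on the two added lines above: a standalone run has to create the assets/ directory itself before whitelist.json can be written, since only the frontend workflow prepares it. A minimal equivalent sketch using os.makedirs (an alternative not used in this PR; the helper name is hypothetical):

    import os

    def ensure_assets_dir(capture_directory):
        # Create <capture_directory>/assets when missing; exist_ok=True makes
        # repeated runs safe without an explicit os.path.isdir() check.
        os.makedirs(os.path.join(capture_directory, "assets"), exist_ok=True)
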
@@ -67,9 +68,30 @@ if __name__ == "__main__":
             f.write(json.dumps(report, indent=4, separators=(',', ': ')))
 
         # Generate the report
-        report = Report(capture_directory)
+        report = Report(capture_directory,frontend)
         report.generate_report()
     else:
         print("The directory doesn't exist.")
 
+
+def usage():
+    print("""Usage: python analysis.py [capture_directory]
+       where [capture_directory] is a directory containing a capture.pcap file
+       analysis.py -f starts the analysis in frontend mode intended to be called by the TinyCheck frontend.""")
+
+
+if __name__ == "__main__":
+    if len(sys.argv) == 2: #called manually without frontend
+        analyze(sys.argv[1], False)
+    elif len(sys.argv) == 3:
+        if(sys.argv[1]) == "-f": #frontend mode
+            analyze(sys.argv[2], True)
         else:
-            print("Please specify a capture directory in argument.")
+            usage()
+    else:
+        usage()
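
With the hunks above, analysis.py can be driven in two ways. The sketch below mirrors the new usage() text and the backend call further down; the capture path is illustrative and assumed to contain a capture.pcap file, and the script path is given relative to the repository root:

    import subprocess
    import sys

    capture_dir = "/tmp/my_capture"  # hypothetical capture directory

    # Standalone mode: no device.json / capinfos.json are expected in assets/.
    subprocess.run([sys.executable, "analysis/analysis.py", capture_dir], check=True)

    # Frontend mode: the TinyCheck backend passes -f, so the report also embeds
    # device and capture details (see the Report changes below).
    subprocess.run([sys.executable, "analysis/analysis.py", "-f", capture_dir], check=True)
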
@@ -13,7 +13,7 @@ from utils import get_config
 
 class Report(object):
 
-    def __init__(self, capture_directory):
+    def __init__(self, capture_directory, frontend):
         self.capture_directory = capture_directory
         self.alerts = self.read_json(os.path.join(
             capture_directory, "assets/alerts.json"))
@@ -21,6 +21,9 @@ class Report(object):
             capture_directory, "assets/whitelist.json"))
         self.conns = self.read_json(os.path.join(
             capture_directory, "assets/conns.json"))
+        self.device = None
+        self.capinfos = None
+        if frontend:
             self.device = self.read_json(os.path.join(
                 capture_directory, "assets/device.json"))
             self.capinfos = self.read_json(os.path.join(
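
A short illustration of what the constructor change means (values are illustrative; it assumes Report is importable from the analysis classes package and that the capture directory already holds the assets written by the analysis step):

    from classes.report import Report  # assumed import path

    standalone = Report("/tmp/my_capture", frontend=False)
    print(standalone.device, standalone.capinfos)  # -> None None

    frontend_run = Report("/tmp/my_capture", frontend=True)
    # Only in frontend mode does the constructor also read assets/device.json
    # and assets/capinfos.json, which the TinyCheck frontend produces.
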
@@ -204,12 +207,14 @@ class Report(object):
         """
         header = "<div class=\"header\">"
         header += "<div class=\"logo\"></div>"
+        if self.device is not None:
             header += "<p><br /><strong>{}: {}</strong><br />".format(self.template["device_name"],
                                                                       self.device["name"])
             header += "{}: {}<br />".format(self.template["device_mac"],
                                             self.device["mac_address"])
         header += "{} {}<br />".format(self.template["report_generated_on"],
                                        datetime.now().strftime("%d/%m/%Y - %H:%M:%S"))
+        if self.capinfos is not None:
             header += "{}: {}s<br />".format(self.template["capture_duration"],
                                              self.capinfos["Capture duration"])
         header += "{}: {}<br />".format(self.template["packets_number"],
@@ -236,6 +236,7 @@ class ZeekEngine(object):
                 pass
 
             try: # Domain history check.
+
                 whois_record = whois.whois(c["resolution"])
                 creation_date = whois_record.creation_date if type(
                     whois_record.creation_date) is not list else whois_record.creation_date[0]
@@ -247,6 +248,7 @@ class ZeekEngine(object):
                                     "host": c["resolution"],
                                     "level": "Moderate",
                                     "id": "ACT-02"})
+
             except:
                 pass
 
@@ -443,11 +445,10 @@ class ZeekEngine(object):
         """
             Start zeek and check the logs.
         """
-        sp.Popen("cd {} && /opt/zeek/bin/zeek -Cr capture.pcap protocols/ssl/validate-certs".format(
+        sp.Popen("cd {} && zeek -Cr capture.pcap protocols/ssl/validate-certs".format(
             self.working_dir), shell=True).wait()
         sp.Popen("cd {} && mv *.log assets/".format(self.working_dir),
                  shell=True).wait()
 
         self.fill_dns(self.working_dir + "/assets/")
         self.netflow_check(self.working_dir + "/assets/")
         self.ssl_check(self.working_dir + "/assets/")
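
The functional change in this engine is the Zeek invocation: the hardcoded /opt/zeek/bin/zeek is replaced by a plain zeek, so the binary now has to be resolvable through PATH. A small, hypothetical pre-flight check (not part of this PR) that makes that assumption explicit:

    import shutil

    # Fail early and clearly if the `zeek` binary is not on PATH.
    if shutil.which("zeek") is None:
        raise RuntimeError("zeek not found on PATH; install Zeek or add /opt/zeek/bin to PATH")
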
@@ -10,7 +10,7 @@ import os
 from functools import reduce
 
 # I'm not going to use an ORM for that.
-parent = "/".join(sys.path[0].split("/")[:-1])
+parent = os.path.split(os.path.dirname(os.path.abspath(sys.argv[0])))[0]
 conn = sqlite3.connect(os.path.join(parent, "tinycheck.sqlite3"))
 cursor = conn.cursor()
 
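
Worked example for the path change above (install location hypothetical): the old expression depended on sys.path[0], which varies with how and from where the interpreter is started, while the new one is derived from the script's own absolute path, so the sqlite database is found in the same place regardless of the current working directory:

    import os
    import sys

    # Suppose the backend entry point is /usr/share/tinycheck/server/backend/main.py.
    script_dir = os.path.dirname(os.path.abspath(sys.argv[0]))  # .../server/backend
    parent = os.path.split(script_dir)[0]                        # .../server
    print(os.path.join(parent, "tinycheck.sqlite3"))             # .../server/tinycheck.sqlite3
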
@@ -24,7 +24,7 @@ class Analysis(object):
         if self.token is not None:
             parent = "/".join(sys.path[0].split("/")[:-2])
             sp.Popen(
-                [sys.executable, "{}/analysis/analysis.py".format(parent), "/tmp/{}".format(self.token)])
+                [sys.executable, "{}/analysis/analysis.py".format(parent), "-f", "/tmp/{}".format(self.token)])
             return {"status": True,
                     "message": "Analysis started",
                     "token": self.token}
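
The extra "-f" argument means the backend now launches the analysis explicitly in frontend mode. The spawned command is roughly the following (token and install prefix are illustrative):

    import subprocess
    import sys

    parent = "/usr/share/tinycheck"  # hypothetical install prefix
    token = "9f2c1a0b"               # hypothetical capture token

    # Equivalent of the sp.Popen call above: frontend mode plus the capture directory.
    subprocess.Popen([sys.executable, "{}/analysis/analysis.py".format(parent),
                      "-f", "/tmp/{}".format(token)])
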