First commit!
analysis/classes/engine.py (Executable file, 739 lines added)
@@ -0,0 +1,739 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import json
import os
import re
import subprocess as sp
import sys
from datetime import datetime
from ipaddress import IPv4Address, IPv6Address

import ssl
import socket
import OpenSSL
import requests

import pydig
import whois
from publicsuffix2 import get_sld
from netaddr import IPAddress, IPNetwork
from classes.jarm import get_jarm
from utils import get_config, get_iocs, get_whitelist


class Engine():
|
||||
|
||||
def __init__(self, capture_directory):
|
||||
|
||||
# Set some vars.
|
||||
self.analysis_start = datetime.now()
|
||||
self.connected = self.check_internet()
|
||||
self.working_dir = capture_directory
|
||||
self.assets_dir = f"{capture_directory}/assets/"
|
||||
self.rules_file = "/tmp/rules.rules"
|
||||
self.pcap_path = os.path.join(self.working_dir, "capture.pcap")
|
||||
self.records = []
|
||||
self.alerts = []
|
||||
self.dns = []
|
||||
self.files = []
|
||||
self.whitelist = []
|
||||
self.uncategorized = []
|
||||
self.analysed = []
|
||||
self.dns_failed = []
|
||||
self.dns_checked = []
|
||||
self.cert_checked = []
|
||||
self.errors = []
|
||||
self.analysis_end = None
|
||||
|
||||
# Get configuration
|
||||
self.heuristics_analysis = get_config(("analysis", "heuristics"))
|
||||
self.iocs_analysis = get_config(("analysis", "iocs"))
|
||||
self.whitelist_analysis = get_config(("analysis", "whitelist"))
|
||||
self.active_analysis = get_config(("analysis", "active"))
|
||||
self.userlang = get_config(("frontend", "user_lang"))
|
||||
self.max_ports = get_config(("analysis", "max_ports"))
|
||||
self.http_default_ports = get_config(("analysis", "http_default_ports"))
|
||||
self.tls_default_ports = get_config(("analysis", "tls_default_ports"))
|
||||
self.free_issuers = get_config(("analysis", "free_issuers"))
|
||||
self.max_alerts = get_config(("analysis", "max_alerts"))
|
||||
self.indicators_types = get_config(("analysis", "indicators_types"))
|
||||
|
||||
# Save detection methods used.
|
||||
self.detection_methods = { "iocs" : self.iocs_analysis,
|
||||
"heuristics" : self.heuristics_analysis,
|
||||
"active" : self.active_analysis }
|
||||
|
||||
# Retrieve IOCs.
|
||||
if self.iocs_analysis:
|
||||
self.bl_cidrs = [[IPNetwork(cidr[0]), cidr[1]] for cidr in get_iocs("cidr")]
|
||||
self.bl_hosts = get_iocs("ip4addr") + get_iocs("ip6addr")
|
||||
self.tor_nodes = self.get_tor_nodes()
|
||||
self.bl_domains = get_iocs("domain")
|
||||
self.bl_freedns = get_iocs("freedns")
|
||||
self.bl_certs = get_iocs("sha1cert")
|
||||
self.bl_jarms = get_iocs("jarm")
|
||||
self.bl_nameservers = get_iocs("ns")
|
||||
self.bl_tlds = get_iocs("tld")
|
||||
|
||||
# Retrieve whitelisted items.
|
||||
if self.whitelist_analysis:
|
||||
self.wl_cidrs = [IPNetwork(cidr) for cidr in get_whitelist("cidr")]
|
||||
self.wl_hosts = get_whitelist("ip4addr") + get_whitelist("ip6addr") + self.get_public_ip()
|
||||
self.wl_domains = get_whitelist("domain")
|
||||
|
||||
# Load template language
|
||||
if not re.match("^[a-z]{2,3}$", self.userlang): self.userlang = "en"
|
||||
|
||||
with open(os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), "locales/{}.json".format(self.userlang))) as f:
|
||||
self.template = json.load(f)["alerts"]
|
||||
|
||||
def check_internet(self) -> bool:
|
||||
"""Check the internet link just with a small http request
|
||||
to an URL present in the configuration
|
||||
|
||||
Returns:
|
||||
bool: True if everything works.
|
||||
"""
|
||||
try:
|
||||
url = get_config(("network", "internet_check"))
|
||||
requests.get(url, timeout=3)
|
||||
return True
|
||||
except:
|
||||
return False
|
||||
|
||||
def get_public_ip(self) -> list:
|
||||
"""Get the public IP address
|
||||
|
||||
Returns:
|
||||
list: list containing the public IP address.
|
||||
"""
|
||||
if self.connected:
|
||||
try:
|
||||
return [requests.get("https://api.ipify.org", timeout=3).text]
|
||||
except:
|
||||
return []
|
||||
else:
|
||||
return []
|
||||
|
||||
def start_engine(self):
|
||||
""" This method starts suricata and then launch the
|
||||
parsers to analyse the output logs.
|
||||
"""
|
||||
|
||||
# Parse the eve.json file.
|
||||
self.parse_eve_file()
|
||||
|
||||
# For each type of records, check it against heuristics.
|
||||
for record in self.records:
|
||||
if self.whitelist_analysis: self.check_whitelist(record)
|
||||
self.check_domains(record)
|
||||
self.check_flow(record)
|
||||
self.check_tls(record)
|
||||
self.check_http(record)
|
||||
|
||||
# Check for failed DNS answers (if SpyGuard is not connected)
|
||||
for dnsname in list(set(self.dns_failed)):
|
||||
self.check_dnsname(dnsname)
|
||||
|
||||
def parse_eve_file(self):
|
||||
"""This method parses the eve.json file produced by suricata.
|
||||
For each record, it look at the record type and then append the self.record
|
||||
dictionnary which contains valuable data to look at suspicious stuff.
|
||||
"""
|
||||
for record in open(f"{self.assets_dir}eve.json", "r").readlines():
|
||||
record = json.loads(record)
|
||||
try:
|
||||
if "flow" in record:
|
||||
if "app_proto" not in record: record["app_proto"] = "failed"
|
||||
proto = { "name" : record["app_proto"].upper() if record["app_proto"] != "failed" else record["proto"].upper(), "port" : record["dest_port"] if "dest_port" in record else -1 }
|
||||
|
||||
if record["dest_ip"] not in [r["ip_dst"] for r in self.records]:
|
||||
self.records.append({
|
||||
"ip_dst" : record["dest_ip"],
|
||||
"whitelisted" : False,
|
||||
"suspicious" : False,
|
||||
"protocols" : [proto],
|
||||
"domains" : [],
|
||||
"certificates" : []
|
||||
})
|
||||
else:
|
||||
for rec in self.records:
|
||||
if record["dest_ip"] == rec["ip_dst"]:
|
||||
if proto not in rec["protocols"]:
|
||||
rec["protocols"].append(proto)
|
||||
except Exception as e:
|
||||
self.errors.append(f"Issue when processing the following eve record (flow): {json.dumps(record)}")
|
||||
|
||||
for record in open(f"{self.assets_dir}eve.json", "r").readlines():
|
||||
record = json.loads(record)
|
||||
try:
|
||||
if "tls" in record:
|
||||
for rec in self.records:
|
||||
if record["dest_ip"] == rec["ip_dst"]:
|
||||
if "version" in record["tls"]:
|
||||
if float(record["tls"]["version"].split(" ")[1]) < 1.3 and not "session_resumed" in record["tls"]:
|
||||
if record["tls"] not in rec["certificates"]:
|
||||
record["tls"]["port"] = record["dest_port"]
|
||||
rec["certificates"].append(record["tls"])
|
||||
else:
|
||||
if "sni" in record["tls"] and record["tls"]["sni"] not in [c["sni"] for c in rec["certificates"]]:
|
||||
rec["certificates"].append({ "sni" : record["tls"]["sni"], "version" : record["tls"]["version"], "port" : record["dest_port"] })
|
||||
else:
|
||||
rec["certificates"].append({ "version" : record["tls"]["version"], "port" : record["dest_port"] })
|
||||
except Exception as e:
|
||||
self.errors.append(f"Issue when processing the following eve record (tls): {json.dumps(record)}")
|
||||
|
||||
for record in open(f"{self.assets_dir}eve.json", "r").readlines():
|
||||
record = json.loads(record)
|
||||
try:
|
||||
if "http" in record:
|
||||
for rec in self.records:
|
||||
if record["dest_ip"] == rec["ip_dst"]:
|
||||
d = { "hostname" : record["http"]["hostname"] }
|
||||
if "http_user_agent" in record["http"]:
|
||||
d["user-agent"] = record["http"]["http_user_agent"]
|
||||
if "http" in rec:
|
||||
if not d in rec["http"]:
|
||||
rec["http"].append(d)
|
||||
else:
|
||||
rec["http"] = [d]
|
||||
except Exception as e:
|
||||
self.errors.append(f"Issue when processing the following eve record (http): {json.dumps(record)}")
|
||||
|
||||
for record in open(f"{self.assets_dir}eve.json", "r").readlines():
|
||||
record = json.loads(record)
|
||||
try:
|
||||
if "dns" in record:
|
||||
if record["dns"]["type"] == "answer":
|
||||
for rec in self.records:
|
||||
if record["dns"]["rcode"] == "NOERROR":
|
||||
if "grouped" in record["dns"]:
|
||||
if "A" in record["dns"]["grouped"] and rec["ip_dst"] in record["dns"]["grouped"]["A"]:
|
||||
if record["dns"]["rrname"] not in rec["domains"]:
|
||||
rec["domains"].append(record["dns"]["rrname"])
|
||||
elif "AAAA" in record["dns"]["grouped"] and rec["ip_dst"] in record["dns"]["grouped"]["AAAA"]:
|
||||
if record["dns"]["rrname"] not in rec["domains"]:
|
||||
rec["domains"].append(record["dns"]["rrname"])
|
||||
elif record["dns"]["rcode"] == "SERVFAIL":
|
||||
self.dns_failed.append(record["dns"]["rrname"])
|
||||
except Exception as e:
|
||||
self.errors.append(f"Issue when processing the following eve record (dns answer): {json.dumps(record)}")
|
||||
|
||||
# This pass handles the case where SpyGuard is not connected to the Internet.
# We still analyze the unanswered DNS queries.
|
||||
for record in open(f"{self.assets_dir}eve.json", "r").readlines():
|
||||
record = json.loads(record)
|
||||
try:
|
||||
if "dns" in record:
|
||||
if record["dns"]["type"] == "query":
|
||||
if record["dns"]["rrname"] not in sum([r["domains"] for r in self.records], []):
|
||||
self.records.append({
|
||||
"ip_dst" : "--",
|
||||
"whitelisted" : False,
|
||||
"suspicious" : False,
|
||||
"protocols" : [{"name" : "DNS", "port" : "53"}],
|
||||
"domains" : [record["dns"]["rrname"]],
|
||||
"certificates" : []
|
||||
})
|
||||
except Exception as e:
|
||||
self.errors.append(f"Issue when processing the following eve record (dns query): {json.dumps(record)}")
|
||||
|
||||
for record in open(f"{self.assets_dir}eve.json", "r").readlines():
|
||||
record = json.loads(record)
|
||||
try:
|
||||
if "alert" in record and record["event_type"] == "alert":
|
||||
for rec in self.records:
|
||||
if record["dest_ip"] == rec["ip_dst"]:
|
||||
rec["suspicious"] = True
|
||||
self.alerts.append({"title": self.template["SNORT-01"]["title"].format(record["alert"]["signature"]),
|
||||
"description": self.template["SNORT-01"]["description"].format(rec["ip_dst"]),
|
||||
"host": rec["ip_dst"],
|
||||
"level": "High",
|
||||
"id": "SNORT-01"})
|
||||
|
||||
except Exception as e:
|
||||
self.errors.append(f"Issue when processing the following eve record (dns answer): {json.dumps(record)}")
|
||||
|
||||
|
||||
def check_whitelist(self, record):
|
||||
""" This method is asked on each record. It:
|
||||
|
||||
1. Check if the associated IP(v4/6) Address can be whitelisted
|
||||
2. Check if one of the associated domain names can be whitelisted.
|
||||
|
||||
If its the case, the "whitelisted" key of the record is set to True.
|
||||
Therefore, the record will be ignored for the rest of the analysis.
|
||||
Args:
|
||||
record (dict): record to be processed.
|
||||
"""
|
||||
|
||||
try:
|
||||
assert IPv4Address(record["ip_dst"])
|
||||
|
||||
if IPv4Address('224.0.0.0') <= IPv4Address(record["ip_dst"]) <= IPv4Address('239.255.255.255'):
|
||||
record["whitelisted"] = True
|
||||
return
|
||||
|
||||
for cidr in self.wl_cidrs:
|
||||
if IPAddress(record["ip_dst"]) in cidr:
|
||||
record["whitelisted"] = True
|
||||
return
|
||||
|
||||
for ip in self.wl_hosts:
|
||||
if record["ip_dst"] == ip:
|
||||
record["whitelisted"] = True
|
||||
return
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
assert IPv6Address(record["ip_dst"])
|
||||
|
||||
if [record["ip_dst"].startswith(prefix) for prefix in ["fe80", "fc00", "ff02"]]:
|
||||
record["whitelisted"] = True
|
||||
return
|
||||
|
||||
for ip in self.wl_hosts:
|
||||
if record["ip_dst"] == ip:
|
||||
record["whitelisted"] = True
|
||||
return
|
||||
except:
|
||||
pass
|
||||
|
||||
# We check if at least one of the associated
|
||||
# domains is whitelisted
|
||||
for dom in self.wl_domains:
|
||||
for domain in record["domains"]:
|
||||
if domain.endswith(dom):
|
||||
record["whitelisted"] = True
|
||||
return
|
||||
|
||||
def check_domains(self, record):
|
||||
"""Check the domains associated to each record.
|
||||
First this method checks if the record is whitelisted. If not:
|
||||
1. Leverage a low alert if the record don't have any associated DNSName
|
||||
2. Check each domain associated to the record by calling check_dnsname.
|
||||
Args:
|
||||
record (dict): record to be processed.
|
||||
"""
|
||||
if record["whitelisted"]: return
|
||||
|
||||
if self.heuristics_analysis:
|
||||
# Otherwise, we alert the user that an IP hasn't been resolved by
# a DNS answer during the session...
|
||||
if record["domains"] == []:
|
||||
record["suspicious"] = True
|
||||
self.alerts.append({"title": self.template["PROTO-05"]["title"].format(record["ip_dst"]),
|
||||
"description": self.template["PROTO-05"]["description"].format(record["ip_dst"]),
|
||||
"host": record["ip_dst"],
|
||||
"level": "Low",
|
||||
"id": "PROTO-05"})
|
||||
|
||||
# Check each associated domain.
|
||||
for domain in record["domains"]:
|
||||
if self.check_dnsname(domain):
|
||||
record["suspicious"] = True
|
||||
|
||||
def check_dnsname(self, dnsname):
|
||||
"""Check a domain name against a set of IOCs / heuristics.
|
||||
1. Check if the parent domain is blacklisted.
|
||||
2. Check if the parent domain is a Free DNS.
|
||||
3. Check if the domain extension is a suspicious TLD.
|
||||
4. Check if the name servers associated to the domain are suspicious.
|
||||
5. Check if the domain has been registered recently (less than one year ago).
|
||||
Args:
|
||||
dnsname (str): domain name to be checked.
|
||||
Returns:
|
||||
suspicious (bool): True if an alert has been raised.
|
||||
"""
|
||||
suspicious = False
|
||||
|
||||
if self.iocs_analysis:
|
||||
for domain in self.bl_domains:
|
||||
if dnsname.endswith(domain[0]) and any(t in self.indicators_types for t in [domain[1], "all"]):
|
||||
if domain[1] == "dual":
|
||||
suspicious = True
|
||||
self.alerts.append({"title": self.template["IOC-12"]["title"],
|
||||
"description": self.template["IOC-12"]["description"].format(domain[0]),
|
||||
"host": domain[0],
|
||||
"level": "Low",
|
||||
"id": "IOC-12"})
|
||||
elif domain[1] == "tracker":
|
||||
suspicious = True
|
||||
self.alerts.append({"title": self.template["IOC-04"]["title"].format(domain[0], "tracker"),
|
||||
"description": self.template["IOC-04"]["description"].format(domain[0], "tracker"),
|
||||
"host": domain[0],
|
||||
"level": "Low",
|
||||
"id": "IOC-04"})
|
||||
elif domain[1] == "doh":
|
||||
suspicious = True
|
||||
self.alerts.append({"title": self.template["IOC-13"]["title"].format(f"{dnsname}"),
|
||||
"description": self.template["IOC-13"]["description"].format(f"{dnsname}"),
|
||||
"host": dnsname,
|
||||
"level": "Low",
|
||||
"id": "IOC-13"})
|
||||
else:
|
||||
suspicious = True
|
||||
self.alerts.append({"title": self.template["IOC-03"]["title"].format(dnsname, domain[1].upper()),
|
||||
"description": self.template["IOC-03"]["description"].format(dnsname),
|
||||
"host": dnsname,
|
||||
"level": "High",
|
||||
"id": "IOC-03"})
|
||||
for domain in self.bl_freedns:
|
||||
if dnsname.endswith(domain[0]) and any(t in self.indicators_types for t in [domain[1], "all"]):
|
||||
suspicious = True
|
||||
self.alerts.append({"title": self.template["IOC-05"]["title"].format(dnsname),
|
||||
"description": self.template["IOC-05"]["description"].format(dnsname),
|
||||
"host": dnsname,
|
||||
"level": "Moderate",
|
||||
"id": "IOC-05"})
|
||||
|
||||
if self.heuristics_analysis:
|
||||
for domain in self.bl_tlds:
|
||||
if dnsname.endswith(domain[0]) and any(t in self.indicators_types for t in [domain[1], "all"]):
|
||||
suspicious = True
|
||||
self.alerts.append({"title": self.template["IOC-06"]["title"].format(dnsname),
|
||||
"description": self.template["IOC-06"]["description"].format(dnsname, domain[0]),
|
||||
"host": dnsname,
|
||||
"level": "Low",
|
||||
"id": "IOC-06"})
|
||||
|
||||
if self.active_analysis and self.connected:
|
||||
domain = get_sld(dnsname)
|
||||
if domain not in self.dns_checked:
|
||||
self.dns_checked.append(domain)
|
||||
try:
|
||||
name_servers = pydig.query(domain, "NS")
|
||||
if len(name_servers):
|
||||
for ns in self.bl_nameservers:
|
||||
if name_servers[0].endswith(".{}.".format(ns[0])) and any(t in self.indicators_types for t in [ns[1], "all"]):
|
||||
suspicious = True
|
||||
self.alerts.append({"title": self.template["ACT-01"]["title"].format(dnsname, name_servers[0]),
|
||||
"description": self.template["ACT-01"]["description"].format(dnsname),
|
||||
"host": dnsname,
|
||||
"level": "Moderate",
|
||||
"id": "ACT-01"})
|
||||
except Exception as e:
|
||||
self.errors.append(f"Issue when doing a dig NS query to {domain}, are you connected? Error: {str(e)}")
|
||||
|
||||
try:
|
||||
whois_record = whois.whois(domain)
|
||||
creation_date = whois_record.creation_date if type(whois_record.creation_date) is not list else whois_record.creation_date[0]
|
||||
creation_days = abs((datetime.now() - creation_date).days)
|
||||
if creation_days < 365:
|
||||
suspicious = True
|
||||
self.alerts.append({"title": self.template["ACT-02"]["title"].format(dnsname, creation_days),
|
||||
"description": self.template["ACT-02"]["description"].format(dnsname),
|
||||
"host": dnsname,
|
||||
"level": "Moderate",
|
||||
"id": "ACT-02"})
|
||||
except Exception as e:
|
||||
self.errors.append(f"Issue when doing a WHOIS query to {domain}, are you connected? Error: {str(e)}")
|
||||
|
||||
return suspicious
|
||||
|
||||
|
||||
def check_flow(self, record):
|
||||
"""Check a network flow against a set of IOCs / heuristics.
|
||||
1. Check if the IP Address is blacklisted
|
||||
2. Check if the IP Address is inside a blacklisted CIDR
|
||||
3. Check if the UDP or ICMP protocol is going outside of the local network.
|
||||
4. Check if the HTTP protocol is not using default HTTP ports.
|
||||
5. Check if the network flow is using a port > 1024.
|
||||
Args:
|
||||
record (dict): record to be processed.
|
||||
Returns:
|
||||
suspicious (bool): True if an alert has been raised.
|
||||
"""
|
||||
if record["whitelisted"]: return
|
||||
|
||||
resolved_host = record["domains"][0] if len(record["domains"]) else record["ip_dst"]
|
||||
|
||||
if self.iocs_analysis:
|
||||
for host in self.bl_hosts:
|
||||
if record["ip_dst"] == host[0] and any(t in self.indicators_types for t in [host[1], "all"]):
|
||||
if host[1] == "dual":
|
||||
record["suspicious"] = True
|
||||
self.alerts.append({"title": self.template["IOC-12"]["title"],
|
||||
"description": self.template["IOC-12"]["description"].format(resolved_host),
|
||||
"host": resolved_host,
|
||||
"level": "Low",
|
||||
"id": "IOC-12"})
|
||||
if host[1] == "tracker":
|
||||
record["suspicious"] = True
|
||||
self.alerts.append({"title": self.template["IOC-04"]["title"].format(resolved_host, "tracker"),
|
||||
"description": self.template["IOC-04"]["description"].format(resolved_host, "tracker"),
|
||||
"host": resolved_host,
|
||||
"level": "Low",
|
||||
"id": "IOC-04"})
|
||||
elif host[1] == "doh":
|
||||
if 443 in [p["port"] for p in record["protocols"]]:
|
||||
record["suspicious"] = True
|
||||
self.alerts.append({"title": self.template["IOC-13"]["title"].format(f"{resolved_host}"),
|
||||
"description": self.template["IOC-13"]["description"].format(f"{resolved_host}"),
|
||||
"host": resolved_host,
|
||||
"level": "Low",
|
||||
"id": "IOC-13"})
|
||||
else:
|
||||
record["suspicious"] = True
|
||||
self.alerts.append({"title": self.template["IOC-01"]["title"].format(resolved_host, record["ip_dst"], host[1].upper()),
|
||||
"description": self.template["IOC-01"]["description"].format(f"{resolved_host} ({record['ip_dst']})"),
|
||||
"host": resolved_host,
|
||||
"level": "High",
|
||||
"id": "IOC-01"})
|
||||
break
|
||||
|
||||
for host in self.tor_nodes:
|
||||
if record["ip_dst"] == host:
|
||||
record["suspicious"] = True
|
||||
self.alerts.append({"title": self.template["IOC-11"]["title"].format(resolved_host, record["ip_dst"]),
|
||||
"description": self.template["IOC-11"]["description"].format(f"{resolved_host} ({record['ip_dst']})"),
|
||||
"host": resolved_host,
|
||||
"level": "High",
|
||||
"id": "IOC-11"})
|
||||
break
|
||||
|
||||
for cidr in self.bl_cidrs:
|
||||
try:
|
||||
if IPAddress(record["ip_dst"]) in cidr[0] and any(t in self.indicators_types for t in [cidr[1], "all"]):
|
||||
record["suspicious"] = True
|
||||
self.alerts.append({"title": self.template["IOC-02"]["title"].format(resolved_host, cidr[0], cidr[1].upper()),
|
||||
"description": self.template["IOC-02"]["description"].format(record["ip_dst"]),
|
||||
"host": resolved_host,
|
||||
"level": "Moderate",
|
||||
"id": "IOC-02"})
|
||||
except:
|
||||
continue
|
||||
|
||||
if self.heuristics_analysis:
|
||||
for protocol in record["protocols"]:
|
||||
if protocol["name"] in ["UDP", "ICMP", "IPV6-ICMP"]:
|
||||
record["suspicious"] = True
|
||||
self.alerts.append({"title": self.template["PROTO-01"]["title"].format(protocol["name"], resolved_host),
|
||||
"description": self.template["PROTO-01"]["description"].format(protocol["name"], resolved_host),
|
||||
"host": resolved_host,
|
||||
"level": "Moderate",
|
||||
"id": "PROTO-01"})
|
||||
try:
|
||||
if protocol["port"] >= int(self.max_ports):
|
||||
record["suspicious"] = True
|
||||
self.alerts.append({"title": self.template["PROTO-02"]["title"].format("", resolved_host, self.max_ports),
|
||||
"description": self.template["PROTO-02"]["description"].format("", resolved_host, protocol["port"]),
|
||||
"host": resolved_host,
|
||||
"level": "Low",
|
||||
"id": "PROTO-02"})
|
||||
except:
|
||||
pass
|
||||
|
||||
if protocol["name"] == "HTTP":
|
||||
record["suspicious"] = True
|
||||
self.alerts.append({"title": self.template["PROTO-03"]["title"].format(resolved_host),
|
||||
"description": self.template["PROTO-03"]["description"].format(resolved_host),
|
||||
"host": resolved_host,
|
||||
"level": "Low",
|
||||
"id": "PROTO-03"})
|
||||
|
||||
if protocol["name"] == "HTTP" and protocol["port"] not in self.http_default_ports:
|
||||
record["suspicious"] = True
|
||||
self.alerts.append({"title": self.template["PROTO-04"]["title"].format(resolved_host, protocol["port"]),
|
||||
"description": self.template["PROTO-04"]["description"].format(resolved_host, protocol["port"]),
|
||||
"host": resolved_host,
|
||||
"level": "Moderate",
|
||||
"id": "PROTO-04"})
|
||||
|
||||
def check_tls(self, record):
|
||||
"""Check a TLS protocol and certificates against a set of IOCs / heuristics.
|
||||
Note that since TLS 1.3, the certificate is no longer exchanged in clear text,
therefore we need to check it "actively" via the method active_check_ssl.
|
||||
|
||||
1. Check if the TLS record is not using default TLS ports.
|
||||
2. Check if one of the certificates is a free one, like Let's Encrypt.
|
||||
3. Check if the certificate is self-signed.
|
||||
4. If the certificate has an SNI, check the domain by calling check_dnsname.
|
||||
Args:
|
||||
record (dict): record to be processed.
|
||||
Returns:
|
||||
suspicious (bool): True if an alert has been raised.
|
||||
"""
|
||||
if record["whitelisted"]: return
|
||||
|
||||
resolved_host = record["domains"][0] if len(record["domains"]) else record["ip_dst"]
|
||||
|
||||
for certificate in record["certificates"]:
|
||||
|
||||
try:
|
||||
if "sni" in certificate and certificate["sni"] not in record["domains"]:
|
||||
if certificate["sni"]:
|
||||
if self.check_dnsname(certificate["sni"]):
|
||||
record["suspicious"] = True
|
||||
|
||||
if certificate["port"] not in self.tls_default_ports:
|
||||
record["suspicious"] = True
|
||||
self.alerts.append({"title": self.template["SSL-01"]["title"].format(certificate["port"], resolved_host),
|
||||
"description": self.template["SSL-01"]["description"].format(resolved_host),
|
||||
"host": resolved_host,
|
||||
"level": "Moderate",
|
||||
"id": "SSL-01"})
|
||||
|
||||
if float(certificate["version"].split(" ")[1]) < 1.3 and "issuerdn" in certificate:
|
||||
|
||||
if certificate["issuerdn"] in self.free_issuers:
|
||||
record["suspicious"] = True
|
||||
self.alerts.append({"title": self.template["SSL-02"]["title"].format(resolved_host),
|
||||
"description": self.template["SSL-02"]["description"],
|
||||
"host": resolved_host,
|
||||
"level": "Moderate",
|
||||
"id": "SSL-02"})
|
||||
|
||||
elif certificate["issuerdn"] == certificate["subject"]:
|
||||
record["suspicious"] = True
|
||||
self.alerts.append({"title": self.template["SSL-03"]["title"].format(resolved_host),
|
||||
"description": self.template["SSL-03"]["description"].format(resolved_host),
|
||||
"host": resolved_host,
|
||||
"level": "Moderate",
|
||||
"id": "SSL-03"})
|
||||
else:
|
||||
if self.active_analysis and self.connected:
|
||||
if "sni" in certificate:
|
||||
if certificate["sni"] not in self.cert_checked:
|
||||
self.cert_checked.append(certificate["sni"])
|
||||
if self.active_check_ssl(certificate["sni"], certificate["port"]):
|
||||
record["suspicious"] = True
|
||||
break
|
||||
else:
|
||||
if resolved_host not in self.cert_checked:
|
||||
self.cert_checked.append(resolved_host)
|
||||
if self.active_check_ssl(resolved_host, certificate["port"]):
|
||||
record["suspicious"] = True
|
||||
break
|
||||
except Exception as e:
|
||||
self.errors.append(f"Issue when processing the following certificate (check_tls): {json.dumps(certificate)}")
|
||||
|
||||
def get_tor_nodes(self) -> list:
|
||||
"""Get a list of TOR nodes from dan.me.uk.
|
||||
|
||||
Returns:
|
||||
list: list of TOR nodes
|
||||
"""
|
||||
|
||||
nodes = []
|
||||
if os.path.exists("/tmp/tor_nodes.lst"):
|
||||
with open("/tmp/tor_nodes.lst", "r") as f:
|
||||
for l in f.readlines():
|
||||
nodes.append(l.strip())
|
||||
else:
|
||||
if self.connected:
|
||||
try:
|
||||
nodes_list = requests.get("https://www.dan.me.uk/torlist/", timeout=10).text
|
||||
with open("/tmp/tor_nodes.lst", "w+") as f:
|
||||
f.write(nodes_list)
|
||||
for l in nodes_list.splitlines():
|
||||
nodes.append(l.strip())
|
||||
except:
|
||||
self.errors.append(f"Issue when trying to get TOR nodes from dan.me.uk")
|
||||
return nodes
|
||||
|
||||
|
||||
def check_http(self, record):
|
||||
"""Check the HTTP hostname against a set of IOCs / heuristics.
|
||||
Args:
|
||||
record (dict): record to be processed.
|
||||
Returns:
|
||||
suspicious (bool): True if an alert has been raised.
|
||||
"""
|
||||
if record["whitelisted"]: return
|
||||
|
||||
if "http" in record:
|
||||
for http in record["http"]:
|
||||
if http["hostname"] not in record["domains"]:
|
||||
if re.match("^[a-z\.0-9\-]+\.[a-z\-]{2,}$", http["hostname"]):
|
||||
if http["hostname"]:
|
||||
if self.check_dnsname(http["hostname"]):
|
||||
record["suspicious"] = True
|
||||
|
||||
def active_check_ssl(self, host, port):
|
||||
"""This method:
|
||||
|
||||
1. Check the issuer and subject of a certificate directly by connecting
|
||||
to the remote server in order to bypass TLS 1.3+ restrictions.
|
||||
Most of this method was taken from: https://tinyurl.com/3vsvhu79
|
||||
|
||||
2. Get the JARM of the remote server by using the reference PoC library
from Salesforce.
|
||||
|
||||
Args:
|
||||
host (str): Host to connect to
|
||||
port (int): Port to connect to
|
||||
"""
|
||||
try:
|
||||
suspect = False
|
||||
context = ssl.create_default_context()
|
||||
conn = socket.create_connection((host, port))
|
||||
sock = context.wrap_socket(conn, server_hostname=host)
|
||||
sock.settimeout(5)
|
||||
try:
|
||||
der_cert = sock.getpeercert(True)
|
||||
finally:
|
||||
sock.close()
|
||||
|
||||
if "der_cert" in locals():
|
||||
|
||||
certificate = ssl.DER_cert_to_PEM_cert(der_cert)
|
||||
x509 = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, certificate)
|
||||
|
||||
issuer = dict(x509.get_issuer().get_components())
|
||||
subject = dict(x509.get_subject().get_components())
|
||||
certhash = x509.digest("sha1").decode("utf8").replace(":", "").lower()
|
||||
issuer = ", ".join(f"{k.decode('utf8')}={v.decode('utf8')}" for k, v in issuer.items())
|
||||
subject = ", ".join(f"{k.decode('utf8')}={v.decode('utf8')}" for k, v in subject.items())
|
||||
|
||||
if issuer in self.free_issuers:
|
||||
self.alerts.append({"title": self.template["SSL-02"]["title"].format(host),
|
||||
"description": self.template["SSL-02"]["description"],
|
||||
"host": host,
|
||||
"level": "Moderate",
|
||||
"id": "SSL-02"})
|
||||
suspect = True
|
||||
|
||||
if issuer == subject:
|
||||
self.alerts.append({"title": self.template["SSL-03"]["title"].format(host),
|
||||
"description": self.template["SSL-03"]["description"].format(host),
|
||||
"host": host,
|
||||
"level": "Moderate",
|
||||
"id": "SSL-03"})
|
||||
suspect = True
|
||||
|
||||
if self.iocs_analysis:
|
||||
for cert in self.bl_certs:
|
||||
if cert[0] == certhash and any(t in self.indicators_types for t in [cert[1], "all"]):
|
||||
self.alerts.append({"title": self.template["SSL-04"]["title"].format(host, cert[1].upper()),
|
||||
"description": self.template["SSL-04"]["description"].format(host),
|
||||
"host": host,
|
||||
"level": "High",
|
||||
"id": "SSL-04"})
|
||||
suspect = True
|
||||
|
||||
if self.bl_jarms:
|
||||
host_jarm = get_jarm(host, port)
|
||||
for jarm in self.bl_jarms:
|
||||
if jarm[0] == host_jarm and any(t in self.indicators_types for t in [jarm[1], "all"]):
|
||||
self.alerts.append({"title": self.template["SSL-05"]["title"].format(host, cert[1].upper()),
|
||||
"description": self.template["SSL-05"]["description"].format(host),
|
||||
"host": host,
|
||||
"level": "High",
|
||||
"id": "SSL-05"})
|
||||
suspect = True
|
||||
return suspect
|
||||
except:
|
||||
self.errors.append(f"Issue when trying to grab the SSL certificate located at {host}:{port}")
|
||||
return False
|
||||
|
||||
def get_alerts(self):
|
||||
"""Retrieves the alerts triggered during the analysis
|
||||
|
||||
Returns:
|
||||
list: list of the alerts.
|
||||
"""
|
||||
self.analysis_end = datetime.now()
|
||||
return [dict(t) for t in {tuple(d.items()) for d in self.alerts}]
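For context, a minimal usage sketch of this class, assuming a capture directory that already contains the assets/eve.json produced by Suricata. The driver below is hypothetical and not part of this commit; the path is a placeholder.

# Hypothetical driver script, for illustration only.
from classes.engine import Engine

engine = Engine("/data/capture_0001")   # directory holding assets/eve.json (placeholder path)
engine.start_engine()                   # parse eve.json and run the IOC / heuristic checks
for alert in engine.get_alerts():       # deduplicated list of alert dicts
    print(alert["level"], alert["id"], alert["host"], alert["title"])
if engine.errors:
    print("Errors during analysis:", engine.errors)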
analysis/classes/jarm.py (New file, 477 lines added)
@@ -0,0 +1,477 @@
|
||||
# Version 1.0 (November 2020)
|
||||
#
|
||||
# Created by:
|
||||
# John Althouse
|
||||
# Andrew Smart
|
||||
# RJ Nunaly
|
||||
# Mike Brady
|
||||
#
|
||||
# Converted to Python by:
|
||||
# Caleb Yu
|
||||
#
|
||||
# Copyright (c) 2020, salesforce.com, inc.
|
||||
# All rights reserved.
|
||||
# Licensed under the BSD 3-Clause license.
|
||||
# For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause
|
||||
#
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import codecs
|
||||
import socket
|
||||
import struct
|
||||
import os
|
||||
import random
|
||||
import hashlib
|
||||
import ipaddress
|
||||
|
||||
#Randomly choose a grease value
|
||||
def choose_grease():
|
||||
grease_list = [b"\x0a\x0a", b"\x1a\x1a", b"\x2a\x2a", b"\x3a\x3a", b"\x4a\x4a", b"\x5a\x5a", b"\x6a\x6a", b"\x7a\x7a", b"\x8a\x8a", b"\x9a\x9a", b"\xaa\xaa", b"\xba\xba", b"\xca\xca", b"\xda\xda", b"\xea\xea", b"\xfa\xfa"]
|
||||
return random.choice(grease_list)
|
||||
|
||||
def packet_building(jarm_details):
|
||||
payload = b"\x16"
|
||||
#Version Check
|
||||
if jarm_details[2] == "TLS_1.3":
|
||||
payload += b"\x03\x01"
|
||||
client_hello = b"\x03\x03"
|
||||
elif jarm_details[2] == "SSLv3":
|
||||
payload += b"\x03\x00"
|
||||
client_hello = b"\x03\x00"
|
||||
elif jarm_details[2] == "TLS_1":
|
||||
payload += b"\x03\x01"
|
||||
client_hello = b"\x03\x01"
|
||||
elif jarm_details[2] == "TLS_1.1":
|
||||
payload += b"\x03\x02"
|
||||
client_hello = b"\x03\x02"
|
||||
elif jarm_details[2] == "TLS_1.2":
|
||||
payload += b"\x03\x03"
|
||||
client_hello = b"\x03\x03"
|
||||
#Random values in client hello
|
||||
client_hello += os.urandom(32)
|
||||
session_id = os.urandom(32)
|
||||
session_id_length = struct.pack(">B", len(session_id))
|
||||
client_hello += session_id_length
|
||||
client_hello += session_id
|
||||
#Get ciphers
|
||||
cipher_choice = get_ciphers(jarm_details)
|
||||
client_suites_length = struct.pack(">H", len(cipher_choice))
|
||||
client_hello += client_suites_length
|
||||
client_hello += cipher_choice
|
||||
client_hello += b"\x01" #cipher methods
|
||||
client_hello += b"\x00" #compression_methods
|
||||
#Add extensions to client hello
|
||||
extensions = get_extensions(jarm_details)
|
||||
client_hello += extensions
|
||||
#Finish packet assembly
|
||||
inner_length = b"\x00"
|
||||
inner_length += struct.pack(">H", len(client_hello))
|
||||
handshake_protocol = b"\x01"
|
||||
handshake_protocol += inner_length
|
||||
handshake_protocol += client_hello
|
||||
outer_length = struct.pack(">H", len(handshake_protocol))
|
||||
payload += outer_length
|
||||
payload += handshake_protocol
|
||||
return payload
|
||||
|
||||
def get_ciphers(jarm_details):
|
||||
selected_ciphers = b""
|
||||
#Two cipher lists: NO1.3 and ALL
|
||||
if jarm_details[3] == "ALL":
|
||||
list = [b"\x00\x16", b"\x00\x33", b"\x00\x67", b"\xc0\x9e", b"\xc0\xa2", b"\x00\x9e", b"\x00\x39", b"\x00\x6b", b"\xc0\x9f", b"\xc0\xa3", b"\x00\x9f", b"\x00\x45", b"\x00\xbe", b"\x00\x88", b"\x00\xc4", b"\x00\x9a", b"\xc0\x08", b"\xc0\x09", b"\xc0\x23", b"\xc0\xac", b"\xc0\xae", b"\xc0\x2b", b"\xc0\x0a", b"\xc0\x24", b"\xc0\xad", b"\xc0\xaf", b"\xc0\x2c", b"\xc0\x72", b"\xc0\x73", b"\xcc\xa9", b"\x13\x02", b"\x13\x01", b"\xcc\x14", b"\xc0\x07", b"\xc0\x12", b"\xc0\x13", b"\xc0\x27", b"\xc0\x2f", b"\xc0\x14", b"\xc0\x28", b"\xc0\x30", b"\xc0\x60", b"\xc0\x61", b"\xc0\x76", b"\xc0\x77", b"\xcc\xa8", b"\x13\x05", b"\x13\x04", b"\x13\x03", b"\xcc\x13", b"\xc0\x11", b"\x00\x0a", b"\x00\x2f", b"\x00\x3c", b"\xc0\x9c", b"\xc0\xa0", b"\x00\x9c", b"\x00\x35", b"\x00\x3d", b"\xc0\x9d", b"\xc0\xa1", b"\x00\x9d", b"\x00\x41", b"\x00\xba", b"\x00\x84", b"\x00\xc0", b"\x00\x07", b"\x00\x04", b"\x00\x05"]
|
||||
elif jarm_details[3] == "NO1.3":
|
||||
list = [b"\x00\x16", b"\x00\x33", b"\x00\x67", b"\xc0\x9e", b"\xc0\xa2", b"\x00\x9e", b"\x00\x39", b"\x00\x6b", b"\xc0\x9f", b"\xc0\xa3", b"\x00\x9f", b"\x00\x45", b"\x00\xbe", b"\x00\x88", b"\x00\xc4", b"\x00\x9a", b"\xc0\x08", b"\xc0\x09", b"\xc0\x23", b"\xc0\xac", b"\xc0\xae", b"\xc0\x2b", b"\xc0\x0a", b"\xc0\x24", b"\xc0\xad", b"\xc0\xaf", b"\xc0\x2c", b"\xc0\x72", b"\xc0\x73", b"\xcc\xa9", b"\xcc\x14", b"\xc0\x07", b"\xc0\x12", b"\xc0\x13", b"\xc0\x27", b"\xc0\x2f", b"\xc0\x14", b"\xc0\x28", b"\xc0\x30", b"\xc0\x60", b"\xc0\x61", b"\xc0\x76", b"\xc0\x77", b"\xcc\xa8", b"\xcc\x13", b"\xc0\x11", b"\x00\x0a", b"\x00\x2f", b"\x00\x3c", b"\xc0\x9c", b"\xc0\xa0", b"\x00\x9c", b"\x00\x35", b"\x00\x3d", b"\xc0\x9d", b"\xc0\xa1", b"\x00\x9d", b"\x00\x41", b"\x00\xba", b"\x00\x84", b"\x00\xc0", b"\x00\x07", b"\x00\x04", b"\x00\x05"]
|
||||
#Change cipher order
|
||||
if jarm_details[4] != "FORWARD":
|
||||
list = cipher_mung(list, jarm_details[4])
|
||||
#Add GREASE to beginning of cipher list (if applicable)
|
||||
if jarm_details[5] == "GREASE":
|
||||
list.insert(0,choose_grease())
|
||||
#Generate cipher list
|
||||
for cipher in list:
|
||||
selected_ciphers += cipher
|
||||
return selected_ciphers
|
||||
|
||||
def cipher_mung(ciphers, request):
|
||||
output = []
|
||||
cipher_len = len(ciphers)
|
||||
#Ciphers backward
|
||||
if (request == "REVERSE"):
|
||||
output = ciphers[::-1]
|
||||
#Bottom half of ciphers
|
||||
elif (request == "BOTTOM_HALF"):
|
||||
if (cipher_len % 2 == 1):
|
||||
output = ciphers[int(cipher_len/2)+1:]
|
||||
else:
|
||||
output = ciphers[int(cipher_len/2):]
|
||||
#Top half of ciphers in reverse order
|
||||
elif (request == "TOP_HALF"):
|
||||
if (cipher_len % 2 == 1):
|
||||
output.append(ciphers[int(cipher_len/2)])
|
||||
#Top half gets the middle cipher
|
||||
output += cipher_mung(cipher_mung(ciphers, "REVERSE"),"BOTTOM_HALF")
|
||||
#Middle-out cipher order
|
||||
elif (request == "MIDDLE_OUT"):
|
||||
middle = int(cipher_len/2)
|
||||
# if ciphers are uneven, start with the center. Second half before first half
|
||||
if (cipher_len % 2 == 1):
|
||||
output.append(ciphers[middle])
|
||||
for i in range(1, middle+1):
|
||||
output.append(ciphers[middle + i])
|
||||
output.append(ciphers[middle - i])
|
||||
else:
|
||||
for i in range(1, middle+1):
|
||||
output.append(ciphers[middle-1 + i])
|
||||
output.append(ciphers[middle - i])
|
||||
return output
|
||||
|
||||
def get_extensions(jarm_details):
|
||||
extension_bytes = b""
|
||||
all_extensions = b""
|
||||
grease = False
|
||||
#GREASE
|
||||
if jarm_details[5] == "GREASE":
|
||||
all_extensions += choose_grease()
|
||||
all_extensions += b"\x00\x00"
|
||||
grease = True
|
||||
#Server name
|
||||
all_extensions += extension_server_name(jarm_details[0])
|
||||
#Other extensions
|
||||
extended_master_secret = b"\x00\x17\x00\x00"
|
||||
all_extensions += extended_master_secret
|
||||
max_fragment_length = b"\x00\x01\x00\x01\x01"
|
||||
all_extensions += max_fragment_length
|
||||
renegotiation_info = b"\xff\x01\x00\x01\x00"
|
||||
all_extensions += renegotiation_info
|
||||
supported_groups = b"\x00\x0a\x00\x0a\x00\x08\x00\x1d\x00\x17\x00\x18\x00\x19"
|
||||
all_extensions += supported_groups
|
||||
ec_point_formats = b"\x00\x0b\x00\x02\x01\x00"
|
||||
all_extensions += ec_point_formats
|
||||
session_ticket = b"\x00\x23\x00\x00"
|
||||
all_extensions += session_ticket
|
||||
#Application Layer Protocol Negotiation extension
|
||||
all_extensions += app_layer_proto_negotiation(jarm_details)
|
||||
signature_algorithms = b"\x00\x0d\x00\x14\x00\x12\x04\x03\x08\x04\x04\x01\x05\x03\x08\x05\x05\x01\x08\x06\x06\x01\x02\x01"
|
||||
all_extensions += signature_algorithms
|
||||
#Key share extension
|
||||
all_extensions += key_share(grease)
|
||||
psk_key_exchange_modes = b"\x00\x2d\x00\x02\x01\x01"
|
||||
all_extensions += psk_key_exchange_modes
|
||||
#Supported versions extension
|
||||
if (jarm_details[2] == "TLS_1.3") or (jarm_details[7] == "1.2_SUPPORT"):
|
||||
all_extensions += supported_versions(jarm_details, grease)
|
||||
#Finish assembling extensions
|
||||
extension_length = len(all_extensions)
|
||||
extension_bytes += struct.pack(">H", extension_length)
|
||||
extension_bytes += all_extensions
|
||||
return extension_bytes
|
||||
|
||||
#Client hello server name extension
|
||||
def extension_server_name(host):
|
||||
ext_sni = b"\x00\x00"
|
||||
ext_sni_length = len(host)+5
|
||||
ext_sni += struct.pack(">H", ext_sni_length)
|
||||
ext_sni_length2 = len(host)+3
|
||||
ext_sni += struct.pack(">H", ext_sni_length2)
|
||||
ext_sni += b"\x00"
|
||||
ext_sni_length3 = len(host)
|
||||
ext_sni += struct.pack(">H", ext_sni_length3)
|
||||
ext_sni += host.encode()
|
||||
return ext_sni
|
||||
|
||||
#Client hello ALPN extension
|
||||
def app_layer_proto_negotiation(jarm_details):
|
||||
ext = b"\x00\x10"
|
||||
if (jarm_details[6] == "RARE_APLN"):
|
||||
#Removes h2 and http/1.1
|
||||
alpns = [b"\x08\x68\x74\x74\x70\x2f\x30\x2e\x39", b"\x08\x68\x74\x74\x70\x2f\x31\x2e\x30", b"\x06\x73\x70\x64\x79\x2f\x31", b"\x06\x73\x70\x64\x79\x2f\x32", b"\x06\x73\x70\x64\x79\x2f\x33", b"\x03\x68\x32\x63", b"\x02\x68\x71"]
|
||||
else:
|
||||
#All ALPN values in order from weakest to strongest
|
||||
alpns = [b"\x08\x68\x74\x74\x70\x2f\x30\x2e\x39", b"\x08\x68\x74\x74\x70\x2f\x31\x2e\x30", b"\x08\x68\x74\x74\x70\x2f\x31\x2e\x31", b"\x06\x73\x70\x64\x79\x2f\x31", b"\x06\x73\x70\x64\x79\x2f\x32", b"\x06\x73\x70\x64\x79\x2f\x33", b"\x02\x68\x32", b"\x03\x68\x32\x63", b"\x02\x68\x71"]
|
||||
#ALPN values can be reordered
|
||||
if jarm_details[8] != "FORWARD":
|
||||
alpns = cipher_mung(alpns, jarm_details[8])
|
||||
all_alpns = b""
|
||||
for alpn in alpns:
|
||||
all_alpns += alpn
|
||||
second_length = len(all_alpns)
|
||||
first_length = second_length+2
|
||||
ext += struct.pack(">H", first_length)
|
||||
ext += struct.pack(">H", second_length)
|
||||
ext += all_alpns
|
||||
return ext
|
||||
|
||||
#Generate key share extension for client hello
|
||||
def key_share(grease):
|
||||
ext = b"\x00\x33"
|
||||
#Add grease value if necessary
|
||||
if grease == True:
|
||||
share_ext = choose_grease()
|
||||
share_ext += b"\x00\x01\x00"
|
||||
else:
|
||||
share_ext = b""
|
||||
group = b"\x00\x1d"
|
||||
share_ext += group
|
||||
key_exchange_length = b"\x00\x20"
|
||||
share_ext += key_exchange_length
|
||||
share_ext += os.urandom(32)
|
||||
second_length = len(share_ext)
|
||||
first_length = second_length+2
|
||||
ext += struct.pack(">H", first_length)
|
||||
ext += struct.pack(">H", second_length)
|
||||
ext += share_ext
|
||||
return ext
|
||||
|
||||
#Supported version extension for client hello
|
||||
def supported_versions(jarm_details, grease):
|
||||
if (jarm_details[7] == "1.2_SUPPORT"):
|
||||
#TLS 1.3 is not supported
|
||||
tls = [b"\x03\x01", b"\x03\x02", b"\x03\x03"]
|
||||
else:
|
||||
#TLS 1.3 is supported
|
||||
tls = [b"\x03\x01", b"\x03\x02", b"\x03\x03", b"\x03\x04"]
|
||||
#Change supported version order, by default, the versions are from oldest to newest
|
||||
if jarm_details[8] != "FORWARD":
|
||||
tls = cipher_mung(tls, jarm_details[8])
|
||||
#Assemble the extension
|
||||
ext = b"\x00\x2b"
|
||||
#Add GREASE if applicable
|
||||
if grease == True:
|
||||
versions = choose_grease()
|
||||
else:
|
||||
versions = b""
|
||||
for version in tls:
|
||||
versions += version
|
||||
second_length = len(versions)
|
||||
first_length = second_length+1
|
||||
ext += struct.pack(">H", first_length)
|
||||
ext += struct.pack(">B", second_length)
|
||||
ext += versions
|
||||
return ext
|
||||
|
||||
#Send the assembled client hello using a socket
|
||||
def send_packet(packet, destination_host, destination_port):
|
||||
try:
|
||||
#Determine if the input is an IP or domain name
|
||||
try:
|
||||
if (type(ipaddress.ip_address(destination_host)) == ipaddress.IPv4Address) or (type(ipaddress.ip_address(destination_host)) == ipaddress.IPv6Address):
|
||||
raw_ip = True
|
||||
ip = (destination_host, destination_port)
|
||||
except ValueError as e:
|
||||
ip = (None, None)
|
||||
raw_ip = False
|
||||
#Connect the socket
|
||||
if ":" in destination_host:
|
||||
sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
|
||||
sock.settimeout(10)
|
||||
sock.connect((destination_host, destination_port, 0, 0))
|
||||
else:
|
||||
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
sock.settimeout(10)
|
||||
sock.connect((destination_host, destination_port))
|
||||
#Resolve IP if given a domain name
|
||||
if raw_ip == False:
|
||||
ip = sock.getpeername()
|
||||
sock.sendall(packet)
|
||||
#Receive server hello
|
||||
data = sock.recv(1484)
|
||||
#Close socket
|
||||
sock.shutdown(socket.SHUT_RDWR)
|
||||
sock.close()
|
||||
return bytearray(data), ip[0]
|
||||
#Timeout errors result in an empty hash
|
||||
except socket.timeout as e:
|
||||
sock.close()
|
||||
return "TIMEOUT", ip[0]
|
||||
except Exception as e:
|
||||
sock.close()
|
||||
return None, ip[0]
|
||||
|
||||
#If a packet is received, decipher the details
|
||||
def read_packet(data, jarm_details):
|
||||
try:
|
||||
if data == None:
|
||||
return "|||"
|
||||
jarm = ""
|
||||
#Server hello error
|
||||
if data[0] == 21:
|
||||
selected_cipher = b""
|
||||
return "|||"
|
||||
#Check for server hello
|
||||
elif (data[0] == 22) and (data[5] == 2):
|
||||
server_hello_length = int.from_bytes(data[3:5], "big")
|
||||
counter = data[43]
|
||||
#Find server's selected cipher
|
||||
selected_cipher = data[counter+44:counter+46]
|
||||
#Find server's selected version
|
||||
version = data[9:11]
|
||||
#Format
|
||||
jarm += codecs.encode(selected_cipher, 'hex').decode('ascii')
|
||||
jarm += "|"
|
||||
jarm += codecs.encode(version, 'hex').decode('ascii')
|
||||
jarm += "|"
|
||||
#Extract extensions
|
||||
extensions = (extract_extension_info(data, counter, server_hello_length))
|
||||
jarm += extensions
|
||||
return jarm
|
||||
else:
|
||||
return "|||"
|
||||
|
||||
except Exception as e:
|
||||
return "|||"
|
||||
|
||||
#Deciphering the extensions in the server hello
|
||||
def extract_extension_info(data, counter, server_hello_length):
|
||||
try:
|
||||
#Error handling
|
||||
if (data[counter+47] == 11):
|
||||
return "|"
|
||||
elif (data[counter+50:counter+53] == b"\x0e\xac\x0b") or (data[82:85] == b"\x0f\xf0\x0b"):
|
||||
return "|"
|
||||
elif counter+42 >= server_hello_length:
|
||||
return "|"
|
||||
count = 49+counter
|
||||
length = int(codecs.encode(data[counter+47:counter+49], 'hex'), 16)
|
||||
maximum = length+(count-1)
|
||||
types = []
|
||||
values = []
|
||||
#Collect all extension types and values for later reference
|
||||
while count < maximum:
|
||||
types.append(data[count:count+2])
|
||||
ext_length = int(codecs.encode(data[count+2:count+4], 'hex'), 16)
|
||||
if ext_length == 0:
|
||||
count += 4
|
||||
values.append("")
|
||||
else:
|
||||
values.append(data[count+4:count+4+ext_length])
|
||||
count += ext_length+4
|
||||
result = ""
|
||||
#Read application_layer_protocol_negotiation
|
||||
alpn = find_extension(b"\x00\x10", types, values)
|
||||
result += str(alpn)
|
||||
result += "|"
|
||||
#Add formatting hyphens
|
||||
add_hyphen = 0
|
||||
while add_hyphen < len(types):
|
||||
result += codecs.encode(types[add_hyphen], 'hex').decode('ascii')
|
||||
add_hyphen += 1
|
||||
if add_hyphen == len(types):
|
||||
break
|
||||
else:
|
||||
result += "-"
|
||||
return result
|
||||
#Error handling
|
||||
except IndexError as e:
|
||||
result = "|"
|
||||
return result
|
||||
|
||||
#Matching cipher extensions to values
|
||||
def find_extension(ext_type, types, values):
|
||||
iter = 0
|
||||
#For the ALPN extension, grab the value in ASCII
|
||||
if ext_type == b"\x00\x10":
|
||||
while iter < len(types):
|
||||
if types[iter] == ext_type:
|
||||
return ((values[iter][3:]).decode())
|
||||
iter += 1
|
||||
else:
|
||||
while iter < len(types):
|
||||
if types[iter] == ext_type:
|
||||
return values[iter].hex()
|
||||
iter += 1
|
||||
return ""
|
||||
|
||||
#Custom fuzzy hash
|
||||
def jarm_hash(jarm_raw):
|
||||
#If jarm is empty, 62 zeros for the hash
|
||||
if jarm_raw == "|||,|||,|||,|||,|||,|||,|||,|||,|||,|||":
|
||||
return "0"*62
|
||||
fuzzy_hash = ""
|
||||
handshakes = jarm_raw.split(",")
|
||||
alpns_and_ext = ""
|
||||
for handshake in handshakes:
|
||||
components = handshake.split("|")
|
||||
#Custom jarm hash includes a fuzzy hash of the ciphers and versions
|
||||
fuzzy_hash += cipher_bytes(components[0])
|
||||
fuzzy_hash += version_byte(components[1])
|
||||
alpns_and_ext += components[2]
|
||||
alpns_and_ext += components[3]
|
||||
#Custom jarm hash has the sha256 of alpns and extensions added to the end
|
||||
sha256 = (hashlib.sha256(alpns_and_ext.encode())).hexdigest()
|
||||
fuzzy_hash += sha256[0:32]
|
||||
return fuzzy_hash
|
||||
|
||||
#Fuzzy hash for ciphers is the index number (in hex) of the cipher in the list
|
||||
def cipher_bytes(cipher):
|
||||
if cipher == "":
|
||||
return "00"
|
||||
list = [b"\x00\x04", b"\x00\x05", b"\x00\x07", b"\x00\x0a", b"\x00\x16", b"\x00\x2f", b"\x00\x33", b"\x00\x35", b"\x00\x39", b"\x00\x3c", b"\x00\x3d", b"\x00\x41", b"\x00\x45", b"\x00\x67", b"\x00\x6b", b"\x00\x84", b"\x00\x88", b"\x00\x9a", b"\x00\x9c", b"\x00\x9d", b"\x00\x9e", b"\x00\x9f", b"\x00\xba", b"\x00\xbe", b"\x00\xc0", b"\x00\xc4", b"\xc0\x07", b"\xc0\x08", b"\xc0\x09", b"\xc0\x0a", b"\xc0\x11", b"\xc0\x12", b"\xc0\x13", b"\xc0\x14", b"\xc0\x23", b"\xc0\x24", b"\xc0\x27", b"\xc0\x28", b"\xc0\x2b", b"\xc0\x2c", b"\xc0\x2f", b"\xc0\x30", b"\xc0\x60", b"\xc0\x61", b"\xc0\x72", b"\xc0\x73", b"\xc0\x76", b"\xc0\x77", b"\xc0\x9c", b"\xc0\x9d", b"\xc0\x9e", b"\xc0\x9f", b"\xc0\xa0", b"\xc0\xa1", b"\xc0\xa2", b"\xc0\xa3", b"\xc0\xac", b"\xc0\xad", b"\xc0\xae", b"\xc0\xaf", b'\xcc\x13', b'\xcc\x14', b'\xcc\xa8', b'\xcc\xa9', b'\x13\x01', b'\x13\x02', b'\x13\x03', b'\x13\x04', b'\x13\x05']
|
||||
count = 1
|
||||
for bytes in list:
|
||||
strtype_bytes = codecs.encode(bytes, 'hex').decode('ascii')
|
||||
if cipher == strtype_bytes:
|
||||
break
|
||||
count += 1
|
||||
hexvalue = str(hex(count))[2:]
|
||||
#This part must always be two bytes
|
||||
if len(hexvalue) < 2:
|
||||
return_bytes = "0" + hexvalue
|
||||
else:
|
||||
return_bytes = hexvalue
|
||||
return return_bytes
|
||||
|
||||
#This captures a single version byte based on version
|
||||
def version_byte(version):
|
||||
if version == "":
|
||||
return "0"
|
||||
options = "abcdef"
|
||||
count = int(version[3:4])
|
||||
byte = options[count]
|
||||
return byte
|
||||
|
||||
def ParseNumber(number):
|
||||
if number.startswith('0x'):
|
||||
return int(number[2:], 16)
|
||||
else:
|
||||
return int(number)
|
||||
|
||||
def get_jarm(destination_host, destination_port):
|
||||
#Select the packets and formats to send
|
||||
#Array format = [destination_host,destination_port,version,cipher_list,cipher_order,GREASE,RARE_APLN,1.3_SUPPORT,extension_orders]
|
||||
tls1_2_forward = [destination_host, destination_port, "TLS_1.2", "ALL", "FORWARD", "NO_GREASE", "APLN", "1.2_SUPPORT", "REVERSE"]
|
||||
tls1_2_reverse = [destination_host, destination_port, "TLS_1.2", "ALL", "REVERSE", "NO_GREASE", "APLN", "1.2_SUPPORT", "FORWARD"]
|
||||
tls1_2_top_half = [destination_host, destination_port, "TLS_1.2", "ALL", "TOP_HALF", "NO_GREASE", "APLN", "NO_SUPPORT", "FORWARD"]
|
||||
tls1_2_bottom_half = [destination_host, destination_port, "TLS_1.2", "ALL", "BOTTOM_HALF", "NO_GREASE", "RARE_APLN", "NO_SUPPORT", "FORWARD"]
|
||||
tls1_2_middle_out = [destination_host, destination_port, "TLS_1.2", "ALL", "MIDDLE_OUT", "GREASE", "RARE_APLN", "NO_SUPPORT", "REVERSE"]
|
||||
tls1_1_middle_out = [destination_host, destination_port, "TLS_1.1", "ALL", "FORWARD", "NO_GREASE", "APLN", "NO_SUPPORT", "FORWARD"]
|
||||
tls1_3_forward = [destination_host, destination_port, "TLS_1.3", "ALL", "FORWARD", "NO_GREASE", "APLN", "1.3_SUPPORT", "REVERSE"]
|
||||
tls1_3_reverse = [destination_host, destination_port, "TLS_1.3", "ALL", "REVERSE", "NO_GREASE", "APLN", "1.3_SUPPORT", "FORWARD"]
|
||||
tls1_3_invalid = [destination_host, destination_port, "TLS_1.3", "NO1.3", "FORWARD", "NO_GREASE", "APLN", "1.3_SUPPORT", "FORWARD"]
|
||||
tls1_3_middle_out = [destination_host, destination_port, "TLS_1.3", "ALL", "MIDDLE_OUT", "GREASE", "APLN", "1.3_SUPPORT", "REVERSE"]
|
||||
#Possible versions: SSLv3, TLS_1, TLS_1.1, TLS_1.2, TLS_1.3
|
||||
#Possible cipher lists: ALL, NO1.3
|
||||
#GREASE: either NO_GREASE or GREASE
|
||||
#APLN: either APLN or RARE_APLN
|
||||
#Supported Versions extension: 1.2_SUPPORT, NO_SUPPORT, or 1.3_SUPPORT
|
||||
#Possible Extension order: FORWARD, REVERSE
|
||||
queue = [tls1_2_forward, tls1_2_reverse, tls1_2_top_half, tls1_2_bottom_half, tls1_2_middle_out, tls1_1_middle_out, tls1_3_forward, tls1_3_reverse, tls1_3_invalid, tls1_3_middle_out]
|
||||
jarm = ""
|
||||
#Assemble, send, and decipher each packet
|
||||
iterate = 0
|
||||
while iterate < len(queue):
|
||||
payload = packet_building(queue[iterate])
|
||||
server_hello, ip = send_packet(payload, destination_host, destination_port)
|
||||
#Deal with timeout error
|
||||
if server_hello == "TIMEOUT":
|
||||
jarm = "|||,|||,|||,|||,|||,|||,|||,|||,|||,|||"
|
||||
break
|
||||
ans = read_packet(server_hello, queue[iterate])
|
||||
jarm += ans
|
||||
iterate += 1
|
||||
if iterate == len(queue):
|
||||
break
|
||||
else:
|
||||
jarm += ","
|
||||
#Fuzzy hash
|
||||
return jarm_hash(jarm)
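A minimal sketch of how the engine consumes this module; the host and port below are placeholders.

# Hypothetical example, for illustration only.
from classes.jarm import get_jarm

fingerprint = get_jarm("example.org", 443)   # 62-character fuzzy hash ("0" * 62 on timeout)
print(fingerprint)
# engine.py compares this value against the blacklisted JARM IOCs (self.bl_jarms).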
analysis/classes/report.py (Executable file, 475 lines added)
File diff suppressed because one or more lines are too long