Merge branch 'main' into main
Commit 3dcbd89979
@@ -8,11 +8,11 @@ TinyCheck allows you to easily capture network communications from a smartphone

 The idea of TinyCheck emerged in a meeting about stalkerware with a [French women's shelter](https://www.centre-hubertine-auclert.fr). During this meeting we talked about how to easily detect [stalkerware](https://stopstalkerware.org/) without installing very technical apps nor doing forensic analysis on the victim's smartphone. The initial concept was to develop a tiny kiosk device based on Raspberry Pi which can be used by non-tech people to test their smartphones against malicious communications issued by stalkerware or any spyware.

-Of course, TinyCheck can also be used to spot any malicious communications from cybercrime to state-sponsored implants. It allows the end-user to push his own extended Indicators of Compromise via a backend in order to detect some ghosts over the wire.
+Of course, TinyCheck can also be used to spot any malicious communications from cybercrime to state-sponsored implants. It allows the end-user to push their own extended Indicators of Compromise via a backend in order to detect some ghosts over the wire.

 <p align="center"><strong>If you need more documentation on how to install it, use it and the internals, don't hesitate to take a look at the <a href="https://github.com/KasperskyLab/TinyCheck/wiki">TinyCheck Wiki</a>.</strong></p>

-<p align="center">If you have any question about the projet, want to contribute or just send your feedback, <br />don't hesitate to contact us at tinycheck[@]kaspersky[.]com.</p>
+<p align="center">If you have any question about the project, want to contribute or just send your feedback, <br />don't hesitate to contact us at tinycheck[@]kaspersky[.]com.</p>

 ### Use cases
@@ -16,60 +16,82 @@ import os
     containing a capture.pcap file.
 """

-if __name__ == "__main__":
-    if len(sys.argv) == 2:
-        capture_directory = sys.argv[1]
-        if os.path.isdir(capture_directory):
-
-            manager = Manager()
-            alerts = manager.dict()
-
-            def zeekengine(alerts):
-                zeek = ZeekEngine(capture_directory)
-                zeek.start_zeek()
-                alerts["zeek"] = zeek.retrieve_alerts()
-
-                # whitelist.json writing.
-                with open(os.path.join(capture_directory, "assets/whitelist.json"), "w") as f:
-                    f.write(json.dumps(zeek.retrieve_whitelist(),
-                                       indent=4, separators=(',', ': ')))
-
-                # conns.json writing.
-                with open(os.path.join(capture_directory, "assets/conns.json"), "w") as f:
-                    f.write(json.dumps(zeek.retrieve_conns(),
-                                       indent=4, separators=(',', ': ')))
-
-            def snortengine(alerts):
-                suricata = SuricataEngine(capture_directory)
-                suricata.start_suricata()
-                alerts["suricata"] = suricata.get_alerts()
-
-            # Start the engines.
-            p1 = Process(target=zeekengine, args=(alerts,))
-            p2 = Process(target=snortengine, args=(alerts,))
-            p1.start()
-            p2.start()
-
-            # Wait to their end.
-            p1.join()
-            p2.join()
-
-            # Some formating and alerts.json writing.
-            with open(os.path.join(capture_directory, "assets/alerts.json"), "w") as f:
-                report = {"high": [], "moderate": [], "low": []}
-                for alert in (alerts["zeek"] + alerts["suricata"]):
-                    if alert["level"] == "High":
-                        report["high"].append(alert)
-                    if alert["level"] == "Moderate":
-                        report["moderate"].append(alert)
-                    if alert["level"] == "Low":
-                        report["low"].append(alert)
-                f.write(json.dumps(report, indent=4, separators=(',', ': ')))
-
-            # Generate the report
-            report = Report(capture_directory)
-            report.generate_report()
-        else:
-            print("The directory doesn't exist.")
-    else:
-        print("Please specify a capture directory in argument.")
+def analyze(capture_directory,frontend=False):
+    if os.path.isdir(capture_directory):
+
+        manager = Manager()
+        alerts = manager.dict()
+
+        def zeekengine(alerts):
+            zeek = ZeekEngine(capture_directory)
+            zeek.start_zeek()
+            alerts["zeek"] = zeek.retrieve_alerts()
+
+            if not os.path.isdir(os.path.join(capture_directory, "assets")):
+                os.mkdir(os.path.join(capture_directory, "assets"))
+
+            # whitelist.json writing.
+            with open(os.path.join(capture_directory, "assets/whitelist.json"), "w") as f:
+                f.write(json.dumps(zeek.retrieve_whitelist(),
+                                   indent=4, separators=(',', ': ')))
+
+            # conns.json writing.
+            with open(os.path.join(capture_directory, "assets/conns.json"), "w") as f:
+                f.write(json.dumps(zeek.retrieve_conns(),
+                                   indent=4, separators=(',', ': ')))
+
+        def snortengine(alerts):
+            suricata = SuricataEngine(capture_directory)
+            suricata.start_suricata()
+            alerts["suricata"] = suricata.get_alerts()
+
+        # Start the engines.
+        p1 = Process(target=zeekengine, args=(alerts,))
+        p2 = Process(target=snortengine, args=(alerts,))
+        p1.start()
+        p2.start()
+
+        # Wait to their end.
+        p1.join()
+        p2.join()
+
+        # Some formating and alerts.json writing.
+        with open(os.path.join(capture_directory, "assets/alerts.json"), "w") as f:
+            report = {"high": [], "moderate": [], "low": []}
+            for alert in (alerts["zeek"] + alerts["suricata"]):
+                if alert["level"] == "High":
+                    report["high"].append(alert)
+                if alert["level"] == "Moderate":
+                    report["moderate"].append(alert)
+                if alert["level"] == "Low":
+                    report["low"].append(alert)
+            f.write(json.dumps(report, indent=4, separators=(',', ': ')))
+
+        # Generate the report
+        report = Report(capture_directory,frontend)
+        report.generate_report()
+    else:
+        print("The directory doesn't exist.")
+
+
+def usage():
+    print("""Usage: python analysis.py [capture_directory]
+        where [capture_directory] is a directory containing a capture.pcap file
+        analysis.py -f starts the analysis in frontend mode intended to be called by the TinyCheck frontend.""")
+
+
+if __name__ == "__main__":
+    if len(sys.argv) == 2: #called manually without frontend
+        analyze(sys.argv[1], False)
+    elif len(sys.argv) == 3:
+        if(sys.argv[1]) == "-f": #frontend mode
+            analyze(sys.argv[2], True)
+        else:
+            usage()
+
+    else:
+        usage()
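Reviewer note: a minimal sketch of how the refactored entry point above is meant to be driven once this hunk lands. The capture path and the module-style import are illustrative assumptions; only analyze(), the -f flag and the frontend parameter come from the diff itself.

# Hypothetical driver, assuming analysis.py is importable as a module and
# /tmp/demo_capture already contains a capture.pcap file.
from analysis import analyze

# Manual mode, equivalent to: python analysis.py /tmp/demo_capture
analyze("/tmp/demo_capture", frontend=False)

# Frontend mode, equivalent to: python analysis.py -f /tmp/demo_capture
# (this is how the backend's sp.Popen() call invokes it further down).
analyze("/tmp/demo_capture", frontend=True)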
@@ -13,7 +13,7 @@ from utils import get_config

 class Report(object):

-    def __init__(self, capture_directory):
+    def __init__(self, capture_directory, frontend):
         self.capture_directory = capture_directory
         self.alerts = self.read_json(os.path.join(
             capture_directory, "assets/alerts.json"))
@@ -21,10 +21,13 @@ class Report(object):
             capture_directory, "assets/whitelist.json"))
         self.conns = self.read_json(os.path.join(
             capture_directory, "assets/conns.json"))
-        self.device = self.read_json(os.path.join(
-            capture_directory, "assets/device.json"))
-        self.capinfos = self.read_json(os.path.join(
-            capture_directory, "assets/capinfos.json"))
+        self.device = None
+        self.capinfos = None
+        if frontend:
+            self.device = self.read_json(os.path.join(
+                capture_directory, "assets/device.json"))
+            self.capinfos = self.read_json(os.path.join(
+                capture_directory, "assets/capinfos.json"))
         try:
             with open(os.path.join(self.capture_directory, "capture.pcap"), "rb") as f:
                 self.capture_sha1 = hashlib.sha1(f.read()).hexdigest()
@@ -204,16 +207,18 @@ class Report(object):
         """
         header = "<div class=\"header\">"
         header += "<div class=\"logo\"></div>"
-        header += "<p><br /><strong>{}: {}</strong><br />".format(self.template["device_name"],
+        if self.device is not None:
+            header += "<p><br /><strong>{}: {}</strong><br />".format(self.template["device_name"],
                                                                   self.device["name"])
-        header += "{}: {}<br />".format(self.template["device_mac"],
+            header += "{}: {}<br />".format(self.template["device_mac"],
                                         self.device["mac_address"])
         header += "{} {}<br />".format(self.template["report_generated_on"],
                                        datetime.now().strftime("%d/%m/%Y - %H:%M:%S"))
-        header += "{}: {}s<br />".format(self.template["capture_duration"],
-                                         self.capinfos["Capture duration"])
-        header += "{}: {}<br />".format(self.template["packets_number"],
-                                        self.capinfos["Number of packets"])
+        if self.capinfos is not None:
+            header += "{}: {}s<br />".format(self.template["capture_duration"],
+                                             self.capinfos["Capture duration"])
+            header += "{}: {}<br />".format(self.template["packets_number"],
+                                            self.capinfos["Number of packets"])
         header += "{}: {}<br />".format(
             self.template["capture_sha1"], self.capture_sha1)
         header += "</p>"
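Reviewer note: taken together with the two __init__ hunks above, the intent is that a report generated outside the frontend simply skips the device and capture-statistics lines instead of crashing on missing JSON files. A small sketch of the two call sites; the capture path is hypothetical, while Report() and generate_report() appear in the diff.

# Frontend run: assets/device.json and assets/capinfos.json are read,
# so the report header shows the device name, MAC and capture statistics.
report = Report("/tmp/demo_capture", True)
report.generate_report()

# Manual run: self.device and self.capinfos stay None, so the header
# falls back to the generation date and the capture SHA1 only.
report = Report("/tmp/demo_capture", False)
report.generate_report()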
@@ -236,6 +236,7 @@ class ZeekEngine(object):
                 pass

             try: # Domain history check.
+
                 whois_record = whois.whois(c["resolution"])
                 creation_date = whois_record.creation_date if type(
                     whois_record.creation_date) is not list else whois_record.creation_date[0]
@@ -247,6 +248,7 @@
                                         "host": c["resolution"],
                                         "level": "Moderate",
                                         "id": "ACT-02"})
+
             except:
                 pass

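Reviewer note: for readers skimming the context lines, this is the domain-history check that raises the ACT-02 "Moderate" alert via python-whois. A condensed, hypothetical restatement of the logic visible above; the age threshold and the helper name are assumptions and not part of the diff.

import whois
from datetime import datetime

def looks_recently_registered(resolution, max_age_days=365):
    # python-whois may return creation_date as a list; keep the first entry.
    record = whois.whois(resolution)
    creation_date = record.creation_date if type(
        record.creation_date) is not list else record.creation_date[0]
    if creation_date is None:
        return False
    # Flag domains whose WHOIS record is younger than the assumed threshold.
    return (datetime.now() - creation_date).days < max_age_days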
@@ -447,7 +449,6 @@
             self.working_dir), shell=True).wait()
         sp.Popen("cd {} && mv *.log assets/".format(self.working_dir),
                  shell=True).wait()
-
         self.fill_dns(self.working_dir + "/assets/")
         self.netflow_check(self.working_dir + "/assets/")
         self.ssl_check(self.working_dir + "/assets/")
@@ -10,7 +10,7 @@ import os
 from functools import reduce

 # I'm not going to use an ORM for that.
-parent = "/".join(sys.path[0].split("/")[:-1])
+parent = os.path.split(os.path.dirname(os.path.abspath(sys.argv[0])))[0]
 conn = sqlite3.connect(os.path.join(parent, "tinycheck.sqlite3"))
 cursor = conn.cursor()

app/backend/package-lock.json (generated, 2 lines changed)
@@ -1,5 +1,5 @@
 {
-  "name": "tinycheck-backend",
+  "name": "@kaspersky/tinycheck-backend",
   "version": "0.1.0",
   "lockfileVersion": 1,
   "requires": true,
@@ -1,7 +1,6 @@
 {
-  "name": "tinycheck-backend",
+  "name": "@kaspersky/tinycheck-backend",
   "version": "0.1.0",
   "private": true,
   "scripts": {
     "serve": "vue-cli-service serve --copy --port=4201",
     "build": "vue-cli-service build",
app/frontend/package-lock.json (generated, 2 lines changed)
@@ -1,5 +1,5 @@
 {
-  "name": "tinycheck-new",
+  "name": "@kaspersky/tinycheck-new",
   "version": "0.1.0",
   "lockfileVersion": 1,
   "requires": true,
@@ -1,7 +1,6 @@
 {
-  "name": "tinycheck-new",
+  "name": "@kaspersky/tinycheck-new",
   "version": "0.1.0",
   "private": true,
   "scripts": {
     "serve": "vue-cli-service serve --copy --port=4202",
     "build": "vue-cli-service build",
@@ -16,3 +16,4 @@ qrcode
 netifaces
 weasyprint
 python-whois
+six
@@ -52,7 +52,8 @@ frontend:
   user_lang: userlang
   update: updateoption
   choose_net: false
+  http_port: 80

 # NETWORK -
 # Some elements related to the network configuration, such as
 # the interfaces (updated during the install), the list of SSIDs
@@ -77,6 +78,6 @@ network:
 watchers:
   iocs:
     - https://raw.githubusercontent.com/KasperskyLab/TinyCheck/main/assets/iocs.json
-    - https://raw.githubusercontent.com/Te-k/stalkerware-indicators/master/indicators-for-tinycheck.json
+    - https://raw.githubusercontent.com/Te-k/stalkerware-indicators/master/generated/indicators-for-tinycheck.json
   whitelists:
     - https://raw.githubusercontent.com/KasperskyLab/TinyCheck/main/assets/whitelist.json
@@ -265,6 +265,7 @@ change_hostname() {
 install_package() {
    # Install associated packages by using aptitude.
    if [[ $1 == "dnsmasq" || $1 == "hostapd" || $1 == "tshark" || $1 == "sqlite3" || $1 == "unclutter" || $1 == "swig" || $1 == "curl" ]]; then
+
        apt-get install $1 -y
    elif [[ $1 == "suricata" ]];then
        add-apt-repository ppa:oisf/suricata-stable
@@ -323,9 +324,9 @@ check_dependencies() {
           "/usr/bin/unclutter"
           "/usr/bin/sqlite3"
           "/usr/bin/pip"
-          "/usr/bin/swig"
-          "/usr/sbin/dhcpcd"
-          "/usr/bin/curl")
+          "/usr/bin/swig"
+          "/usr/sbin/dhcpcd"
+          "/usr/bin/curl")

     echo -e "\e[39m[+] Checking dependencies...\e[39m"
     for bin in "${bins[@]}"
@@ -420,6 +421,7 @@ check_interfaces(){
     do
         if echo "$iface" | grep -Eq "(wlan[0-9]|wl[a-z0-9]{,20})"; then
             config="$(ip a s $iface)" # Get the iface logic configuration
+
             if echo "$config" | grep -qv "inet "; then # Test if not currently connected
                 hw="$(iw $iface info | grep wiphy | cut -d" " -f2)" # Get the iface hardware id.
                 info="$(iw phy$hw info)" # Get the iface hardware infos.
@@ -24,7 +24,7 @@ class Analysis(object):
         if self.token is not None:
             parent = "/".join(sys.path[0].split("/")[:-2])
             sp.Popen(
-                [sys.executable, "{}/analysis/analysis.py".format(parent), "/tmp/{}".format(self.token)])
+                [sys.executable, "{}/analysis/analysis.py".format(parent), "-f", "/tmp/{}".format(self.token)])
             return {"status": True,
                     "message": "Analysis started",
                     "token": self.token}
@@ -46,7 +46,12 @@ app.register_blueprint(misc_bp, url_prefix='/api/misc')
 app.register_blueprint(update_bp, url_prefix='/api/update')

 if __name__ == '__main__':
+    port = ""
+    try:
+        port = int(read_config(("frontend", "http_port")))
+    except:
+        port = 80
     if read_config(("frontend", "remote_access")):
-        app.run(host="0.0.0.0", port=80)
+        app.run(host="0.0.0.0", port=port)
     else:
-        app.run(port=80)
+        app.run(port=port)