Compare commits
13 Commits
imgbot ... pixeebot/d
Author | SHA1 | Date
---|---|---
 | a2621fc05b |
 | 3ff9520114 |
 | cf5808cf71 |
 | 052ce4cd9c |
 | 1eecd892f7 |
 | f9be1daccb |
 | 6d0560c311 |
 | dfdbff5100 |
 | e93a4c16de |
 | 4e4c6172cd |
 | c3f09469b9 |
 | 69512ba605 |
 | bd4ab27c21 |
19 .github/workflows/snorkell-auto-documentation.yml vendored Normal file
@@ -0,0 +1,19 @@
+# This workflow will improve the current file with AI-generated documentation and create a new PR
+
+name: Snorkell.ai - Revolutionizing Documentation on GitHub
+
+on:
+  push:
+    branches: ["main"]
+  workflow_dispatch:
+
+jobs:
+  Documentation:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Snorkell DocGen Client
+        uses: SingularityX-ai/snorkell-documentation-client@v1.0.0
+        with:
+          client_id: ${{ secrets.SNORKELL_CLIENT_ID }}
+          api_key: ${{ secrets.SNORKELL_API_KEY }}
+          branch_name: "main"
@@ -2,9 +2,9 @@ pymisp==2.4.165.1
 sqlalchemy==1.4.48
 ipwhois==1.2.0
 netaddr==0.8.0
-flask==1.1.2
+flask==2.2.5
 flask_httpauth==4.8.0
-pyjwt==1.7.1
+pyjwt==2.4.0
 psutil==5.8.0
 pydig==0.4.0
 pyudev==0.24.0
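These are security-driven version bumps (Flask 1.1.2 → 2.2.5, PyJWT 1.7.1 → 2.4.0), and the PyJWT jump crosses the 1.x → 2.x boundary, where `jwt.encode()` returns a `str` instead of `bytes` and `jwt.decode()` requires an explicit `algorithms` list. Below is a minimal sketch of the kind of call-site adjustment this bump may require; the helper names and secret are hypothetical, not taken from this repository. The Flask bump is also a major-version change and is worth retesting against the app's routes.

```python
import jwt  # PyJWT >= 2.0

SECRET_KEY = "change-me"  # hypothetical secret; load from config in practice


def issue_token(payload: dict) -> str:
    # PyJWT 2.x returns a str here; 1.x returned bytes and often needed .decode().
    return jwt.encode(payload, SECRET_KEY, algorithm="HS256")


def verify_token(token: str) -> dict:
    # PyJWT 2.x requires an explicit algorithms list; 1.x would decode without it.
    return jwt.decode(token, SECRET_KEY, algorithms=["HS256"])
```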
@@ -1,149 +1,149 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 
 from app.utils import read_config
 from app.classes.iocs import IOCs
 from app.classes.whitelist import WhiteList
 from app.classes.misp import MISP
 
 import requests
 import json
 import urllib3
 import time
 from multiprocessing import Process
 
 """
 This file parses the watchers present in the
 configuration file, in order to automatically get
 new iocs / elements from remote sources
 without user interaction.
 """
 
 urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
 
 
 def watch_iocs():
     """
     Retrieve IOCs from the remote URLs defined in config/watchers.
     For each IOC, add it to the DB.
     """
 
     # Retrieve the URLs from the configuration
     urls = read_config(("watchers", "iocs"))
     watchers = [{"url": url, "status": False} for url in urls]
 
     while True:
         for w in watchers:
             if w["status"] == False:
                 iocs = IOCs()
                 iocs_list = []
                 to_delete = []
                 try:
-                    res = requests.get(w["url"], verify=False)
+                    res = requests.get(w["url"], verify=True)
                     if res.status_code == 200:
                         content = json.loads(res.content)
                         iocs_list = content["iocs"] if "iocs" in content else []
                         to_delete = content["to_delete"] if "to_delete" in content else []
                     else:
                         w["status"] = False
                 except:
                     w["status"] = False
 
                 for ioc in iocs_list:
                     try:
                         iocs.add(ioc["type"], ioc["tag"],
                                  ioc["tlp"], ioc["value"], "watcher")
                         w["status"] = True
                     except:
                         continue
 
                 for ioc in to_delete:
                     try:
                         iocs.delete_by_value(ioc["value"])
                         w["status"] = True
                     except:
                         continue
 
         # If at least one URL hasn't been parsed, retry in 1 min.
         if False in [w["status"] for w in watchers]:
             time.sleep(60)
         else:
             break
 
 
 def watch_whitelists():
     """
     Retrieve whitelist elements from the remote URLs
     defined in config/watchers. For each new element,
     add it to the DB.
     """
 
     urls = read_config(("watchers", "whitelists"))
     watchers = [{"url": url, "status": False} for url in urls]
 
     while True:
         for w in watchers:
             if w["status"] == False:
                 whitelist = WhiteList()
                 elements = []
                 to_delete = []
                 try:
-                    res = requests.get(w["url"], verify=False)
+                    res = requests.get(w["url"], verify=True)
                     if res.status_code == 200:
                         content = json.loads(res.content)
                         elements = content["elements"] if "elements" in content else []
                         to_delete = content["to_delete"] if "to_delete" in content else []
                     else:
                         w["status"] = False
                 except:
                     w["status"] = False
 
                 for elem in elements:
                     try:
                         whitelist.add(elem["type"], elem["element"], "watcher")
                         w["status"] = True
                     except:
                         continue
 
                 for elem in to_delete:
                     try:
                         whitelist.delete_by_value(elem["element"])
                         w["status"] = True
                     except:
                         continue
 
         if False in [w["status"] for w in watchers]:
             time.sleep(60)
         else:
             break
 
 
 def watch_misp():
     """
     Retrieve IOCs from misp instances. Each new element is
     tested and then added to the database.
     """
     iocs, misp = IOCs(), MISP()
     instances = [i for i in misp.get_instances()]
 
     while instances:
         for i, ist in enumerate(instances):
             status = misp.test_instance(ist["url"],
                                         ist["apikey"],
                                         ist["verifycert"])
             if status:
                 for ioc in misp.get_iocs(ist["id"]):
                     iocs.add(ioc["type"], ioc["tag"], ioc["tlp"],
                              ioc["value"], "misp-{}".format(ist["id"]))
                 misp.update_sync(ist["id"])
                 instances.pop(i)
         if instances: time.sleep(60)
 
 
 p1 = Process(target=watch_iocs)
 p2 = Process(target=watch_whitelists)
 p3 = Process(target=watch_misp)
 
 p1.start()
 p2.start()
 p3.start()
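The only functional change in this file is that both `requests.get()` calls now verify TLS certificates (`verify=False` → `verify=True`). If a watcher URL is served by an internal CA, verification can stay enabled by pointing `verify` at a CA bundle rather than switching it back off. A minimal sketch, assuming a hypothetical watcher URL and bundle path:

```python
import requests

# Hypothetical values for illustration; the real URLs come from config/watchers.
WATCHER_URL = "https://iocs.example.org/feed.json"
CA_BUNDLE = "/etc/ssl/certs/internal-ca.pem"  # assumption: path to an internal CA bundle

# Certificate verification stays enabled; the custom bundle covers private PKI.
res = requests.get(WATCHER_URL, verify=CA_BUNDLE, timeout=30)
res.raise_for_status()
iocs_list = res.json().get("iocs", [])
```

With verification enabled, the `urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)` call at the top of the file is no longer strictly needed.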