Skip to content

Commit 718fc09

Browse files
authored
Merge pull request #69 from certego/develop
0.4.0
2 parents eb66ddf + d5e233f commit 718fc09

33 files changed

+1289
-128
lines changed

.env_template

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
### ---- Environment variables for docker-compose ----
2+
### All services specified in all compose-files in variable COMPOSE_FILE will be built/ran
3+
### By default, when you use `docker-compose up` only docker-compose.yml is read
4+
### For each additional integration, the location of its docker-compose.<>.yml file should be appended to
5+
### the COMPOSE_FILE variable, each separated with ':'. If you are on Windows, replace all ':' with ';'.
6+
### Reference to Docker's official Docs: https://docs.docker.com/compose/reference/envvars/#compose_file#compose_file
7+
8+
## Default
9+
COMPOSE_FILE=docker-compose.yml
10+
11+
## To run all additional integrations
12+
#COMPOSE_FILE=docker-compose.yml:./integrations/docker-compose.peframe.yml
13+
14+
## To run tests or for local development
15+
#COMPOSE_FILE=docker-compose-for-tests.yml
16+
#COMPOSE_FILE=docker-compose-for-tests.yml:./integrations/docker-compose-for-tests.peframe.yml
17+
18+
## For travis
19+
#COMPOSE_FILE=docker-compose-for-travis.yml
20+
#COMPOSE_FILE=docker-compose-for-travis.yml:./integrations/docker-compose.peframe.yml

.gitignore

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,8 @@
11
.idea
2+
.vscode
23
__pycache__
34
test_files
45
env_file_app
5-
env_file_postgres
6+
env_file_postgres
7+
env_file_integrations
8+
.env

Dockerfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ RUN pip3 install --compile -r requirements.txt
1818
COPY . $PYTHONPATH
1919

2020
RUN touch /var/log/intel_owl/django/api_app.log /var/log/intel_owl/django/api_app_errors.log \
21-
touch /var/log/intel_owl/django/celery.log /var/log/intel_owl/django/celery_errors.log \
21+
&& touch /var/log/intel_owl/django/celery.log /var/log/intel_owl/django/celery_errors.log \
2222
&& chown -R www-data:www-data /var/log/intel_owl /opt/deploy/ \
2323
# this is because stringsifter creates this directory during the build and causes celery to crash
2424
&& rm -rf /root/.local

Dockerfile_nginx

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
FROM library/nginx:1.16.1-alpine
2+
VOLUME /var/log/nginx

README.md

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -39,6 +39,8 @@ Main features:
3939
* Yara (Community, Neo23x0 and Intezer rules are already available. There's the chance to add your own rules)
4040

4141
### External services available
42+
#### required paid or trial api key
43+
* GreyNoise v2
4244
#### required paid or free api key
4345
* VirusTotal v2 + v3
4446
* HybridAnalysis
@@ -47,23 +49,27 @@ Main features:
4749
* Hunter.io - Email Hunting
4850
* ONYPHE
4951
* Censys.io
52+
* SecurityTrails
5053
#### required free api key
5154
* GoogleSafeBrowsing
5255
* AbuseIPDB
5356
* Shodan
5457
* HoneyDB
5558
* AlienVault OTX
5659
* MaxMind
60+
* Auth0
5761
#### needed access request
5862
* CIRCL PassiveDNS + PassiveSSL
5963
#### without api key
6064
* Fortiguard URL Analyzer
61-
* GreyNoise Alpha API
65+
* GreyNoise Alpha API v1
6266
* Talos Reputation
6367
* Tor Project
6468
* Robtex
6569
* Threatminer
6670
* Abuse.ch MalwareBazaar
71+
* Abuse.ch URLhaus
72+
* Active DNS
6773

6874
### Documentation
6975
[![Documentation Status](https://readthedocs.org/projects/intelowl/badge/?version=latest)](https://intelowl.readthedocs.io/en/latest/?badge=latest)
Lines changed: 88 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,88 @@
1+
import requests
2+
import traceback
3+
import logging
4+
import time
5+
6+
from api_app.exceptions import AnalyzerRunException
7+
from api_app.script_analyzers import general
8+
9+
logger = logging.getLogger(__name__)
10+
11+
12+
def run(analyzer_name, job_id, filepath, filename, md5, additional_config_params):
    """Run the PEframe analyzer on an uploaded file.

    Submits the binary associated with *job_id* to the peframe service,
    polls for the analysis result and stores it in the job report.

    :param analyzer_name: analyzer configuration name
    :param job_id: job identifier
    :param filepath: path of the analyzed file (unused, kept for interface compatibility)
    :param filename: original name of the analyzed file (used in error messages)
    :param md5: md5 of the analyzed file (used in error messages)
    :param additional_config_params: may contain 'max_tries' (polling attempts, default 15)
    :return: the report dict for this analyzer
    :rtype: dict
    """
    logger.info(f"started analyzer {analyzer_name} job_id {job_id}")
    report = general.get_basic_report_template(analyzer_name)
    try:
        # get binary
        binary = general.get_binary(job_id)
        # run analysis on the peframe side container
        files = {
            "file": binary
        }
        r = requests.post("http://peframe:4000/run_analysis", files=files)
        r_data = r.json()
        if r.status_code != 200:
            raise AnalyzerRunException(r_data['error'])
        max_tries = additional_config_params.get('max_tries', 15)
        res = _poll_for_result(job_id, r_data['md5'], max_tries)

        # limit the length of the strings dump
        if 'strings' in res and 'dump' in res['strings']:
            res['strings']['dump'] = res['strings']['dump'][:100]

        report['report'] = res
    except AnalyzerRunException as e:
        # include the actual filename instead of a placeholder in the message
        error_message = f"job_id:{job_id} analyzer:{analyzer_name} md5:{md5} filename:{filename} Analyzer Error: {e}"
        logger.error(error_message)
        report['errors'].append(error_message)
        report['success'] = False
    except Exception as e:
        # logger.exception already records the full traceback
        error_message = f"job_id:{job_id} analyzer:{analyzer_name} md5:{md5} filename:{filename} Unexpected Error: {e}"
        logger.exception(error_message)
        report['errors'].append(str(e))
        report['success'] = False
    else:
        report['success'] = True

    general.set_report_and_cleanup(job_id, report)

    logger.info(f"ended analyzer:{analyzer_name} job_id:{job_id}")

    return report
55+
56+
57+
def _poll_for_result(job_id, file_hash, max_tries):
    """Poll the peframe service until the analysis for *file_hash* finishes.

    :param job_id: job identifier (used for logging only)
    :param file_hash: md5 returned by the peframe submission endpoint
    :param max_tries: maximum number of polling attempts
    :return: the JSON report returned by the service
    :raises AnalyzerRunException: on connection errors or when max_tries
        polls are exhausted without reaching a final status
    """
    poll_distance = 5  # seconds between two consecutive polls
    for chance in range(max_tries):
        time.sleep(poll_distance)
        logger.info(f"PEframe polling. Try n:{chance+1}, job_id:{job_id}. Starting the query")
        try:
            status_code, json_data = _query_for_result(file_hash)
        except requests.RequestException as e:
            raise AnalyzerRunException(e)
        analysis_status = json_data.get('status', None)
        if analysis_status in ["success", "reported_with_fails", "failed"]:
            # final state reached: even "failed" carries a usable report
            return json_data
        if status_code == 404:
            # report not available yet, just retry
            continue
        logger.info(f"PEframe polling. Try n:{chance+1}, job_id:{job_id}, status:{analysis_status}")
    raise AnalyzerRunException(f"max peframe polls tried without getting any result. job_id:{job_id}")
80+
81+
82+
def _query_for_result(file_hash):
    """Fetch the peframe report for *file_hash*.

    :param file_hash: md5 of the submitted file
    :return: tuple (HTTP status code, decoded JSON body)
    """
    headers = {
        'Accept': 'application/json'
    }
    resp = requests.get(f"http://peframe:4000/get_report/{file_hash}", headers=headers)
    data = resp.json()
    return resp.status_code, data
Lines changed: 176 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,176 @@
1+
"""Module to retrieve active DNS resolution
2+
"""
3+
4+
import traceback
5+
import requests
6+
import ipaddress
7+
import socket
8+
9+
from api_app.exceptions import AnalyzerConfigurationException, AnalyzerRunException
10+
from api_app.script_analyzers import general
11+
12+
import logging
13+
logger = logging.getLogger(__name__)
14+
15+
16+
def run(analyzer_name, job_id, observable_name, observable_classification, additional_config_params):
    """Dispatch an active DNS resolution to the configured backend.

    The backend is selected through ``additional_config_params['service']``:

    * ``google`` -- Google DoH (DNS over HTTPS), domains only
    * ``cloudflare`` -- Cloudflare DoH (DNS over HTTPS), domains only
    * ``classic`` -- plain DNS via the socket module (domains and,
      for reverse lookup, IPs)

    :param analyzer_name: analyzer configuration name (from analyzer_config.json)
    :type analyzer_name: str
    :param job_id: job identifier
    :type job_id: str
    :param observable_name: the observable to resolve
    :type observable_name: str
    :param observable_classification: 'ip' or 'domain' ('ip' only for classic)
    :type observable_classification: str
    :param additional_config_params: dict holding the 'service' selector
    :type additional_config_params: dict
    :return: report whose 'report' key holds
        {'name': observable_name, 'resolution': ip | 'NXDOMAIN' | ''}
    :rtype: dict
    """
    logger.info(f"started analyzer {analyzer_name} job_id {job_id} observable {observable_name}")
    report = general.get_basic_report_template(analyzer_name)

    # map each supported service name to its resolver implementation
    resolvers = {
        'google': _doh_google,
        'cloudflare': _doh_cloudflare,
        'classic': _classic_dns,
    }

    try:
        service = additional_config_params.get('service', '')
        resolver = resolvers.get(service)
        if resolver is None:
            raise AnalyzerConfigurationException(f'Service selected: {service} is not available')
        resolver(job_id, analyzer_name, observable_classification, observable_name, report)

    except (AnalyzerConfigurationException, AnalyzerRunException) as e:
        error_message = f"job_id:{job_id} analyzer:{analyzer_name} " \
                        f"observable_name:{observable_name} Analyzer error {e}"
        logger.error(error_message)
        report['errors'].append(error_message)
        report['success'] = False
    except Exception as e:
        traceback.print_exc()
        error_message = f"job_id:{job_id} analyzer:{analyzer_name} " \
                        f"observable_name:{observable_name} Unexpected error {e}"
        logger.exception(error_message)
        report['errors'].append(str(e))
        report['success'] = False

    general.set_report_and_cleanup(job_id, report)

    logger.info(f"ended analyzer {analyzer_name} job_id {job_id} observable {observable_name}")

    return report
74+
75+
76+
def _doh_google(job_id, analyzer_name, observable_classification, observable_name, report):
    """Resolve a domain through Google's DNS-over-HTTPS JSON API.

    Fills *report* in place: 'report' gets
    {'name': observable_name, 'resolution': <ip | 'NXDOMAIN' | ''>},
    'success'/'errors' are updated accordingly.
    Only the 'domain' classification is supported.
    """
    if observable_classification != 'domain':
        error_message = f"job_id:{job_id}, analyzer:{analyzer_name}, " \
                        f"observable_classification:{observable_classification}, " \
                        f"observable_name:{observable_name}, " \
                        f"cannot analyze something different from domain"
        logger.error(error_message)
        report['errors'].append(error_message)
        report['success'] = False
        return

    try:
        # type 1 = A record; the server-side filter was reported unreliable,
        # hence the manual per-answer check below
        query_params = {
            "name": observable_name,
            "type": 1
        }
        response = requests.get('https://dns.google.com/resolve', params=query_params)
        response.raise_for_status()
        answers = response.json().get("Answer", [])
        # first A-type answer wins; '' when no valid A answer was returned
        resolved_ip = next(
            (answer.get('data', 'NXDOMAIN') for answer in answers if answer.get('type', 1) == 1),
            '',
        )
        if not resolved_ip:
            logger.error(f"observable {observable_name} active_dns query retrieved no valid A answer: {answers}")
        report['report'] = {'name': observable_name, 'resolution': resolved_ip}
        report['success'] = True
    except requests.exceptions.RequestException as error:
        error_message = f"job_id:{job_id}, analyzer:{analyzer_name}, " \
                        f"observable_classification:{observable_classification}, " \
                        f"observable_name:{observable_name}, RequestException {error}"
        logger.error(error_message)
        report['errors'].append(error_message)
        report['success'] = False
112+
113+
114+
def _doh_cloudflare(job_id, analyzer_name, observable_classification, observable_name, report):
    """Resolve a domain through Cloudflare's DNS-over-HTTPS JSON endpoint.

    Fills *report* in place: 'report' gets
    {'name': observable_name, 'resolution': <ip | 'NXDOMAIN'>},
    'success'/'errors' are updated accordingly.
    Only the 'domain' classification is supported.
    """
    if observable_classification != 'domain':
        error_message = f"job_id:{job_id}, analyzer:{analyzer_name}, " \
                        f"observable_classification:{observable_classification}, " \
                        f"observable_name:{observable_name}, " \
                        f"cannot analyze something different from domain"
        logger.error(error_message)
        report['errors'].append(error_message)
        report['success'] = False
        return

    try:
        query = {
            'name': observable_name,
            'type': 'A',
            'ct': 'application/dns-json',
        }
        response = requests.session().get('https://cloudflare-dns.com/dns-query', params=query)
        response.raise_for_status()
        answers = response.json().get('Answer', [])
        # first resolution, or NXDOMAIN if the domain does not exist
        resolution = answers[0].get('data', 'NXDOMAIN') if answers else 'NXDOMAIN'
        report['report'] = {'name': observable_name, 'resolution': resolution}
        report['success'] = True
    except requests.exceptions.RequestException as error:
        error_message = f"job_id:{job_id}, analyzer:{analyzer_name}, " \
                        f"observable_classification:{observable_classification}, " \
                        f"observable_name:{observable_name}, RequestException {error}"
        logger.error(error_message)
        report['errors'].append(error_message)
        report['success'] = False
146+
147+
148+
def _classic_dns(job_id, analyzer_name, observable_classification, observable_name, report):
149+
result = {}
150+
if observable_classification == 'ip':
151+
ipaddress.ip_address(observable_name)
152+
try:
153+
domains = socket.gethostbyaddr(observable_name)
154+
# return a tuple (hostname, aliaslist, ipaddrlist), select hostname
155+
# if does not exist return socket.herror
156+
if domains:
157+
resolution = domains[0]
158+
except socket.herror:
159+
resolution = ''
160+
result = {'name': observable_name, 'resolution': resolution}
161+
elif observable_classification == 'domain':
162+
try:
163+
resolution = socket.gethostbyname(observable_name)
164+
except socket.gaierror:
165+
resolution = 'NXDOMAIN'
166+
result = {'name': observable_name, 'resolution': resolution}
167+
else:
168+
error_message = f"job_id:{job_id}, analyzer:{analyzer_name}, " \
169+
f"observable_classification: {observable_classification}, " \
170+
f"observable_name:{observable_name}, not analyzable"
171+
logger.error(error_message)
172+
report['errors'].append(error_message)
173+
report['success'] = False
174+
175+
report['report'] = result
176+
report['success'] = True

api_app/script_analyzers/observable_analyzers/honeydb_twitter_scan.py renamed to api_app/script_analyzers/observable_analyzers/auth0.py

Lines changed: 9 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -15,24 +15,22 @@ def run(analyzer_name, job_id, observable_name, observable_classification, addit
1515
"".format(analyzer_name, job_id, observable_name))
1616
report = general.get_basic_report_template(analyzer_name)
1717
try:
18-
api_key_name = additional_config_params.get('api_key_name', 'HONEYDB_API_KEY')
19-
api_id_name = additional_config_params.get('api_id_name', 'HONEYDB_API_ID')
18+
api_key_name = additional_config_params.get('api_key_name', '')
19+
if not api_key_name:
20+
api_key_name = "AUTH0_KEY"
2021
api_key = secrets.get_secret(api_key_name)
21-
api_id = secrets.get_secret(api_id_name)
2222
if not api_key:
23-
raise AnalyzerRunException("no HoneyDB API Key retrieved")
24-
if not api_id:
25-
raise AnalyzerRunException("no HoneyDB API ID retrieved")
23+
raise AnalyzerRunException("no api key retrieved")
2624

2725
headers = {
28-
'X-HoneyDb-ApiKey': api_key,
29-
'X-HoneyDb-ApiId': api_id
26+
'X-Auth-Token': api_key
3027
}
31-
url = f'https://honeydb.io/api/twitter-threat-feed/{observable_name}'
28+
url = 'https://signals.api.auth0.com/v2.0/ip/{}'.format(observable_name)
3229
response = requests.get(url, headers=headers)
3330
response.raise_for_status()
3431

3532
json_response = response.json()
33+
# pprint.pprint(json_response)
3634
report['report'] = json_response
3735
except AnalyzerRunException as e:
3836
error_message = "job_id:{} analyzer:{} observable_name:{} Analyzer error {}" \
@@ -55,4 +53,5 @@ def run(analyzer_name, job_id, observable_name, observable_classification, addit
5553
logger.info("ended analyzer {} job_id {} observable {}"
5654
"".format(analyzer_name, job_id, observable_name))
5755

58-
return report
56+
return report
57+

0 commit comments

Comments
 (0)