Skip to content

Commit

Permalink
Merge pull request #1262 from vpiserchia/quick-updates
Browse files Browse the repository at this point in the history
Quick updates
  • Loading branch information
nusantara-self authored Oct 17, 2024
2 parents 295562a + df0c513 commit 5eca0e4
Show file tree
Hide file tree
Showing 9 changed files with 89 additions and 49 deletions.
37 changes: 33 additions & 4 deletions analyzers/AbuseIPDB/abuseipdb.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,8 @@ class AbuseIPDBAnalyzer(Analyzer):
def extract_abuse_ipdb_category(category_number):
# Reference: https://www.abuseipdb.com/categories
mapping = {
"1": "DNS Compromise",
"2": "DNS Poisoning",
"3": "Fraud Orders",
"4": "DDOS Attack",
"5": "FTP Brute-Force",
Expand All @@ -36,7 +38,7 @@ def extract_abuse_ipdb_category(category_number):
"22": "SSH",
"23": "IoT Targeted",
}
return mapping.get(str(category_number), 'unknown category')
return mapping.get(str(category_number), 'Unknown Category')

def run(self):

Expand Down Expand Up @@ -76,11 +78,38 @@ def run(self):
except Exception as e:
self.unexpectedError(e)


def summary(self, raw):
taxonomies = []
taxonomies = [] # level, namespace, predicate, value

is_whitelisted = False
data = {}
if raw and 'values' in raw:
data = raw['values'][0]['data']
else:
return {'taxonomies': []}

if data.get('isWhitelisted', False):
is_whitelisted = True
taxonomies.append(self.build_taxonomy('info', 'AbuseIPDB', 'Is Whitelist', 'True'))

if data.get('isTor', False):
taxonomies.append(self.build_taxonomy('info', 'AbuseIPDB', 'Is Tor', 'True'))

if raw and 'values' in raw and raw['values'][0]['data']['totalReports'] > 0 :
taxonomies.append(self.build_taxonomy('malicious', 'AbuseIPDB', 'Records', raw['values'][0]['data']['totalReports']))
if 'usageType' in data:
taxonomies.append(self.build_taxonomy('info', 'AbuseIPDB', 'Usage Type', data['usageType']))

if 'abuseConfidenceScore' in data:
if data['abuseConfidenceScore'] > 0:
taxonomies.append(self.build_taxonomy('suspicious', 'AbuseIPDB', 'Abuse Confidence Score', data['abuseConfidenceScore']))
else:
taxonomies.append(self.build_taxonomy('safe', 'AbuseIPDB', 'Abuse Confidence Score', 0))

if data['totalReports'] > 0 :
if is_whitelisted:
taxonomies.append(self.build_taxonomy('info', 'AbuseIPDB', 'Records', data['totalReports']))
else:
taxonomies.append(self.build_taxonomy('malicious', 'AbuseIPDB', 'Records', data['totalReports']))
else:
taxonomies.append(self.build_taxonomy('safe', 'AbuseIPDB', 'Records', 0))

Expand Down
18 changes: 9 additions & 9 deletions analyzers/Abuse_Finder/abusefinder.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,16 +15,16 @@
class AbuseFinderAnalyzer(Analyzer):

def summary(self, raw):
    """Build short taxonomy entries from the abuse_finder report.

    :param raw: analyzer report dict, expected to hold an 'abuse_finder'
                entry whose 'abuse' value is a list of contact addresses.
    :return: dict with a 'taxonomies' list for display in TheHive.
    """
    taxonomies = []
    try:
        if raw and raw['abuse_finder'].get('abuse'):
            for abuse in raw['abuse_finder']['abuse']:
                taxonomies.append(self.build_taxonomy("info", "Abuse_Finder", "Address", abuse))
        else:
            taxonomies.append(self.build_taxonomy("info", "Abuse_Finder", "Address", "None"))
    except (KeyError, TypeError, AttributeError):
        # Malformed report shape (e.g. 'abuse_finder' missing or not a dict):
        # keep whatever was collected so far. A bare `except` previously
        # swallowed *every* error here, hiding real bugs.
        pass
    return {"taxonomies": taxonomies}

def abuse(self):
if self.data_type == "ip":
Expand Down
2 changes: 1 addition & 1 deletion analyzers/Censys/requirements.txt
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
cortexutils
censys==1.1.1
censys==2.2.11
23 changes: 12 additions & 11 deletions analyzers/MISPWarningLists/mispwarninglists.py
Original file line number Diff line number Diff line change
Expand Up @@ -161,18 +161,19 @@ def run(self):
"SELECT list_name, list_version, concat(subdomain, '.', domain, '.', tld) as value FROM warninglists WHERE (subdomain = '%s' or subdomain = '*') and domain = '%s' and tld = '%s'"
% (subdomain, domain, tld)
)
values = self.engine.execute(sql)
with self.engine.connect() as conn:
values = conn.execute(db.text(sql))
if values.rowcount > 0:
for row in values:
results.append(
{
key: value
for (key, value) in zip(
["list_name", "list_version", "value"], row
)
}
)
self.engine.dispose()
if values.rowcount > 0:
for row in values:
results.append(
{
key: value
for (key, value) in zip(
["list_name", "list_version", "value"], row
)
}
)
self.report({"results": results, "mode": "db", "is_uptodate": "N/A"})

def summary(self, raw):
Expand Down
16 changes: 10 additions & 6 deletions analyzers/MISPWarningLists/warninglists_create_db.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,14 +13,15 @@


import psycopg2.extras
from sqlalchemy import Table, Column, Integer, String, MetaData, ForeignKey, Index, create_engine
from sqlalchemy.exc import ArgumentError
from sqlalchemy import Table, Column, Integer, String, MetaData, Index, create_engine
from sqlalchemy.sql import select
from sqlalchemy.dialects.postgresql import CIDR

# Placeholder — replace with a real PostgreSQL connection string before running,
# e.g. "postgresql://user:password@host:5432/dbname".
conn_string = "<insert_postgres_conn_string>"
# Glob pattern locating every MISP warninglist JSON file under the repo checkout.
warninglists_path = "misp-warninglists/**/list.json"

engine = create_engine(conn_string, use_batch_mode=True)
engine = create_engine(conn_string)
conn = engine.connect()

# UPDATE TLD FROM MOZILLA
Expand Down Expand Up @@ -148,7 +149,10 @@


# CHECK IF OLD RELEASE ARE IN DB
s = select([warninglists.c.list_name, warninglists.c.list_version]).distinct()
try:
s = select([warninglists.c.list_name, warninglists.c.list_version]).distinct()
except ArgumentError:
s = select(warninglists.c.list_name, warninglists.c.list_version).distinct()
last_versions = [x for x in conn.execute(s)]
print(f"{len(last_versions)} list already available in db")

Expand Down Expand Up @@ -189,13 +193,13 @@
try:
warninglists_address_idx.create(engine)
except:
logging.error(f"warninglists_address_idx already exists")
logging.error("warninglists_address_idx already exists")
try:
warninglists_hash_idx.create(engine)
except:
logging.error(f"warninglists_hash_idx already exists")
logging.error("warninglists_hash_idx already exists")
try:
warninglists_domain_idx.create(engine)
except:
logging.error(f"warninglists_domain_idx already exists")
logging.error("warninglists_domain_idx already exists")
engine.dispose()
4 changes: 2 additions & 2 deletions analyzers/MalwareClustering/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -3,5 +3,5 @@ requests
pyimpfuzzy==0.5
# py2neo is EOL and older versions were deleted from pipy https://github.com/neo4j-contrib/py2neo
py2neo==2021.2.4
apiscout==1.1.5
python-magic==0.4.22
apiscout
python-magic==0.4.27
2 changes: 1 addition & 1 deletion analyzers/Malwares/malwares_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ class Api():

def __init__(self, api_key=None):
self.api_key = api_key
self.base = 'https://public.api.malwares.com/v3/'
self.base = 'https://public.api.ctx.io/api/v22/'
self.version = 2
if api_key is None:
raise ApiError("You must supply a valid Malwares API key.")
Expand Down
6 changes: 5 additions & 1 deletion analyzers/ProofPoint/proofpoint_lookup.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,11 @@ def run(self):
filename = self.get_param('attachment.name', 'noname.ext')
filepath = self.get_param('file', None, 'File is missing')
with open(filepath, "rb") as f:
    try:
        # hashlib.file_digest is only available on Python >= 3.11.
        digest = hashlib.file_digest(f, "sha256")
    except AttributeError:
        # Fallback for Python <= 3.10: hash the bytes from the handle we
        # already opened in binary mode. The previous fallback re-opened the
        # file in *text* mode, which leaked a handle and raised TypeError
        # because sha256() requires bytes, not str.
        digest = hashlib.sha256(f.read())
sha256 = digest.hexdigest()
elif self.data_type == 'hash' and len(self.get_data()) == 64:
sha256 = self.get_data()
Expand Down
30 changes: 16 additions & 14 deletions analyzers/Virusshare/getHashes.sh
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@


# Print tool banner and invocation help to stdout.
display_usage() {
	echo "getHashes v0.3"
	echo " Fetch all Virusshare.com hashes"
	echo -e "\n Usage: $0 <path> \n"
}
Expand All @@ -20,17 +20,19 @@ if [ ! -d $1 ]; then

fi

cd $1
for u in `curl https://virusshare.com/hashes.4n6 | grep -E "VirusShare_[0-9]{5}\.md5" | c\
ut -d\" -f2 | cut -d\/ -f2`
WD=$1
declare -a base_urls=($(printf 'url=https://virusshare.com/hashfiles/%0.s\n' {1..1}))
declare -a base_outs=($(printf 'output=./%0.s\n' {1..1}))

pushd $WD
while mapfile -t -n 8 ary && ((${#ary[@]}));
do
echo $u
if [ -e $1/$u ]; then
echo "File already downloaded"
else
wget https://virusshare.com/hashes/$u
sleep 3
fi

done | tee -a ../$0.log
cd ..
rm -f ../config
IFS=,
eval echo "${base_urls[*]}"{"${ary[*]}"} | tr " " "\n" >> ../config
eval echo "${base_outs[*]}"{"${ary[*]}"} | tr " " "\n" >> ../config
curl -s -N --parallel --parallel-immediate --parallel-max 8 --config config | tee -a ../$0.log
sleep 3
done <<< `curl -s -L https://virusshare.com/hashes.4n6 | grep -E "VirusShare_[0-9]{5}\.md5" | cut -d\" -f2 | cut -d\/ -f2`
popd

0 comments on commit 5eca0e4

Please sign in to comment.