-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathsiteChecker.py
80 lines (68 loc) · 3.08 KB
/
siteChecker.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
#!/usr/bin/python3
#--------------------------------------------------------------------------------------------------
# Name: searcherAndChecker
#
#
# Arguments:
# query: string to pass to DuckDuckGo API as a search query
#
#
#
# About: Takes search query and passes it to the DuckDuckGo API, then gets search results and passes them to
# VirusTotal's API. If one of the search results was detected by virustotal to have a virus then a message is
# printed to the user containing the URL of the infected site.
#
# code below the function body:
# Specific to this version the search query is user input from the console.
#
#--------------------------------------------------------------------------------------------------
import json
from urllib.parse import quote_plus
from urllib.request import urlopen

import requests
def _extract_urls(search_json):
    """Return every 'FirstURL' found in the DuckDuckGo 'RelatedTopics' list.

    Entries in 'RelatedTopics' are either plain topic dicts carrying a
    'FirstURL' key, or category groups whose 'Topics' list holds the
    actual topic dicts.

    Args:
        search_json: parsed JSON dict from the DuckDuckGo Instant Answer API.

    Returns:
        list of result URL strings (possibly empty).
    """
    urls = []
    for entry in search_json.get('RelatedTopics', []):
        if 'FirstURL' in entry:
            urls.append(entry['FirstURL'])
        else:
            # Category group: URLs live one level down under 'Topics'.
            for sub_entry in entry.get('Topics', []):
                if 'FirstURL' in sub_entry:
                    urls.append(sub_entry['FirstURL'])
    return urls


def searcherAndChecker(query):
    """Search DuckDuckGo for *query* and scan every result URL with VirusTotal.

    For each result URL, submits a scan request and fetches the scan report.
    If at least one VirusTotal scanner flagged the URL, prints a warning and
    appends the URL to ``links.txt``.

    Args:
        query: search string passed to the DuckDuckGo Instant Answer API.
    """
    # URL-encode the query so spaces and special characters can't break the
    # request (the original interpolated the raw string into the URL).
    search_url = ('https://api.duckduckgo.com/?q=' + quote_plus(str(query))
                  + '&format=json&pretty=1')
    with urlopen(search_url) as response:  # close the connection deterministically
        search_json = json.loads(response.read().decode('utf8'))

    for url in _extract_urls(search_json):
        # NOTE(review): the two placeholder API keys differ ('xxxxccc' vs
        # 'xxxxxxx'); presumably both should be the same real key — confirm.
        requests.post('https://www.virustotal.com/vtapi/v2/url/scan',
                      data={'apikey': 'xxxxccc', 'url': str(url)})
        report_response = requests.post(
            'https://www.virustotal.com/vtapi/v2/url/report',
            params={'apikey': 'xxxxxxx', 'resource': str(url)})
        try:
            # 'positives' is the number of scanners that flagged the URL.
            positives = int(report_response.json()['positives'])
        except (ValueError, KeyError, TypeError):
            # Response wasn't valid JSON, or the report carried no results
            # (e.g. URL not yet analyzed) — skip this URL, as before.
            continue
        if positives > 0:
            print("the site " + str(url)
                  + " was detected by one of the scanners to have a virus")
            # Context manager guarantees the file is closed even on error.
            with open('links.txt', 'a') as infected_log:
                infected_log.write(str(url) + '\n')
if __name__ == '__main__':
    # Prompt only when run as a script, so importing this module for reuse
    # of searcherAndChecker() has no interactive side effects.
    query = input('what do you want to search to have me check?\n')
    searcherAndChecker(query)