Skip to content

Commit

Permalink
refactor, include curl verification commands, consolidate HTTP clients
Browse files Browse the repository at this point in the history
  • Loading branch information
dolevf committed Mar 19, 2022
1 parent 1a3f4c1 commit fd197ee
Show file tree
Hide file tree
Showing 13 changed files with 214 additions and 157 deletions.
32 changes: 27 additions & 5 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ Options:
-h, --help show this help message and exit
-t URL, --target=URL target url with the path
-H HEADER, --header=HEADER
Append Header to the request '{"Authorizathion":
Append Header to the request '{"Authorization":
"Bearer eyjt"}'
-o OUTPUT_JSON, --output=OUTPUT_JSON
Output results to stdout (JSON)
Expand All @@ -51,19 +51,41 @@ Starting...
[LOW] GraphQL Playground UI (Information Leakage)
[HIGH] Alias Overloading with 100+ aliases is allowed (Denial of Service)
[HIGH] Queries are allowed with 1000+ of the same repeated field (Denial of Service)
```

Test a website, dump to a parse-able JSON output, cURL reproduction command
```
python3 main.py -t https://mywebsite.com/graphql -o json
{'Field Suggestions': {'severity': 'LOW', 'impact': 'Information Leakage', 'description': 'Field Suggestions are Enabled'}, 'Introspection': {'severity': 'HIGH', 'impact': 'Information Leakage', 'description': 'Introspection Query Enabled'}, 'Possible CSRF (GET)': {'severity': 'LOW', 'impact': 'Possible CSRF', 'description': 'HTTP GET method supported (maybe CSRF)'}, 'Alias Overloading': {'severity': 'HIGH', 'impact': 'Denial of Service', 'description': 'Alias Overloading with 100+ aliases is allowed'}, 'Field Duplication': {'severity': 'HIGH', 'impact': 'Denial of Service', 'description': 'Queries are allowed with 1000+ of the same repeated field'}, 'Directive Overloading': {'severity': 'HIGH', 'impact': 'Denial of Service', 'description': 'Multiple duplicated directives allowed in a query'}}
{'curl_verify': 'curl -X POST -H "User-Agent: graphql-cop/1.2" -H '
'"Accept-Encoding: gzip, deflate" -H "Accept: */*" -H '
'"Connection: keep-alive" -H "Content-Length: 33" -H '
'"Content-Type: application/json" -d \'{"query": "query { '
'__typename }"}\' \'http://localhost:5013/graphql\'',
'description': 'Tracing is Enabled',
'impact': 'Information Leakage',
'result': False,
'severity': 'INFO',
'title': 'Trace Mode'},
{'curl_verify': 'curl -X POST -H "User-Agent: graphql-cop/1.2" -H '
'"Accept-Encoding: gzip, deflate" -H "Accept: */*" -H '
'"Connection: keep-alive" -H "Content-Length: 64" -H '
'"Content-Type: application/json" -d \'{"query": "query { '
'__typename @aa@aa@aa@aa@aa@aa@aa@aa@aa@aa }"}\' '
"'http://localhost:5013/graphql'",
'description': 'Multiple duplicated directives allowed in a query',
'impact': 'Denial of Service',
'result': True,
'severity': 'HIGH',
'title': 'Directive Overloading'}]
```

Test a website
Using `graphql-cop` through a Proxy (Eg: Burp Suite) and adding custom headers (Eg: Authorization):
Test a website using `graphql-cop` through a proxy (e.g. Burp Suite) with custom headers (e.g. Authorization):

```
$ python3 graphql-cop.py -t https://mywebsite.com/graphql --proxy --header '{"Authorization": "Bearer token_here"}'
GraphQL Cop 1.1
GraphQL Cop 1.2
Security Auditor for GraphQL
Dolev Farhi & Nick Aleks
Expand Down
84 changes: 15 additions & 69 deletions graphql-cop.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
#!/usr/env/python3
import sys

from json import loads
from optparse import OptionParser
from version import VERSION
from config import HEADERS
from json import loads
from urllib.parse import urlparse

from lib.tests.info_field_suggestions import field_suggestions
Expand All @@ -21,7 +21,7 @@

parser = OptionParser(usage='%prog -t http://example.com -o json')
parser.add_option('-t', '--target', dest='url', help='target url with the path')
parser.add_option('-H', '--header', dest='header', help='Append Header to the request \'{"Authorizathion": "Bearer eyjt"}\'')
parser.add_option('-H', '--header', dest='header', help='Append Header to the request \'{"Authorization": "Bearer eyjt"}\'')
parser.add_option('-o', '--output', dest='output_json',
help='Output results to stdout (JSON)', default=False)
parser.add_option('--proxy', '-x', dest='proxy', action='store_true', default=False,
Expand Down Expand Up @@ -55,7 +55,7 @@
print("Cannot cast %s into header dictionary. Ensure the format \'{\"key\": \"value\"}\'."%(options.header))

if not urlparse(options.url).scheme:
print("Url missing scheme (http:// or https://). Ensure Url contains a scheme.")
    print("URL missing scheme (http:// or https://). Ensure URL contains a scheme.")
sys.exit(1)
else:
url = options.url
Expand All @@ -64,73 +64,19 @@
print(url, 'does not seem to be running GraphQL.')
sys.exit(1)

json_output = {}

if field_suggestions(url, proxy, HEADERS):
# Field Suggestions
json_output['Field Suggestions'] = {}
json_output['Field Suggestions']['severity'] = 'LOW'
json_output['Field Suggestions']['impact'] = 'Information Leakage'
json_output['Field Suggestions']['description'] = 'Field Suggestions are Enabled'

if introspection(url, proxy, HEADERS):
# Introspection
json_output['Introspection'] = {}
json_output['Introspection']['severity'] = 'HIGH'
json_output['Introspection']['impact'] = 'Information Leakage'
json_output['Introspection']['description'] = 'Introspection Query Enabled'

if detect_graphiql(url, proxy, HEADERS):
# Playground
json_output['GraphiQL Playground'] = {}
json_output['GraphiQL Playground']['severity'] = 'LOW'
json_output['GraphiQL Playground']['impact'] = 'Information Leakage'
json_output['GraphiQL Playground']['description'] = 'GraphiQL Explorer Enabled'

if get_method_support(url, proxy, HEADERS):
# HTTP GET method support
json_output['Possible CSRF (GET)'] = {}
json_output['Possible CSRF (GET)']['severity'] = 'LOW'
json_output['Possible CSRF (GET)']['impact'] = 'Possible CSRF'
json_output['Possible CSRF (GET)']['description'] = 'HTTP GET method supported (maybe CSRF)'

if alias_overloading(url, proxy, HEADERS):
# Alias Overloading
json_output['Alias Overloading'] = {}
json_output['Alias Overloading']['severity'] = 'HIGH'
json_output['Alias Overloading']['impact'] = 'Denial of Service'
json_output['Alias Overloading']['description'] = 'Alias Overloading with 100+ aliases is allowed'

if batch_query(url, proxy, HEADERS):
# Batch Queries
json_output['Batch Queries'] = {}
json_output['Batch Queries']['severity'] = 'HIGH'
json_output['Batch Queries']['impact'] = 'Denial of Service'
json_output['Batch Queries']['description'] = 'Batch queries allowed with 10+ simultaneous queries)'

if field_duplication(url, proxy, HEADERS):
# Field Duplication
json_output['Field Duplication'] = {}
json_output['Field Duplication']['severity'] = 'HIGH'
json_output['Field Duplication']['impact'] = 'Denial of Service'
json_output['Field Duplication']['description'] = 'Queries are allowed with 500 of the same repeated field'

if trace_mode(url, proxy, HEADERS):
# Tracing mode
json_output['Tracing Mode'] = {}
json_output['Tracing Mode']['severity'] = 'INFORMATIONAL'
json_output['Tracing Mode']['impact'] = 'Information Leakage'
json_output['Tracing Mode']['description'] = 'Tracing is enabled'
tests = [field_suggestions, introspection, detect_graphiql,
get_method_support, alias_overloading, batch_query,
field_duplication, trace_mode, directive_overloading]

if directive_overloading(url, proxy, HEADERS):
# Directive Overloading
json_output['Directive Overloading'] = {}
json_output['Directive Overloading']['severity'] = 'HIGH'
json_output['Directive Overloading']['impact'] = 'Denial of Service'
json_output['Directive Overloading']['description'] = 'Multiple duplicated directives allowed in a query'
json_output = []

for test in tests:
json_output.append(test(url, proxy, HEADERS))

if options.output_json == 'json':
print(json_output)
from pprint import pprint
pprint(json_output)
else:
for k, v in json_output.items():
print('[{}] {} - {} ({})'.format(v['severity'], k, v['description'], v['impact']))
for i in json_output:
print('[{}] {} - {} ({})'.format(i['severity'], i['title'], i['description'], i['impact']))

21 changes: 15 additions & 6 deletions lib/tests/dos_alias_overloading.py
Original file line number Diff line number Diff line change
@@ -1,21 +1,30 @@
"""Alias overloading tests."""
from lib.utils import graph_query
from lib.utils import graph_query, curlify


def alias_overloading(url, proxy, headers):
    """Check whether the endpoint executes a query with 100+ aliases.

    Sends a single query aliasing ``__typename`` 101 times (alias0..alias100).
    If the last alias resolves, the server ran them all — a Denial of Service
    vector.

    :param url: target GraphQL endpoint URL
    :param proxy: proxies dict passed through to the HTTP client
    :param headers: headers dict passed through to the HTTP client
    :return: result dict with 'result', 'title', 'description', 'impact',
             'severity' and a 'curl_verify' reproduction command
    """
    res = {
        'result': False,
        'title': 'Alias Overloading',
        'description': 'Alias Overloading with 100+ aliases is allowed',
        'impact': 'Denial of Service',
        'severity': 'HIGH',
        'curl_verify': ''
    }
    aliases = ''

    # Build alias0..alias100 — 101 aliased copies of __typename.
    for i in range(0, 101):
        aliases += 'alias{}:__typename \n'.format(i)

    gql_response = graph_query(url, proxies=proxy, headers=headers, payload='query { ' + aliases + ' }')

    res['curl_verify'] = curlify(gql_response)

    try:
        # alias100 present in the data => the server executed every alias.
        if gql_response.json()['data']['alias100']:
            res['result'] = True
    except Exception:
        # Best-effort probe: any parse/shape failure means "not vulnerable".
        pass

    return res
23 changes: 16 additions & 7 deletions lib/tests/dos_batch.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,26 @@
"""Batch tests."""
from lib.utils import graph_batch_query
from lib.utils import graph_query, curlify


def batch_query(url, proxy, headers):
    """Check whether array-based query batching is allowed.

    Sends one query with ``batch=True`` (the helper repeats it as a JSON
    array); a response array of 10+ results means the server executed the
    whole batch — a Denial of Service vector.

    :param url: target GraphQL endpoint URL
    :param proxy: proxies dict passed through to the HTTP client
    :param headers: headers dict passed through to the HTTP client
    :return: result dict with 'result', 'title', 'description', 'impact',
             'severity' and a 'curl_verify' reproduction command
    """
    res = {
        'result': False,
        'title': 'Array-based Query Batching',
        # NOTE: fixed stray ')' at the end of the original description string.
        'description': 'Batch queries allowed with 10+ simultaneous queries',
        'impact': 'Denial of Service',
        'severity': 'HIGH',
        'curl_verify': ''
    }

    gql_response = graph_query(url, proxies=proxy, headers=headers, payload='query { __typename }', batch=True)

    res['curl_verify'] = curlify(gql_response)

    try:
        # A batched request echoes one result object per query in the array.
        if len(gql_response.json()) >= 10:
            res['result'] = True
    except Exception:
        # Best-effort probe: any parse/shape failure means "not vulnerable".
        pass

    return res
21 changes: 15 additions & 6 deletions lib/tests/dos_directive_overloading.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,26 @@
"""Directive overloading tests."""
from lib.utils import graph_query
from lib.utils import graph_query, curlify


def directive_overloading(url, proxy, headers):
    """Check whether a query with many duplicated directives is processed.

    Sends ``__typename`` decorated with 10 copies of an unknown directive
    ``@aa``. A server that validates each duplicate returns exactly 10
    errors, indicating it processes directive floods — a Denial of Service
    vector.

    :param url: target GraphQL endpoint URL
    :param proxy: proxies dict passed through to the HTTP client
    :param headers: headers dict passed through to the HTTP client
    :return: result dict with 'result', 'title', 'description', 'impact',
             'severity' and a 'curl_verify' reproduction command
    """
    res = {
        'result': False,
        'title': 'Directive Overloading',
        'description': 'Multiple duplicated directives allowed in a query',
        'impact': 'Denial of Service',
        'severity': 'HIGH',
        'curl_verify': ''
    }

    q = 'query { __typename @aa@aa@aa@aa@aa@aa@aa@aa@aa@aa }'
    gql_response = graph_query(url, proxies=proxy, headers=headers, payload=q)

    res['curl_verify'] = curlify(gql_response)

    try:
        # One error per duplicated directive => all 10 were processed.
        if len(gql_response.json()['errors']) == 10:
            res['result'] = True
    except Exception:
        # Best-effort probe: any parse/shape failure means "not vulnerable".
        pass

    return res
19 changes: 14 additions & 5 deletions lib/tests/dos_field_duplication.py
Original file line number Diff line number Diff line change
@@ -1,18 +1,27 @@
"""Field duplication tests."""
from lib.utils import graph_query
from lib.utils import graph_query, curlify


def field_duplication(url, proxy, headers):
    """Check whether a query repeating the same field 500 times executes.

    Sends ``__typename`` duplicated 500 times in one selection set; a
    successful data response means the server executed the flood — a
    Denial of Service vector.

    :param url: target GraphQL endpoint URL
    :param proxy: proxies dict passed through to the HTTP client
    :param headers: headers dict passed through to the HTTP client
    :return: result dict with 'result', 'title', 'description', 'impact',
             'severity' and a 'curl_verify' reproduction command
    """
    res = {
        'result': False,
        'title': 'Field Duplication',
        'description': 'Queries are allowed with 500 of the same repeated field',
        'impact': 'Denial of Service',
        'severity': 'HIGH',
        'curl_verify': ''
    }

    duplicated_string = '__typename \n' * 500
    q = 'query { ' + duplicated_string + '} '
    gql_response = graph_query(url, proxies=proxy, headers=headers, payload=q)
    res['curl_verify'] = curlify(gql_response)

    try:
        # Any data for __typename means the duplicated query was executed.
        if gql_response.json()['data']['__typename']:
            res['result'] = True
    except Exception:
        # Best-effort probe: any parse/shape failure means "not vulnerable".
        pass

    return res
20 changes: 15 additions & 5 deletions lib/tests/info_field_suggestions.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,27 @@
"""Field suggestions tests."""
from lib.utils import graph_query, get_error
from lib.utils import graph_query, get_error, curlify


def field_suggestions(url, proxy, headers):
    """Check whether the server leaks field suggestions in error messages.

    Queries an intentionally wrong field (``directive`` instead of
    ``directives``); a "Did you mean" hint in the error text reveals schema
    information — Information Leakage.

    :param url: target GraphQL endpoint URL
    :param proxy: proxies dict passed through to the HTTP client
    :param headers: headers dict passed through to the HTTP client
    :return: result dict with 'result', 'title', 'description', 'impact',
             'severity' and a 'curl_verify' reproduction command
    """
    res = {
        'result': False,
        'title': 'Field Suggestions',
        'description': 'Field Suggestions are Enabled',
        'impact': 'Information Leakage',
        'severity': 'LOW',
        'curl_verify': ''
    }

    q = 'query { __schema { directive } }'
    gql_response = graph_query(url, proxies=proxy, headers=headers, payload=q)
    res['curl_verify'] = curlify(gql_response)

    try:
        # Suggestion engines answer invalid fields with "Did you mean ...".
        if 'Did you mean' in get_error(gql_response.json()):
            res['result'] = True
    except Exception:
        # Best-effort probe: any parse/shape failure means "not vulnerable".
        pass

    return res
18 changes: 13 additions & 5 deletions lib/tests/info_get_method_support.py
Original file line number Diff line number Diff line change
@@ -1,19 +1,27 @@
"""Collect all supported methods."""
from lib.utils import request_get
from lib.utils import request_get, curlify


def get_method_support(url, proxies, headers):
    """Check whether GraphQL queries are accepted over HTTP GET.

    Sends ``{__typename}`` as a GET query-string parameter; a valid data
    response means state-changing GraphQL operations may be reachable via
    GET — a possible CSRF vector.

    :param url: target GraphQL endpoint URL
    :param proxies: proxies dict passed through to the HTTP client
    :param headers: headers dict passed through to the HTTP client
    :return: result dict with 'result', 'title', 'description', 'impact',
             'severity' and a 'curl_verify' reproduction command
    """
    res = {
        'result': False,
        'title': 'GET Method Query Support',
        'description': 'GraphQL queries allowed using the GET method',
        'impact': 'Possible Cross Site Request Forgery (CSRF)',
        'severity': 'LOW',
        'curl_verify': ''
    }

    q = '{__typename}'

    response = request_get(url, proxies=proxies, headers=headers, params={'query': q})

    res['curl_verify'] = curlify(response)

    try:
        # 'response and' guards against a None return from request_get.
        if response and response.json()['data']['__typename']:
            res['result'] = True
    except Exception:
        # Best-effort probe: any parse/shape failure means "not vulnerable".
        pass

    return res
Loading

0 comments on commit fd197ee

Please sign in to comment.