Skip to content

Commit

Permalink
Merge pull request #14 from dolevf/break-apart-tests
Browse files Browse the repository at this point in the history
improved error handling, operation name additions, split tests
  • Loading branch information
dolevf authored Aug 27, 2022
2 parents 02a9f21 + f8be1e4 commit 8788e5c
Show file tree
Hide file tree
Showing 15 changed files with 55 additions and 30 deletions.
4 changes: 3 additions & 1 deletion graphql-cop.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
from lib.tests.dos_circular_introspection import circular_query_introspection
from lib.tests.info_get_based_mutation import get_based_mutation
from lib.tests.info_post_based_csrf import post_based_csrf
from lib.tests.info_unhandled_error import unhandled_error_detection
from lib.utils import is_graphql, draw_art


Expand Down Expand Up @@ -71,7 +72,8 @@
tests = [field_suggestions, introspection, detect_graphiql,
get_method_support, alias_overloading, batch_query,
field_duplication, trace_mode, directive_overloading,
circular_query_introspection, get_based_mutation, post_based_csrf]
circular_query_introspection, get_based_mutation, post_based_csrf,
unhandled_error_detection]

json_output = []

Expand Down
6 changes: 3 additions & 3 deletions lib/tests/dos_alias_overloading.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,10 +17,10 @@ def alias_overloading(url, proxy, headers):
for i in range(0, 101):
aliases += 'alias{}:__typename \n'.format(i)

gql_response = graph_query(url, proxies=proxy, headers=headers, payload='query { ' + aliases + ' }')
gql_response = graph_query(url, proxies=proxy, headers=headers, payload='query cop { ' + aliases + ' }')

res['curl_verify'] = curlify(gql_response)

try:
if gql_response.json()['data']['alias100']:
res['result'] = True
Expand Down
2 changes: 1 addition & 1 deletion lib/tests/dos_batch.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ def batch_query(url, proxy, headers):
'curl_verify':''
}

gql_response = graph_query(url, proxies=proxy, headers=headers, payload='query { __typename }', batch=True)
gql_response = graph_query(url, proxies=proxy, headers=headers, payload='query cop { __typename }', batch=True)

res['curl_verify'] = curlify(gql_response)

Expand Down
2 changes: 1 addition & 1 deletion lib/tests/dos_circular_introspection.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ def circular_query_introspection(url, proxy, headers):
'curl_verify':''
}

q = 'query { __schema { types { fields { type { fields { type { fields { type { fields { type { name } } } } } } } } } } }'
q = 'query cop { __schema { types { fields { type { fields { type { fields { type { fields { type { name } } } } } } } } } } }'

gql_response = graph_query(url, proxies=proxy, headers=headers, payload=q)
res['curl_verify'] = curlify(gql_response)
Expand Down
4 changes: 2 additions & 2 deletions lib/tests/dos_directive_overloading.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,10 +13,10 @@ def directive_overloading(url, proxy, headers):
'curl_verify':''
}

q = 'query { __typename @aa@aa@aa@aa@aa@aa@aa@aa@aa@aa }'
q = 'query cop { __typename @aa@aa@aa@aa@aa@aa@aa@aa@aa@aa }'
gql_response = graph_query(url, proxies=proxy, headers=headers, payload=q)
res['curl_verify'] = curlify(gql_response)

try:
if len(gql_response.json()['errors']) == 10:
res['result'] = True
Expand Down
4 changes: 2 additions & 2 deletions lib/tests/dos_field_duplication.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,10 +14,10 @@ def field_duplication(url, proxy, headers):
}

duplicated_string = '__typename \n' * 500
q = 'query { ' + duplicated_string + '} '
q = 'query cop { ' + duplicated_string + '} '
gql_response = graph_query(url, proxies=proxy, headers=headers, payload=q)
res['curl_verify'] = curlify(gql_response)

try:
if gql_response.json()['data']['__typename']:
res['result'] = True
Expand Down
3 changes: 1 addition & 2 deletions lib/tests/info_field_suggestions.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,10 +13,9 @@ def field_suggestions(url, proxy, headers):
'curl_verify':''
}

q = 'query { __schema { directive } }'
q = 'query cop { __schema { directive } }'
gql_response = graph_query(url, proxies=proxy, headers=headers, payload=q)
res['curl_verify'] = curlify(gql_response)


try:
if 'Did you mean' in get_error(gql_response.json()):
Expand Down
3 changes: 1 addition & 2 deletions lib/tests/info_get_based_mutation.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,11 +12,10 @@ def get_based_mutation(url, proxies, headers):
'curl_verify':''
}

q = 'mutation {__typename}'
q = 'mutation cop {__typename}'

response = request(url, proxies=proxies, headers=headers, params={'query':q})
res['curl_verify'] = curlify(response)

try:
if response and response.json()['data']['__typename']:
res['result'] = True
Expand Down
2 changes: 1 addition & 1 deletion lib/tests/info_get_method_support.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ def get_method_support(url, proxies, headers):
'curl_verify':''
}

q = '{__typename}'
q = 'query cop {__typename}'

response = request(url, proxies=proxies, headers=headers, params={'query':q})
res['curl_verify'] = curlify(response)
Expand Down
2 changes: 1 addition & 1 deletion lib/tests/info_introspect.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ def introspection(url, proxy, headers):
'curl_verify':''
}

q = 'query { __schema { types { name fields { name } } } }'
q = 'query cop { __schema { types { name fields { name } } } }'

gql_response = graph_query(url, proxies=proxy, headers=headers, payload=q)
res['curl_verify'] = curlify(gql_response)
Expand Down
2 changes: 1 addition & 1 deletion lib/tests/info_post_based_csrf.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ def post_based_csrf(url, proxies, headers):
'curl_verify':''
}

q = 'query {__typename}'
q = 'query cop {__typename}'

response = request(url, proxies=proxies, headers=headers, params={'query':q}, verb='POST')
res['curl_verify'] = curlify(response)
Expand Down
8 changes: 3 additions & 5 deletions lib/tests/info_trace_mode.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,14 +13,12 @@ def trace_mode(url, proxy, headers):
'curl_verify':''
}

q = 'query { __typename }'
q = 'query cop { __typename }'
gql_response = graph_query(url, proxies=proxy, headers=headers, payload=q)
res['curl_verify'] = curlify(gql_response)

try:
if gql_response.json().get('errors', {}).get('extensions', {}).get('tracing'):
res['result'] = True
elif gql_response.json().get('errors', {}).get('extensions', {}).get('exception', None):
if gql_response.json()['errors'][0]['extensions']['tracing']:
res['result'] = True
elif 'stacktrace' in str(gql_response.json()).lower():
res['result'] = True
Expand Down
28 changes: 28 additions & 0 deletions lib/tests/info_unhandled_error.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
"""Collect trace mode details."""
from lib.utils import graph_query, curlify


def unhandled_error_detection(url, proxy, headers):
    """Check whether the endpoint leaks unhandled exception details.

    Sends an intentionally malformed GraphQL document ('qwerty' is not a
    valid operation type) and inspects the error response for exception
    traces; their presence indicates server errors are exposed to clients.

    Args:
        url: Target GraphQL endpoint URL.
        proxy: Proxy mapping forwarded to the HTTP client.
        headers: Extra HTTP headers for the request.

    Returns:
        dict: Result record; 'result' is True when exception details are
        detected, and 'curl_verify' holds a curl reproduction of the probe.
    """
    res = {
        'result':False,
        'title':'Unhandled Errors Detection',
        'description':'Exception errors are not handled',
        'impact':'Information Leakage',
        'severity':'INFO',
        'curl_verify':''
    }

    # Deliberately invalid operation keyword to force a parse/validation error.
    q = 'qwerty cop { abc }'
    gql_response = graph_query(url, proxies=proxy, headers=headers, payload=q)
    res['curl_verify'] = curlify(gql_response)

    try:
        payload = gql_response.json()
        if payload['errors'][0]['extensions']['exception']:
            res['result'] = True
        elif 'exception' in str(payload).lower():
            res['result'] = True
    # Best-effort probe: only swallow the expected decode/lookup failures
    # (a bare except would also hide KeyboardInterrupt/SystemExit).
    except (ValueError, KeyError, IndexError, TypeError):
        pass

    return res
13 changes: 6 additions & 7 deletions lib/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,14 +29,12 @@ def get_error(resp):

def graph_query(url, proxies, headers, operation='query', payload={}, batch=False):
"""Perform a query."""

if batch:
data = []
for _ in range(10):
data.append({operation:payload})
else:
data = {operation:payload}

data = {operation:payload, "operationName":"cop"}
try:
response = requests.post(url,
headers=headers,
Expand Down Expand Up @@ -73,7 +71,7 @@ def request(url, proxies, headers, params=None, data=None, verb='GET'):
def is_graphql(url, proxies, headers):
"""Check if the URL provides a GraphQL interface."""
query = '''
query {
query cop {
__typename
}
'''
Expand All @@ -86,9 +84,10 @@ def is_graphql(url, proxies, headers):
except JSONDecodeError:
return False

if response.json().get('data', {}).get('__typename', '') in ('Query', 'QueryRoot', 'query_root'):
return True
elif response.json().get('errors') and (any('locations' in i for i in response['errors']) or (any('extensions' in i for i in response))):
if 'data' in response.json() and response.json()['data'] != None:
if response.json()['data']['__typename'] in ('Query', 'QueryRoot', 'query_root', 'Root'):
return True
elif response.json().get('errors') and (any('locations' in i for i in response.json()['errors']) or (any('extensions' in i for i in response.json()))):
return True
elif response.json().get('data'):
return True
Expand Down
2 changes: 1 addition & 1 deletion version.py
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
"""Version details of graphql-cop."""
VERSION = '1.6'
VERSION = '1.7'

0 comments on commit 8788e5c

Please sign in to comment.