Skip to content

Commit fd197ee

Browse files
committed
refactor, include curl verification commands, consolidate http clients
1 parent 1a3f4c1 commit fd197ee

13 files changed

+214
-157
lines changed

README.md

+27-5
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ Options:
2929
-h, --help show this help message and exit
3030
-t URL, --target=URL target url with the path
3131
-H HEADER, --header=HEADER
32-
Append Header to the request '{"Authorizathion":
32+
Append Header to the request '{"Authorization":
3333
"Bearer eyjt"}'
3434
-o OUTPUT_JSON, --output=OUTPUT_JSON
3535
Output results to stdout (JSON)
@@ -51,19 +51,41 @@ Starting...
5151
[LOW] GraphQL Playground UI (Information Leakage)
5252
[HIGH] Alias Overloading with 100+ aliases is allowed (Denial of Service)
5353
[HIGH] Queries are allowed with 1000+ of the same repeated field (Denial of Service)
54+
```
5455

56+
Test a website, dump to a parse-able JSON output, cURL reproduction command
57+
```
5558
python3 main.py -t https://mywebsite.com/graphql -o json
5659
57-
{'Field Suggestions': {'severity': 'LOW', 'impact': 'Information Leakage', 'description': 'Field Suggestions are Enabled'}, 'Introspection': {'severity': 'HIGH', 'impact': 'Information Leakage', 'description': 'Introspection Query Enabled'}, 'Possible CSRF (GET)': {'severity': 'LOW', 'impact': 'Possible CSRF', 'description': 'HTTP GET method supported (maybe CSRF)'}, 'Alias Overloading': {'severity': 'HIGH', 'impact': 'Denial of Service', 'description': 'Alias Overloading with 100+ aliases is allowed'}, 'Field Duplication': {'severity': 'HIGH', 'impact': 'Denial of Service', 'description': 'Queries are allowed with 1000+ of the same repeated field'}, 'Directive Overloading': {'severity': 'HIGH', 'impact': 'Denial of Service', 'description': 'Multiple duplicated directives allowed in a query'}}
60+
{'curl_verify': 'curl -X POST -H "User-Agent: graphql-cop/1.2" -H '
61+
'"Accept-Encoding: gzip, deflate" -H "Accept: */*" -H '
62+
'"Connection: keep-alive" -H "Content-Length: 33" -H '
63+
'"Content-Type: application/json" -d \'{"query": "query { '
64+
'__typename }"}\' \'http://localhost:5013/graphql\'',
65+
'description': 'Tracing is Enabled',
66+
'impact': 'Information Leakage',
67+
'result': False,
68+
'severity': 'INFO',
69+
'title': 'Trace Mode'},
70+
{'curl_verify': 'curl -X POST -H "User-Agent: graphql-cop/1.2" -H '
71+
'"Accept-Encoding: gzip, deflate" -H "Accept: */*" -H '
72+
'"Connection: keep-alive" -H "Content-Length: 64" -H '
73+
'"Content-Type: application/json" -d \'{"query": "query { '
74+
'__typename @aa@aa@aa@aa@aa@aa@aa@aa@aa@aa }"}\' '
75+
"'http://localhost:5013/graphql'",
76+
'description': 'Multiple duplicated directives allowed in a query',
77+
'impact': 'Denial of Service',
78+
'result': True,
79+
'severity': 'HIGH',
80+
'title': 'Directive Overloading'}]
5881
```
5982

60-
Test a website
61-
Using `graphql-cop` through a Proxy (Eg: Burp Suite) and adding custom headers (Eg: Authorization):
83+
Test a website using `graphql-cop` through a proxy (e.g. Burp Suite) with custom headers (e.g. Authorization):
6284

6385
```
6486
$ python3 graphql-cop.py -t https://mywebsite.com/graphql --proxy --header '{"Authorization": "Bearer token_here"}'
6587
66-
GraphQL Cop 1.1
88+
GraphQL Cop 1.2
6789
Security Auditor for GraphQL
6890
Dolev Farhi & Nick Aleks
6991

graphql-cop.py

+15-69
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,10 @@
11
#!/usr/env/python3
22
import sys
33

4+
from json import loads
45
from optparse import OptionParser
56
from version import VERSION
67
from config import HEADERS
7-
from json import loads
88
from urllib.parse import urlparse
99

1010
from lib.tests.info_field_suggestions import field_suggestions
@@ -21,7 +21,7 @@
2121

2222
parser = OptionParser(usage='%prog -t http://example.com -o json')
2323
parser.add_option('-t', '--target', dest='url', help='target url with the path')
24-
parser.add_option('-H', '--header', dest='header', help='Append Header to the request \'{"Authorizathion": "Bearer eyjt"}\'')
24+
parser.add_option('-H', '--header', dest='header', help='Append Header to the request \'{"Authorization": "Bearer eyjt"}\'')
2525
parser.add_option('-o', '--output', dest='output_json',
2626
help='Output results to stdout (JSON)', default=False)
2727
parser.add_option('--proxy', '-x', dest='proxy', action='store_true', default=False,
@@ -55,7 +55,7 @@
5555
print("Cannot cast %s into header dictionary. Ensure the format \'{\"key\": \"value\"}\'."%(options.header))
5656

5757
if not urlparse(options.url).scheme:
58-
print("Url missing scheme (http:// or https://). Ensure Url contains a scheme.")
58+
print("URL missing scheme (http:// or https://). Ensure URL contains a scheme.")
5959
sys.exit(1)
6060
else:
6161
url = options.url
@@ -64,73 +64,19 @@
6464
print(url, 'does not seem to be running GraphQL.')
6565
sys.exit(1)
6666

67-
json_output = {}
68-
69-
if field_suggestions(url, proxy, HEADERS):
70-
# Field Suggestions
71-
json_output['Field Suggestions'] = {}
72-
json_output['Field Suggestions']['severity'] = 'LOW'
73-
json_output['Field Suggestions']['impact'] = 'Information Leakage'
74-
json_output['Field Suggestions']['description'] = 'Field Suggestions are Enabled'
75-
76-
if introspection(url, proxy, HEADERS):
77-
# Introspection
78-
json_output['Introspection'] = {}
79-
json_output['Introspection']['severity'] = 'HIGH'
80-
json_output['Introspection']['impact'] = 'Information Leakage'
81-
json_output['Introspection']['description'] = 'Introspection Query Enabled'
82-
83-
if detect_graphiql(url, proxy, HEADERS):
84-
# Playground
85-
json_output['GraphiQL Playground'] = {}
86-
json_output['GraphiQL Playground']['severity'] = 'LOW'
87-
json_output['GraphiQL Playground']['impact'] = 'Information Leakage'
88-
json_output['GraphiQL Playground']['description'] = 'GraphiQL Explorer Enabled'
89-
90-
if get_method_support(url, proxy, HEADERS):
91-
# HTTP GET method support
92-
json_output['Possible CSRF (GET)'] = {}
93-
json_output['Possible CSRF (GET)']['severity'] = 'LOW'
94-
json_output['Possible CSRF (GET)']['impact'] = 'Possible CSRF'
95-
json_output['Possible CSRF (GET)']['description'] = 'HTTP GET method supported (maybe CSRF)'
96-
97-
if alias_overloading(url, proxy, HEADERS):
98-
# Alias Overloading
99-
json_output['Alias Overloading'] = {}
100-
json_output['Alias Overloading']['severity'] = 'HIGH'
101-
json_output['Alias Overloading']['impact'] = 'Denial of Service'
102-
json_output['Alias Overloading']['description'] = 'Alias Overloading with 100+ aliases is allowed'
103-
104-
if batch_query(url, proxy, HEADERS):
105-
# Batch Queries
106-
json_output['Batch Queries'] = {}
107-
json_output['Batch Queries']['severity'] = 'HIGH'
108-
json_output['Batch Queries']['impact'] = 'Denial of Service'
109-
json_output['Batch Queries']['description'] = 'Batch queries allowed with 10+ simultaneous queries)'
110-
111-
if field_duplication(url, proxy, HEADERS):
112-
# Field Duplication
113-
json_output['Field Duplication'] = {}
114-
json_output['Field Duplication']['severity'] = 'HIGH'
115-
json_output['Field Duplication']['impact'] = 'Denial of Service'
116-
json_output['Field Duplication']['description'] = 'Queries are allowed with 500 of the same repeated field'
117-
118-
if trace_mode(url, proxy, HEADERS):
119-
# Tracing mode
120-
json_output['Tracing Mode'] = {}
121-
json_output['Tracing Mode']['severity'] = 'INFORMATIONAL'
122-
json_output['Tracing Mode']['impact'] = 'Information Leakage'
123-
json_output['Tracing Mode']['description'] = 'Tracing is enabled'
67+
tests = [field_suggestions, introspection, detect_graphiql,
68+
get_method_support, alias_overloading, batch_query,
69+
field_duplication, trace_mode, directive_overloading]
12470

125-
if directive_overloading(url, proxy, HEADERS):
126-
# Directive Overloading
127-
json_output['Directive Overloading'] = {}
128-
json_output['Directive Overloading']['severity'] = 'HIGH'
129-
json_output['Directive Overloading']['impact'] = 'Denial of Service'
130-
json_output['Directive Overloading']['description'] = 'Multiple duplicated directives allowed in a query'
71+
json_output = []
13172

73+
for test in tests:
74+
json_output.append(test(url, proxy, HEADERS))
75+
13276
if options.output_json == 'json':
133-
print(json_output)
77+
from pprint import pprint
78+
pprint(json_output)
13479
else:
135-
for k, v in json_output.items():
136-
print('[{}] {} - {} ({})'.format(v['severity'], k, v['description'], v['impact']))
80+
for i in json_output:
81+
print('[{}] {} - {} ({})'.format(i['severity'], i['title'], i['description'], i['impact']))
82+

lib/tests/dos_alias_overloading.py

+15-6
Original file line numberDiff line numberDiff line change
@@ -1,21 +1,30 @@
11
"""Alias overloading tests."""
2-
from lib.utils import graph_query
2+
from lib.utils import graph_query, curlify
33

44

55
def alias_overloading(url, proxy, headers):
66
"""Check for alias overloading."""
7-
result = False
7+
res = {
8+
'result':False,
9+
'title':'Alias Overloading',
10+
'description':'Alias Overloading with 100+ aliases is allowed',
11+
'impact':'Denial of Service',
12+
'severity':'HIGH',
13+
'curl_verify':''
14+
}
815
aliases = ''
916

1017
for i in range(0, 101):
1118
aliases += 'alias{}:__typename \n'.format(i)
1219

1320
gql_response = graph_query(url, proxies=proxy, headers=headers, payload='query { ' + aliases + ' }')
14-
21+
22+
res['curl_verify'] = curlify(gql_response)
23+
1524
try:
16-
if gql_response['data']['alias100']:
17-
result = True
25+
if gql_response.json()['data']['alias100']:
26+
res['result'] = True
1827
except:
1928
pass
2029

21-
return result
30+
return res

lib/tests/dos_batch.py

+16-7
Original file line numberDiff line numberDiff line change
@@ -1,17 +1,26 @@
11
"""Batch tests."""
2-
from lib.utils import graph_batch_query
2+
from lib.utils import graph_query, curlify
33

44

55
def batch_query(url, proxy, headers):
66
"""Check for batch queries."""
7-
result = False
8-
9-
gql_response = graph_batch_query(url, proxies=proxy, headers=headers, payload='query { __typename }')
7+
res = {
8+
'result':False,
9+
'title':'Array-based Query Batching',
10+
'description':'Batch queries allowed with 10+ simultaneous queries',
11+
'impact':'Denial of Service',
12+
'severity':'HIGH',
13+
'curl_verify':''
14+
}
1015

16+
gql_response = graph_query(url, proxies=proxy, headers=headers, payload='query { __typename }', batch=True)
17+
18+
res['curl_verify'] = curlify(gql_response)
19+
1120
try:
12-
if len(gql_response) >= 10:
13-
result = True
21+
if len(gql_response.json()) >= 10:
22+
res['result'] = True
1423
except:
1524
pass
1625

17-
return result
26+
return res
+15-6
Original file line numberDiff line numberDiff line change
@@ -1,17 +1,26 @@
11
"""Directive overloading tests."""
2-
from lib.utils import graph_query
2+
from lib.utils import graph_query, curlify
3+
34

45
def directive_overloading(url, proxy, headers):
56
"""Check for directive overloading."""
6-
result = False
7+
res = {
8+
'result':False,
9+
'title':'Directive Overloading',
10+
'description':'Multiple duplicated directives allowed in a query',
11+
'impact':'Denial of Service',
12+
'severity':'HIGH',
13+
'curl_verify':''
14+
}
715

816
q = 'query { __typename @aa@aa@aa@aa@aa@aa@aa@aa@aa@aa }'
917
gql_response = graph_query(url, proxies=proxy, headers=headers, payload=q)
10-
18+
res['curl_verify'] = curlify(gql_response)
19+
1120
try:
12-
if len(gql_response['errors']) == 10:
13-
result = True
21+
if len(gql_response.json()['errors']) == 10:
22+
res['result'] = True
1423
except:
1524
pass
1625

17-
return result
26+
return res

lib/tests/dos_field_duplication.py

+14-5
Original file line numberDiff line numberDiff line change
@@ -1,18 +1,27 @@
11
"""Field duplication tests."""
2-
from lib.utils import graph_query
2+
from lib.utils import graph_query, curlify
33

44

55
def field_duplication(url, proxy, headers):
66
"""Check for field duplication."""
7-
result = False
7+
res = {
8+
'result':False,
9+
'title':'Field Duplication',
10+
'description':'Queries are allowed with 500 of the same repeated field',
11+
'impact':'Denial of Service',
12+
'severity':'HIGH',
13+
'curl_verify':''
14+
}
815

916
duplicated_string = '__typename \n' * 500
1017
q = 'query { ' + duplicated_string + '} '
1118
gql_response = graph_query(url, proxies=proxy, headers=headers, payload=q)
19+
res['curl_verify'] = curlify(gql_response)
20+
1221
try:
13-
if gql_response['data']['__typename']:
14-
result = True
22+
if gql_response.json()['data']['__typename']:
23+
res['result'] = True
1524
except:
1625
pass
1726

18-
return result
27+
return res

lib/tests/info_field_suggestions.py

+15-5
Original file line numberDiff line numberDiff line change
@@ -1,17 +1,27 @@
11
"""Field suggestions tests."""
2-
from lib.utils import graph_query, get_error
2+
from lib.utils import graph_query, get_error, curlify
33

44

55
def field_suggestions(url, proxy, headers):
66
"""Retrieve field suggestions."""
7-
result = False
7+
res = {
8+
'result':False,
9+
'title':'Field Suggestions',
10+
'description':'Field Suggestions are Enabled',
11+
'impact':'Information Leakage',
12+
'severity':'LOW',
13+
'curl_verify':''
14+
}
815

916
q = 'query { __schema { directive } }'
1017
gql_response = graph_query(url, proxies=proxy, headers=headers, payload=q)
18+
res['curl_verify'] = curlify(gql_response)
19+
20+
1121
try:
12-
if 'Did you mean' in get_error(gql_response):
13-
result = True
22+
if 'Did you mean' in get_error(gql_response.json()):
23+
res['result'] = True
1424
except:
1525
pass
1626

17-
return result
27+
return res

lib/tests/info_get_method_support.py

+13-5
Original file line numberDiff line numberDiff line change
@@ -1,19 +1,27 @@
11
"""Collect all supported methods."""
2-
from lib.utils import request_get
2+
from lib.utils import request_get, curlify
33

44

55
def get_method_support(url, proxies, headers):
66
"""Get the supported methods."""
7-
result = False
7+
res = {
8+
'result':False,
9+
'title':'GET Method Query Support',
10+
'description':'GraphQL queries allowed using the GET method',
11+
'impact':'Possible Cross Site Request Forgery (CSRF)',
12+
'severity':'LOW',
13+
'curl_verify':''
14+
}
815

916
q = '{__typename}'
1017

1118
response = request_get(url, proxies=proxies, headers=headers, params={'query':q})
12-
19+
res['curl_verify'] = curlify(response)
20+
1321
try:
1422
if response and response.json()['data']['__typename']:
15-
result = True
23+
res['result'] = True
1624
except:
1725
pass
1826

19-
return result
27+
return res

0 commit comments

Comments
 (0)