Skip to content

Commit

Permalink
big refactor
Browse files Browse the repository at this point in the history
  • Loading branch information
gwen001 committed Dec 12, 2019
1 parent 269aa89 commit 1bfb8f8
Show file tree
Hide file tree
Showing 3 changed files with 466 additions and 68 deletions.
216 changes: 200 additions & 16 deletions crlf.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,12 +7,15 @@
import sys
import re
import time
import copy
import random
import argparse
import requests
import urllib.parse
from functools import partial
from threading import Thread
from queue import Queue
from urllib.parse import urlparse
from multiprocessing.dummy import Pool
from colored import fg, bg, attr

MAX_EXCEPTION = 3
Expand All @@ -23,14 +26,105 @@
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)



def rebuiltQuery( t_params ):
    """Rebuild a raw query string from a {name: [value, ...]} mapping.

    Inverse of _parse_qs(): every value of every parameter becomes one
    'name=value' pair, pairs joined with '&'. No URL-encoding is applied
    (payloads must survive verbatim).
    """
    # str.join over a generator avoids the quadratic string concatenation
    # and the trailing-'&' strip of the manual loop
    return '&'.join(
        pname + '=' + value
        for pname, t_values in t_params.items()
        for value in t_values
    )


def _parse_qs( query ):
t_params = {}
tmptab = query.split('&')

for param in tmptab:
t_param = param.split('=')
pname = t_param[0]
pvalue = t_param[1]
if not pname in t_params:
t_params[pname] = []
t_params[pname].append( pvalue )

return t_params


def testParams( t_urlparse, payload ):
    # Inject the payload into each query-parameter value, one value at a
    # time, and test every resulting URL. An empty value is replaced by
    # the 666 placeholder so the payload is appended to something.
    t_params = _parse_qs( t_urlparse.query )

    for pname, t_values in t_params.items():
        for idx, current in enumerate(t_values):
            injected = copy.deepcopy(t_params)
            if current == '':
                current = 666
            # payload appended raw (not URL-encoded) on purpose
            injected[pname][idx] = str(current) + payload
            t_urlparse = t_urlparse._replace(query=rebuiltQuery(injected))
            doTest( urllib.parse.urlunparse(t_urlparse) )
            # disable get/post swap
            # t_urlparse = t_urlparse._replace(query='')
            # url = urllib.parse.urlunparse(t_urlparse)
            # doTest( url, 'POST', injected )


def testFragment( t_urlparse, payload ):
    # Append the raw (non-URL-encoded) payload to the fragment part of
    # the URL and test the result.
    mutated = t_urlparse._replace(fragment=t_urlparse.fragment + payload)
    doTest( urllib.parse.urlunparse(mutated) )


def testPath( t_urlparse, payload ):
    # Normalize the URL path (collapse duplicate slashes, drop empty
    # segments), append the payload as an extra path segment, then test
    # the resulting URL. Note: the final '//' collapse also applies to
    # the payload itself, matching the original behaviour.
    clean = ''
    for segment in ['/'] + t_urlparse.path.split('/'):
        if segment:
            clean = clean + '/' + segment
        clean = clean.replace('//', '/')

    candidate = (clean + '/' + payload).replace('//', '/')
    doTest( urllib.parse.urlunparse(t_urlparse._replace(path=candidate)) )


def testPayload( url, payload ):
    # Fire the payload at every injectable part of one URL: the query
    # parameters and fragment (when present, with surrounding slashes
    # stripped) and always the path.
    parsed = urllib.parse.urlparse( url )
    trimmed = payload.strip('/')

    if parsed.query:
        testParams( parsed, trimmed )

    if parsed.fragment:
        testFragment( parsed, trimmed )

    testPath( parsed, payload )


def testURL( url ):
    """Run every payload against a single URL using a 10-thread pool.

    Updates the shared t_multiproc progress counters; the progress line
    is only printed at low verbosity (_verbose <= 1).
    """
    # small pause to avoid hammering the target host
    time.sleep( 0.01 )

    if _verbose <= 1:
        sys.stdout.write( 'progress: %d/%d\r' % (t_multiproc['n_current'],t_multiproc['n_total']) )
    t_multiproc['n_current'] = t_multiproc['n_current'] + 1

    # NOTE(review): the original also computed urlparse(url) here but
    # never used the result; that dead assignment has been removed.
    pool = Pool( 10 )
    pool.map( partial(testPayload,url), t_payloads )
    pool.close()
    pool.join()


def doTest( url, method='GET', post_params='' ):
t_urlparse = urllib.parse.urlparse(url)
u = t_urlparse.scheme + '_' + t_urlparse.netloc

if not u in t_exceptions:
Expand All @@ -48,7 +142,10 @@ def testURL( url ):
return

try:
r = requests.head( url, timeout=5, verify=False )
if method == 'POST':
r = requests.post( url, data=post_params, headers=t_custom_headers, timeout=5, verify=False )
else:
r = requests.head( url, headers=t_custom_headers, timeout=5, verify=False )
except Exception as e:
t_exceptions[u] = t_exceptions[u] + 1
if _verbose >= 3:
Expand All @@ -69,18 +166,80 @@ def testURL( url ):
if vuln == 'VULNERABLE':
t_vulnerable[u] = t_vulnerable[u] + 1

output = '%sC=%d\t\tT=%s\t\tV=%s\n' % (url.ljust(t_multiproc['u_max_length']),r.status_code,content_type,vuln)
# output = '%sC=%d\t\tT=%s\t\tV=%s\n' % (url.ljust(t_multiproc['u_max_length']),r.status_code,content_type,vuln)
output = '%s\t\tC=%d\t\tT=%s\t\tV=%s\n' % (url,r.status_code,content_type,vuln)

fp = open( t_multiproc['f_output'], 'a+' )
fp.write( output )
fp.close()

if _verbose >= 2 or (_verbose >= 1 and vuln == 'VULNERABLE'):
sys.stdout.write( '%s' % output )
if vuln == 'VULNERABLE':
sys.stdout.write( '%s%s%s' % (fg('light_red'),output,attr(0)) )
else:
sys.stdout.write( output )


# old version
# def testURL( url ):
# time.sleep( 0.01 )

# if _verbose <= 1:
# sys.stdout.write( 'progress: %d/%d\r' % (t_multiproc['n_current'],t_multiproc['n_total']) )
# t_multiproc['n_current'] = t_multiproc['n_current'] + 1

# t_urlparse = urlparse(url)
# u = t_urlparse.scheme + '_' + t_urlparse.netloc

# if not u in t_exceptions:
# t_exceptions[u] = 0
# if t_exceptions[u] >= MAX_EXCEPTION:
# if _verbose >= 3:
# print("skip too many exceptions %s" % t_urlparse.netloc)
# return

# if not u in t_vulnerable:
# t_vulnerable[u] = 0
# if t_vulnerable[u] >= MAX_VULNERABLE:
# if _verbose >= 3:
# print("skip already vulnerable %s" % t_urlparse.netloc)
# return

# try:
# r = requests.head( url, timeout=5, verify=False )
# except Exception as e:
# t_exceptions[u] = t_exceptions[u] + 1
# if _verbose >= 3:
# sys.stdout.write( "%s[-] error occurred: %s%s\n" % (fg('red'),e,attr(0)) )
# return

# if 'Content-Type' in r.headers:
# content_type = r.headers['Content-Type']
# else:
# content_type = '-'

# t_headers = list( map( str.lower,r.headers.keys() ) )
# if 'xcrlf' in t_headers:
# vuln = 'VULNERABLE'
# else:
# vuln = '-'

# if vuln == 'VULNERABLE':
# t_vulnerable[u] = t_vulnerable[u] + 1

# output = '%sC=%d\t\tT=%s\t\tV=%s\n' % (url.ljust(t_multiproc['u_max_length']),r.status_code,content_type,vuln)

# fp = open( t_multiproc['f_output'], 'a+' )
# fp.write( output )
# fp.close()

# if _verbose >= 2 or (_verbose >= 1 and vuln == 'VULNERABLE'):
# sys.stdout.write( '%s' % output )


parser = argparse.ArgumentParser()
parser.add_argument( "-a","--path",help="set paths list" )
parser.add_argument( "-d","--header",help="custom headers, example: cookie1=value1;cookie2=value2...", action="append" )
parser.add_argument( "-p","--payloads",help="set payloads list" )
parser.add_argument( "-o","--hosts",help="set host list (required or -u)" )
# parser.add_argument( "-r","--redirect",help="follow redirection" )
Expand All @@ -96,6 +255,13 @@ def testURL( url ):
else:
t_scheme = ['http','https']

# Parse user-supplied custom headers ("Name: value" strings, one per
# -d/--header option) into a dict usable by requests.
t_custom_headers = {}
if args.header:
    for header in args.header:
        if ':' in header:
            # split only on the FIRST ':' so header values that contain a
            # colon themselves (e.g. "Referer: https://host/") are no
            # longer truncated
            name, value = header.split(':', 1)
            t_custom_headers[ name.strip() ] = value.strip()

t_hosts = []
if args.hosts:
if os.path.isfile(args.hosts):
Expand Down Expand Up @@ -199,23 +365,41 @@ def testURL( url ):

for scheme in t_scheme:
for host in t_hosts:
for payload in t_payloads:
for path in t_path:
u = scheme + '://' + host.strip() + path + payload
t_totest.append( u )
l = len(u)
if l > u_max_length:
u_max_length = l

for url in t_urls:
for payload in t_payloads:
for path in t_path:
u = url.strip() + path + payload
u = scheme + '://' + host.strip() + path
t_totest.append( u )
l = len(u)
if l > u_max_length:
u_max_length = l

for url in t_urls:
for path in t_path:
u = url.strip() + path
t_totest.append( u )
l = len(u)
if l > u_max_length:
u_max_length = l

# old version
# for scheme in t_scheme:
# for host in t_hosts:
# for payload in t_payloads:
# for path in t_path:
# u = scheme + '://' + host.strip() + path + payload
# t_totest.append( u )
# l = len(u)
# if l > u_max_length:
# u_max_length = l

# for url in t_urls:
# for payload in t_payloads:
# for path in t_path:
# u = url.strip() + path + payload
# t_totest.append( u )
# l = len(u)
# if l > u_max_length:
# u_max_length = l

n_totest = len(t_totest)
sys.stdout.write( '%s[+] %d urls created.%s\n' % (fg('green'),n_totest,attr(0)) )
sys.stdout.write( '[+] testing...\n' )
Expand Down
Loading

0 comments on commit 1bfb8f8

Please sign in to comment.