#!/usr/bin/env python
# A Dirb alternative written in Python
# By Ahmed Shawky @lnxg33k
import sys
import uuid
import time
import argparse
import logging
from multiprocessing.dummy import Pool as ThreadPool  # thread-based pool

from requests import request, packages
from progressbar import ProgressBar, SimpleProgress

logging.basicConfig(level=logging.INFO, format='[+] %(asctime)s - %(message)s')
logger = logging.getLogger('BruteForce')
# Keep the requests library from logging every connection.
logging.getLogger("requests").setLevel(logging.WARNING)
# Silence the InsecureRequestWarning triggered by verify=False.
packages.urllib3.disable_warnings()


def getFullUrls(url, paths, prefixes=[], ext=[]):
    # Build every candidate URL: each wordlist entry, expanded with each
    # prefix (if any) and suffixed with each extension (if any).
    urls = []
    for path in paths:
        # One candidate per prefix; the original loop accumulated every
        # prefix onto the same path, which is fixed here.
        candidates = ["%s%s" % (p, path) for p in prefixes] if prefixes else [path]
        for candidate in candidates:
            if not candidate:
                continue
            urls.append("%s/%s" % (url.rstrip('/'), candidate.strip('/')))
            for i in ext:
                fullUrl = "%s/%s.%s" % (
                    url.rstrip('/'), candidate.strip('/'), i.strip('.'))
                urls.append(fullUrl)
    return urls
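
# For illustration only (hypothetical target and wordlist values):
#   getFullUrls("http://example.com", ["admin"], ext=["php"])
#   => ["http://example.com/admin", "http://example.com/admin.php"]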


def notFoundCode(url, cookies=None, userAgent=None):
    # Probe a random, almost certainly nonexistent path to learn which
    # status code this server returns for "not found" (handles servers
    # that answer missing pages with something other than 404).
    url = '%s/%s' % (url.strip('/'), uuid.uuid4())
    r = request(
        "HEAD", url, cookies=cookieFormatter(cookies),
        headers={'User-Agent': userAgent},
        timeout=10, verify=False)
    return r.status_code


def cookieFormatter(cookies):
    # Turn a raw "key=value; key2=value2" cookie string into a dict.
    if not cookies:
        return None
    cookiesDict = {}
    for pair in cookies.split(';'):
        # partition() keeps any '=' inside the cookie value intact.
        key, _, value = pair.strip().partition('=')
        cookiesDict[key] = value
    return cookiesDict
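
# For illustration only (made-up cookie values):
#   cookieFormatter("PHPSESSID=abc123; token=a=b")
#   => {'PHPSESSID': 'abc123', 'token': 'a=b'}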


def fileExists(url, notFound=404, ignoreCodes=[], cookies=None, sleep=0,
               agent=None):
    # HEAD-request a single URL and report it when the status code differs
    # from the server's "not found" baseline and is not explicitly ignored.
    try:
        r = request(
            "HEAD", url, cookies=cookieFormatter(cookies),
            headers={'User-Agent': agent}, verify=False, timeout=2)
        responseHeaders = dict(r.headers.lower_items())
        if r.status_code != notFound and r.status_code not in ignoreCodes:
            data = {
                'url': url.strip('/'), 'code': r.status_code,
                'Content-Type': responseHeaders.get('content-type'),
                'Content-Length': responseHeaders.get('content-length') or 0
            }
            logger.info(" %s (code:%d|Content-Type:%s|Content-Length:%s)" % (
                url, data['code'], data['Content-Type'],
                data['Content-Length']))
            return data
    except Exception:
        # Treat timeouts and connection errors as misses.
        pass
    finally:
        time.sleep(sleep)


if __name__ == '__main__':
    if len(sys.argv) <= 1:
        msg = "usage: %s -h" % sys.argv[0]
        exit(msg)
    parser = argparse.ArgumentParser()
    args = parser.add_argument_group('Options')
    args.add_argument(
        '-u', '--url', dest='url', metavar='',
        help='\t\tThe target URL to scan.')
    args.add_argument(
        '-w', '--wordlist', dest='wordlist', metavar='',
        help='\t\tPath to the wordlist.')
    args.add_argument(
        '-e', '--extensions', dest='extensions', metavar='', default=[],
        help='\t\tAppend these extensions to each word (e.g. asp,aspx).')
    args.add_argument(
        '-p', '--prefix', dest='prefixes', metavar='', default=[],
        help='\t\tPrefix to add before each element in the wordlist.')
    args.add_argument(
        '-t', '--threads', dest='threads', type=int, default=30,
        metavar='', help='\t\tNumber of concurrent threads (default 30).')
    connection = parser.add_argument_group('Request')
    connection.add_argument(
        '-c', '--cookie', dest='cookie', metavar='',
        help='\t\tSet a cookie for the request.')
    connection.add_argument(
        '-ua', '--user-agent', dest='agent', metavar='',
        help='\t\tSpoof the request User-Agent.')
    connection.add_argument(
        '-s', '--sleep', dest='sleep', type=int, default=0, metavar='',
        help='\t\tTime to sleep between concurrent requests (default 0).')
    connection.add_argument(
        '-i', '--ignore', dest='ignore', default=[], metavar='',
        help='\t\tHTTP response status codes to ignore (e.g. 300,500).')
url = options.url
if options.ignore:
options.ignore = map(int, options.ignore.split(','))
with open(options.wordlist) as f:
paths = list(set(filter(None, map(str.strip, f.readlines()))))
if options.extensions:
options.extensions = options.extensions.split(',')
if options.prefixes:
options.prefixes = options.prefixes.split(',')
threads = options.threads
urls = getFullUrls(
url, paths, ext=options.extensions, prefixes=options.prefixes)
notFound = notFoundCode(
url=url, cookies=options.cookie, userAgent=options.agent)
print "\n==================================================="
print "[!] PyBirb [Dirb in Python with more features]."
print "[!] By: Ahmed Shawky @lnxg33k."
print "-------------"
print "[-] NotFound Code : %d" % notFound
print "[-] Ignore Codes : %s" % options.ignore
print "[-] Wordlist : %s" % options.wordlist
print "[-] Extensions : %s" % options.extensions
print "[-] Prefixes : %s" % options.prefixes
print "[-] Threads : %d" % threads
print "[-] Wait : %d" % options.sleep
print "====================================================\n"
    result = []
    pool = ThreadPool(threads)
    pbar = ProgressBar(widgets=[SimpleProgress()], maxval=len(urls)).start()
    # Dispatch every URL check to the thread pool; each finished call
    # appends its return value (a dict, or None for a miss) to `result`.
    r = [pool.apply_async(
        fileExists, (
            x, notFound, options.ignore, options.cookie,
            options.sleep, options.agent
        ),
        callback=result.append
    ) for x in urls]
    # Poll for completion; sleep briefly so the loop does not spin the CPU.
    while len(result) != len(urls):
        pbar.update(len(result))
        time.sleep(0.5)
    pbar.finish()
    pool.close()
    pool.join()
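
# Example invocation (target URL and wordlist path are placeholders):
#   python PyDirb.py -u http://example.com -w /path/to/wordlist.txt \
#       -e php,asp -t 50 -i 301,302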