google-search.py (forked from gwen001/pentest-tools)

#!/usr/bin/env python3
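
# Google dork runner built on the goop scraper: each dork is searched on Google
# page by page and the result URLs are printed (or only the number of hits with -n).
# Illustrative invocations (dorks.txt is a hypothetical file with one dork per line):
#   ./google-search.py -t 'site:example.com ext:php'
#   ./google-search.py -f dorks.txt -n
#   ./google-search.py -t 'inurl:admin' -c '<facebook cookie>'
# The Facebook cookie can also be supplied through the FACEBOOK_COOKIE
# environment variable.
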
import os
import sys
import json
import argparse
import urllib.parse
from goop import goop
from functools import partial
from multiprocessing.dummy import Pool
from colored import fg, bg, attr
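
# Third-party dependencies, assumed to be available on the import path:
# goop (a Google scraper that authenticates requests with a Facebook session
# cookie) and colored (terminal colors used for the summary output).
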
def banner():
    print("""
_ _
__ _ ___ ___ __ _| | ___ ___ ___ __ _ _ __ ___| |__ _ __ _ _
/ _` |/ _ \ / _ \ / _` | |/ _ \ / __|/ _ \/ _` | '__/ __| '_ \ | '_ \| | | |
| (_| | (_) | (_) | (_| | | __/ \__ \ __/ (_| | | | (__| | | | _ | |_) | |_| |
\__, |\___/ \___/ \__, |_|\___| |___/\___|\__,_|_| \___|_| |_| (_) | .__/ \__, |
|___/ |___/ |_| |___/
by @gwendallecoguic
""")
pass
parser = argparse.ArgumentParser()
parser.add_argument( "-b","--nobanner",help="disable the banner", action="store_true" )
parser.add_argument( "-f","--file",help="source file that contains the dorks" )
parser.add_argument( "-t","--term",help="search term", action="append" )
parser.add_argument( "-d","--decode",help="urldecode the results", action="store_true" )
parser.add_argument( "-e","--endpage",help="search end page, default 50" )
parser.add_argument( "-s","--startpage",help="search start page, default 0" )
parser.add_argument( "-c","--fbcookie",help="your facebook cookie" )
parser.add_argument( "-o","--output",help="output file" )
parser.add_argument( "-n","--numbers-only",help="don't display the results but how many results where found", action="store_true" )
parser.parse_args()
args = parser.parse_args()
if not args.nobanner:
    banner()

if args.startpage:
    start_page = int(args.startpage)
else:
    start_page = 0

if args.endpage:
    end_page = int(args.endpage)
else:
    end_page = 50

if args.fbcookie:
    fb_cookie = args.fbcookie
else:
    fb_cookie = os.getenv('FACEBOOK_COOKIE')

if not fb_cookie:
    parser.error( 'facebook cookie is missing' )

if args.file:
    if os.path.isfile(args.file):
        # one dork per line, blank lines ignored
        with open( args.file, 'r' ) as fp:
            t_terms = [ t for t in fp.read().split('\n') if t.strip() ]
    else:
        parser.error( '%s file not found' % args.file )
elif args.term:
    t_terms = args.term
else:
    parser.error( 'term is missing' )

# -o and -n both switch to numbers-only output
if args.output or args.numbers_only:
    numbers_only = True
else:
    numbers_only = False

if args.decode:
    urldecode = True
else:
    urldecode = False

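# Each dork is searched page by page: the pages are fetched in parallel by 5
# worker threads, and doMultiSearch() stops requesting further pages once
# several of the preceding pages came back empty. page_history is shared
# between the workers (multiprocessing.dummy uses threads, so the dict lives
# in a single process).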
def doMultiSearch( term, numbers_only, urldecode, page ):
    # count how many of the 4 pages before this one (page-5 to page-2) returned nothing
    zero_result = 0
    for i in range(page-5,page-1):
        if i in page_history and page_history[i] == 0:
            zero_result = zero_result + 1

    if zero_result < 3:
        s_results = goop.search( term, fb_cookie, page, True )
        # print(s_results)
        page_history[page] = len(s_results)
        if not numbers_only:
            for i in s_results:
                if urldecode:
                    print( urllib.parse.unquote(s_results[i]['url']) )
                else:
                    print( s_results[i]['url'] )
    else:
        # too many empty pages before this one: mark the remaining pages as empty
        for i in range(page,end_page):
            page_history[i] = 0

for term in t_terms:
    page_history = {}

    pool = Pool( 5 )
    pool.map( partial(doMultiSearch,term,numbers_only,urldecode), range(start_page,end_page) )
    pool.close()
    pool.join()

    if numbers_only:
        n_results = sum( page_history.values() )
        if n_results:
            color = 'white'
        else:
            color = 'dark_gray'
        full_url = 'https://www.google.com/search?q=' + urllib.parse.quote(term)
        sys.stdout.write( '%s%s (%d)%s\n' % (fg(color),full_url,n_results,attr(0)) )