-
-
Notifications
You must be signed in to change notification settings - Fork 7
/
google-search.py
executable file
·88 lines (70 loc) · 2.34 KB
/
google-search.py
#!/usr/bin/python3
import os
import sys
import json
import argparse
import urllib.parse
from goop import goop
from functools import partial
from multiprocessing.dummy import Pool
from colored import fg, bg, attr
# def banner():
# print("""
# _ _
# __ _ ___ ___ __ _| | ___ ___ ___ __ _ _ __ ___| |__ _ __ _ _
# / _` |/ _ \ / _ \ / _` | |/ _ \ / __|/ _ \/ _` | '__/ __| '_ \ | '_ \| | | |
# | (_| | (_) | (_) | (_| | | __/ \__ \ __/ (_| | | | (__| | | | _ | |_) | |_| |
# \__, |\___/ \___/ \__, |_|\___| |___/\___|\__,_|_| \___|_| |_| (_) | .__/ \__, |
# |___/ |___/ |_| |___/
# by @gwendallecoguic
# """)
# pass
# ---- command-line interface ----
parser = argparse.ArgumentParser()
# parser.add_argument( "-r","--raw",help="remove banner", action="store_true" )
parser.add_argument( "-s","--search",help="search term (required)" )
parser.add_argument( "-d","--decode",help="urldecode the results", action="store_true" )
parser.add_argument( "-c","--fbcookie",help="your facebook cookie" )
# BUG FIX: parse_args() was previously called twice in a row; the first
# result was silently discarded. Parse once and keep the namespace.
args = parser.parse_args()

# Cookie resolution: the command-line flag wins, otherwise fall back to the
# FACEBOOK_COOKIE environment variable. Missing entirely is a usage error.
if args.fbcookie:
    fb_cookie = args.fbcookie
else:
    fb_cookie = os.getenv('FACEBOOK_COOKIE')
if not fb_cookie:
    parser.error( 'facebook cookie is missing' )

# The search expression is mandatory; argparse cannot enforce it here because
# the flag is optional-style ("-s"), so validate manually.
if args.search:
    search = args.search
else:
    parser.error( 'search expression is missing' )

# store_true already yields a real bool — no if/else dance needed.
urldecode = args.decode
# print(fb_cookie)
def doMultiSearch( term, urldecode, page ):
    """Fetch one page of Google results for *term* via goop and print each URL.

    Reads the module globals ``fb_cookie``, ``page_history`` and ``end_page``.
    Records the number of hits for this page in ``page_history`` so sibling
    workers can detect when results have dried up.

    NOTE(review): ``page_history`` is a plain dict shared by the thread pool
    with no lock. CPython's GIL makes the individual reads/writes here safe,
    but the "3 empty pages" heuristic is inherently racy across workers —
    it is a best-effort early-stop, not an exact cutoff.
    """
    # Count how many of the four preceding pages came back empty.
    # (The original also tested `i != page`, but i runs over
    # page-5 .. page-2 and can never equal page — dead check removed.)
    empty_count = 0
    for prev in range(page - 5, page - 1):
        if prev in page_history and page_history[prev] == 0:
            empty_count += 1

    # Three or more empty predecessors: assume the result stream is
    # exhausted and pre-mark every remaining page as empty so the other
    # workers skip their requests too.
    if empty_count >= 3:
        for skip in range(page, end_page):
            page_history[skip] = 0
        return

    s_results = goop.search( term, fb_cookie, page, True )
    page_history[page] = len(s_results)
    # goop returns a dict keyed by result index; each value carries a 'url'.
    for key in s_results:
        url = s_results[key]['url']
        print( urllib.parse.unquote(url) if urldecode else url )
# Shared bookkeeping: pages already fetched -> number of results seen there.
page_history = {}
start_page = 0
end_page = 100

# multiprocessing.dummy.Pool is a *thread* pool, so the workers share
# page_history and fb_cookie directly. map() blocks until every page in
# [start_page, end_page) has been processed, so leaving the context
# manager afterwards is equivalent to the explicit close()/join() pair.
with Pool( 5 ) as pool:
    pool.map( partial(doMultiSearch, search, urldecode), range(start_page, end_page) )