From cb5859c54e0196738db1a8c6c8afd3ca38da7f40 Mon Sep 17 00:00:00 2001 From: Dhrumil Mistry <56185972+dmdhrumilmistry@users.noreply.github.com> Date: Mon, 13 Jun 2022 18:18:33 +0530 Subject: [PATCH] use threads to discover dirs, files and subdomains rename websites to web rename Network to network --- pyhtools/attackers/Websites/crawler.py | 140 --------------- pyhtools/attackers/attackers.py | 32 ++-- .../attackers/{Websites => web}/__init__.py | 0 .../attackers/{Websites => web}/get_forms.py | 0 .../{Websites => web}/login_guesser.py | 0 .../attackers/{Websites => web}/spider.py | 0 .../{Websites => web}/vuln_scanner/README.md | 0 .../vuln_scanner/__init__.py | 0 .../{Websites => web}/vuln_scanner/scanner.py | 0 .../{Websites => web}/vuln_scanner/sqli.py | 0 .../{Websites => web}/vuln_scanner/test.txt | 0 .../vuln_scanner/vuln_scanner.py | 0 pyhtools/attackers/web/webdiscover.py | 161 ++++++++++++++++++ 13 files changed, 174 insertions(+), 159 deletions(-) delete mode 100644 pyhtools/attackers/Websites/crawler.py rename pyhtools/attackers/{Websites => web}/__init__.py (100%) rename pyhtools/attackers/{Websites => web}/get_forms.py (100%) rename pyhtools/attackers/{Websites => web}/login_guesser.py (100%) rename pyhtools/attackers/{Websites => web}/spider.py (100%) rename pyhtools/attackers/{Websites => web}/vuln_scanner/README.md (100%) rename pyhtools/attackers/{Websites => web}/vuln_scanner/__init__.py (100%) rename pyhtools/attackers/{Websites => web}/vuln_scanner/scanner.py (100%) rename pyhtools/attackers/{Websites => web}/vuln_scanner/sqli.py (100%) rename pyhtools/attackers/{Websites => web}/vuln_scanner/test.txt (100%) rename pyhtools/attackers/{Websites => web}/vuln_scanner/vuln_scanner.py (100%) create mode 100644 pyhtools/attackers/web/webdiscover.py diff --git a/pyhtools/attackers/Websites/crawler.py b/pyhtools/attackers/Websites/crawler.py deleted file mode 100644 index ee5ed79..0000000 --- a/pyhtools/attackers/Websites/crawler.py +++ /dev/null @@ 
-1,140 +0,0 @@ -import requests -import os -import argparse -import sys -from pyhtools.UI.colors import * - - -def get_args()->dict: - ''' - description: creates a ArgumentParser object collects - arguments and returns arguments as a dict - params: None - returns: dict - ''' - parser = argparse.ArgumentParser() - parser.add_argument('-t', '--target-domain', dest='target_domain', help='domain of your target eg. google.com, bing.com, facebook.com, etc.') - parser.add_argument('-w', '--wordlist', dest='wordlist', help='path to wordlist') - parser.add_argument('-m','--mode', dest='mode', help='modes : subdomain(find subdomains of the target domain), dirs(find directories of the target domain), subdir (find subdomain and directories of the target domain).') - - args = parser.parse_args() - del parser - - args_dict = { - 'mode' : args.mode, - 'wordlist' : args.wordlist, - 'target_domain':args.target_domain - } - - return args_dict - - -def request(url)->bool: - ''' - description: requests for specific url and - returns true if url is valid. - params : url(str) - returns : bool - ''' - try: - response = requests.get(url, timeout=0.5) - # print(url) - # print(response) - if response.status_code == 200: - return True - return False - except requests.exceptions.ConnectionError: - return False - except UnicodeError: - return False - except Exception as e: - print(BRIGHT_RED + '[-] Request Exception : ', e) - return False - - -def check_subdomain(domain:str, subdomain:str)->bool: - ''' - description: checks if subdomain exists under domain. - prints generated url and returns True if url is valid - params: subdomain(str), domain(str) - returns: bool - ''' - url = f'http://{subdomain}.{domain}' - # print(url) - if request(url): - print('[*] Valid Subdomain : ', url) - return True - else: - return False - - -def check_directories(domain:str, dir_name:str)->bool: - ''' - description: checks for directory for domain. - prints url and returns True if generated url is valid. 
- params: domain(str), dir_name(str) - returns : bool - ''' - url = f'http://{domain}/{dir_name}' - - if request(url): - print('[*] Valid Directory : ', url) - return True - else: - return False - - -def perform_function(func, wordlist:str, domain:str)->bool: - ''' - description: performs specific function on passed keyword arguements - params: func(function), **kwargs(keyword arguments) - returns: bool - ''' - try: - print(BRIGHT_WHITE + '[*] Loading wordlists...') - - print('='*25) - if os.path.isfile(wordlist): - with open(wordlist, 'r') as wordlist_file: - for word in wordlist_file: - word = word.strip() - # print(word) - func(domain, word) - else: - print(BRIGHT_RED + '[-] Wordlist Not Found.') - print('='*25) - print(BRIGHT_YELLOW + '[*] Process Completed.') - - except Exception as e: - print(BRIGHT_RED + '[-] Perform Exception : ', e) - print(BRIGHT_RED + '[!] Process Interrupted!') - - -# ========== Main =============== -if __name__ == '__main__': - print(BRIGHT_YELLOW + '[*] Starting crawler...') - - args = get_args() - # print(args) - - wordlist_file = r'{}'.format(args['wordlist']) - target_domain = args['target_domain'] - - try: - if args['mode'] == 'subdomain': - print(BRIGHT_YELLOW + '[1] Finding subdomains') - perform_function(check_subdomain, wordlist_file, target_domain) - elif args['mode'] == 'dirs': - print(BRIGHT_YELLOW + '[2] Finding directories and files') - perform_function(check_directories, wordlist_file, target_domain) - elif args['mode'] == 'subdirs': - print(BRIGHT_YELLOW + '[1] Finding subdomains') - perform_function(check_subdomain, wordlist_file, target_domain) - - print(BRIGHT_YELLOW + '[2] Finding directories and files') - perform_function(check_directories, wordlist_file, target_domain) - else: - print(BRIGHT_RED + '[-] Unkown mode: use --help or -h for help') - except KeyboardInterrupt: - print('[!] ctrl+c detected! 
Exiting Program..') - sys.exit() \ No newline at end of file diff --git a/pyhtools/attackers/attackers.py b/pyhtools/attackers/attackers.py index a58b9fd..a04c760 100644 --- a/pyhtools/attackers/attackers.py +++ b/pyhtools/attackers/attackers.py @@ -1,13 +1,13 @@ import json -import pyhtools.attackers.Network.arpspoofer as arp -import pyhtools.attackers.Network.nwscan as nwscan -import pyhtools.attackers.Network.machngr as machngr -import pyhtools.attackers.Websites.login_guesser as web_login -import pyhtools.attackers.Websites.spider as spider -import pyhtools.attackers.Websites.crawler as crawler +import pyhtools.attackers.network.arpspoofer as arp +import pyhtools.attackers.network.nwscan as nwscan +import pyhtools.attackers.network.machngr as machngr +import pyhtools.attackers.web.login_guesser as web_login +import pyhtools.attackers.web.spider as spider from pyhtools.UI.colors import * -from pyhtools.attackers.Websites.vuln_scanner.scanner import Scanner +from pyhtools.attackers.web.vuln_scanner.scanner import Scanner +from pyhtools.attackers.web.webdiscover import Discoverer # NETWORK ATTACKS @@ -34,7 +34,7 @@ def nw_scan(): params: None returns: None ''' - ip_range = input('[+] IP RANGE : ') + ip_range = input('[+] IP (192.168.10.1/24): ') nwscan.run_nwscan(ip_range) @@ -123,10 +123,9 @@ def webcrawldirs(): params: None returns: None ''' - target_url = input('[+] TARGET URL : ') - wordlist_path = input('[+] WORDLIST PATH : ') - crawler.perform_function(crawler.check_directories, - wordlist_path, target_url) + domain = input('[+] DOMAIN (duckduckgo.com): ') + wordlist_path = input('[+] WORDLIST PATH: ') + Discoverer.check_dirs(domain=domain, wordlist=wordlist_path) def webcrawlsubdom(): @@ -135,11 +134,6 @@ def webcrawlsubdom(): params: None returns: None ''' - target_url = input('[+] TARGET URL : ') + domain = input('[+] DOMAIN (duckduckgo.com) : ') wordlist_path = input('[+] WORDLIST PATH : ') - crawler.perform_function(crawler.check_subdomain, - 
wordlist_path, target_url) - - -if __name__ == "__main__": - print('[*] Attackers module!. Exiting...') + Discoverer.check_subdomains(domain=domain, wordlist=wordlist_path) diff --git a/pyhtools/attackers/Websites/__init__.py b/pyhtools/attackers/web/__init__.py similarity index 100% rename from pyhtools/attackers/Websites/__init__.py rename to pyhtools/attackers/web/__init__.py diff --git a/pyhtools/attackers/Websites/get_forms.py b/pyhtools/attackers/web/get_forms.py similarity index 100% rename from pyhtools/attackers/Websites/get_forms.py rename to pyhtools/attackers/web/get_forms.py diff --git a/pyhtools/attackers/Websites/login_guesser.py b/pyhtools/attackers/web/login_guesser.py similarity index 100% rename from pyhtools/attackers/Websites/login_guesser.py rename to pyhtools/attackers/web/login_guesser.py diff --git a/pyhtools/attackers/Websites/spider.py b/pyhtools/attackers/web/spider.py similarity index 100% rename from pyhtools/attackers/Websites/spider.py rename to pyhtools/attackers/web/spider.py diff --git a/pyhtools/attackers/Websites/vuln_scanner/README.md b/pyhtools/attackers/web/vuln_scanner/README.md similarity index 100% rename from pyhtools/attackers/Websites/vuln_scanner/README.md rename to pyhtools/attackers/web/vuln_scanner/README.md diff --git a/pyhtools/attackers/Websites/vuln_scanner/__init__.py b/pyhtools/attackers/web/vuln_scanner/__init__.py similarity index 100% rename from pyhtools/attackers/Websites/vuln_scanner/__init__.py rename to pyhtools/attackers/web/vuln_scanner/__init__.py diff --git a/pyhtools/attackers/Websites/vuln_scanner/scanner.py b/pyhtools/attackers/web/vuln_scanner/scanner.py similarity index 100% rename from pyhtools/attackers/Websites/vuln_scanner/scanner.py rename to pyhtools/attackers/web/vuln_scanner/scanner.py diff --git a/pyhtools/attackers/Websites/vuln_scanner/sqli.py b/pyhtools/attackers/web/vuln_scanner/sqli.py similarity index 100% rename from pyhtools/attackers/Websites/vuln_scanner/sqli.py rename to 
pyhtools/attackers/web/vuln_scanner/sqli.py diff --git a/pyhtools/attackers/Websites/vuln_scanner/test.txt b/pyhtools/attackers/web/vuln_scanner/test.txt similarity index 100% rename from pyhtools/attackers/Websites/vuln_scanner/test.txt rename to pyhtools/attackers/web/vuln_scanner/test.txt diff --git a/pyhtools/attackers/Websites/vuln_scanner/vuln_scanner.py b/pyhtools/attackers/web/vuln_scanner/vuln_scanner.py similarity index 100% rename from pyhtools/attackers/Websites/vuln_scanner/vuln_scanner.py rename to pyhtools/attackers/web/vuln_scanner/vuln_scanner.py diff --git a/pyhtools/attackers/web/webdiscover.py b/pyhtools/attackers/web/webdiscover.py new file mode 100644 index 0000000..86b2d33 --- /dev/null +++ b/pyhtools/attackers/web/webdiscover.py @@ -0,0 +1,161 @@ +import argparse +import os +import sys +import requests + + +from pyhtools.UI.colors import * +from threading import Thread + + +class Discoverer: + ''' + helps to discover directories, files and subdomains + ''' + @staticmethod + def request(url, valid_status_codes: list[int] = None) -> bool: + ''' + description: requests for specific url and + returns true if url is valid. + params : url(str), valid_status_codes list[int] + returns : bool + ''' + # fall back to common valid status codes when none are given + status_codes = valid_status_codes or [200, 204, 301, 302, 307, 401] + + try: + response = requests.get(url, timeout=0.5) + if response.status_code in status_codes: + return True + return False + except Exception as e: + # print(f'{BRIGHT_RED}[-] Request Exception: {e}') + return False + + @staticmethod + def __check_subdomain(domain: str, subdomain: str) -> bool: + ''' + description: checks if subdomain exists under domain.
+ prints generated url and returns True if url is valid + params: subdomain(str), domain(str) + returns: bool + ''' + url = f'http://{subdomain}.{domain}' + + if Discoverer.request(url): + print(f'[\u2713] {url}') + return True + else: + return False + + @staticmethod + def __check_directory(domain: str, dir_name: str) -> bool: + ''' + description: checks for directory for domain. + prints url and returns True if generated url is valid. + params: domain(str), dir_name(str) + returns : bool + ''' + url = f'http://{domain}/{dir_name}' + if Discoverer.request(url): + print(f'[\u2713] {url}') + return True + else: + return False + + @staticmethod + def check_dirs(domain: str, wordlist: str, ): + return Discoverer.__perform_function( + func=Discoverer.__check_directory, + wordlist=wordlist, + domain=domain + ) + + @staticmethod + def check_subdomains(domain: str, wordlist: str): + return Discoverer.__perform_function( + func=Discoverer.__check_subdomain, + wordlist=wordlist, + domain=domain + ) + + @staticmethod + def __perform_function(func, wordlist: str, domain: str, threads: int = 5) -> bool: + ''' + description: performs specific function on passed keyword arguments + params: func(function), **kwargs(keyword arguments) + returns: bool + ''' + # if the wordlist file does not exist return False + if not os.path.isfile(wordlist): + return False + + # load wordlists into a list + words = [''] + with open(wordlist, 'r') as wordlist_file: + words = wordlist_file.readlines() + + # sanitize list + words = [word.strip() for word in words] + + # worker: pop each word before processing so no word is handled twice or skipped + def manage_thread(): + while words: + try: word = words.pop(0) + except IndexError: break + func(domain, word) + + # create threads list + threads_list:list[Thread] = [] + + for _ in range(threads): + thread = Thread(target=manage_thread) + threads_list.append(thread) + + # start threads + for thread in threads_list: + thread.start() + + # stop threads + for thread in threads_list: + thread.join() + + return True + + +if
__name__ == '__main__': + + parser = argparse.ArgumentParser(prog='webdiscover') + parser.add_argument('-t', '--target-domain', dest='target_domain', + help='domain of your target eg. google.com, bing.com, facebook.com, etc.', required=True) + parser.add_argument('-w', '--wordlist', dest='wordlist', + help='path to wordlist', required=True) + parser.add_argument('-m', '--mode', dest='mode', + help='modes : subdom(find subdomains of the target domain), dir(find directories of the target domain). default mode is `dir`', default='dir') + + args = parser.parse_args() + target_domain = args.target_domain + wordlist_file = args.wordlist + mode = args.mode + + try: + if mode == 'subdom': + print(f'{BRIGHT_YELLOW}[1] Finding subdomains') + Discoverer.check_subdomains( + domain=target_domain, + wordlist=wordlist_file + ) + + elif mode == 'dir': + print(f'{BRIGHT_YELLOW}[2] Finding directories and files') + Discoverer.check_dirs( + domain=target_domain, + wordlist=wordlist_file, + ) + + else: + print(f'{BRIGHT_RED}[!] invalid mode. Use -h tag to print help.') + + except (KeyboardInterrupt, EOFError): + print('[!] User Interrupted!') + sys.exit()