diff --git a/.gitignore b/.gitignore index a969a05..ec55291 100644 --- a/.gitignore +++ b/.gitignore @@ -9,6 +9,8 @@ map/static/data database.db .idea/ old_databases/ +paused.conf +test_data/ # Tests files tests/test_data/test_database diff --git a/docker/grinder/Dockerfile b/docker/grinder/Dockerfile index 8c31009..c07f1d4 100644 --- a/docker/grinder/Dockerfile +++ b/docker/grinder/Dockerfile @@ -17,10 +17,11 @@ COPY . /app/ COPY /docker/grinder/entrypoint.sh /app/entrypoint.sh COPY --from=tls-scanner-build /TLS-Scanner/apps /app/TLS-Scanner/apps -RUN apk add --no-cache nmap nmap-scripts && \ +RUN apk add --no-cache nmap nmap-scripts masscan libpcap && \ apk add --no-cache libpng freetype libstdc++ pkgconfig openjdk8 && \ - apk add --no-cache --virtual .build-deps gcc build-base python-dev libpng-dev musl-dev freetype-dev && \ + apk add --no-cache --virtual .build-deps gcc build-base python3-dev libpng-dev musl-dev freetype-dev && \ ln -s /usr/include/locale.h /usr/include/xlocale.h && \ + ln -s /usr/lib/libpcap.so.1 /usr/lib/libpcap.so && \ pip install --no-cache-dir -r /app/requirements.txt && \ pip uninstall -y flask && \ apk del .build-deps && \ diff --git a/grinder/core.py b/grinder/core.py index 8267193..738fbe2 100644 --- a/grinder/core.py +++ b/grinder/core.py @@ -3,24 +3,25 @@ Basic core module for grinder. All functions from Other modules must be wrapped here for proper usage. 
""" - -from typing import NamedTuple, List, Dict -from termcolor import cprint -from re import findall +import os +from ipaddress import ip_network, ip_address from ntpath import basename +from re import findall +from typing import NamedTuple, List, Dict -# from enforce import runtime_validation +from termcolor import cprint -from grinder.vulnersconnector import VulnersConnector from grinder.censysconnector import CensysConnector from grinder.continents import GrinderContinents from grinder.dbhandling import GrinderDatabase from grinder.decorators import exception_handler, timer from grinder.defaultvalues import ( DefaultValues, + DefaultMasscanScanValues, DefaultNmapScanValues, DefaultVulnersScanValues, DefaultScriptCheckerValues, + TopPorts, ) from grinder.errors import ( GrinderCoreSearchError, @@ -51,6 +52,7 @@ GrinderCoreCensysSaveToDatabaseError, GrinderCoreSaveResultsToDatabaseError, GrinderCoreNmapScanError, + GrinderCoreMasscanScanError, GrinderCoreFilterQueriesError, GrinderCoreVulnersScanError, GrinderCoreRunScriptsError, @@ -59,17 +61,23 @@ GrinderCoreSaveVulnersResultsError, GrinderCoreSaveVulnersPlotsError, GrinderCoreForceUpdateCombinedResults, + GrinderCoreHostMasscanResultsError, + GrinderCoreMasscanSaveToDatabaseError, + MasscanConnectorScanError, ) from grinder.filemanager import GrinderFileManager from grinder.mapmarkers import MapMarkers +# from enforce import runtime_validation +from grinder.masscanconnector import MasscanConnector from grinder.nmapprocessmanager import NmapProcessingManager +from grinder.nmapscriptexecutor import NmapScriptExecutor from grinder.plots import GrinderPlots -from grinder.shodanconnector import ShodanConnector -from grinder.utils import GrinderUtils from grinder.pyscriptexecutor import PyProcessingManager -from grinder.nmapscriptexecutor import NmapScriptExecutor -from grinder.tlsscanner import TlsScanner +from grinder.shodanconnector import ShodanConnector from grinder.tlsparser import TlsParser +from 
grinder.tlsscanner import TlsScanner +from grinder.utils import GrinderUtils +from grinder.vulnersconnector import VulnersConnector class HostInfo(NamedTuple): @@ -104,10 +112,11 @@ def __init__( shodan_api_key: str = "", censys_api_id: str = "", censys_api_secret: str = "", - vulners_api_key: str = "" + vulners_api_key: str = "", ) -> None: self.shodan_processed_results: dict = {} self.censys_processed_results: dict = {} + self.masscan_results: dict = {} self.combined_results: dict = {} self.entities_count_all: list = [] @@ -244,7 +253,13 @@ def __get_proper_entity_name(entity_name: str) -> str: :param entity_name: name of entity :return: modified entity name """ - if entity_name.lower() in ["continent", "port", "product", "vendor", "organization"]: + if entity_name.lower() in [ + "continent", + "port", + "product", + "vendor", + "organization", + ]: return entity_name + "s" elif entity_name.lower() in ["country", "vulnerability"]: return entity_name[:-1] + "ies" @@ -416,6 +431,7 @@ def __force_update_combined_results(self) -> None: **self.combined_results, **self.censys_processed_results, **self.shodan_processed_results, + **self.masscan_results, } @exception_handler(expected_exception=GrinderCoreLoadResultsFromDbError) @@ -434,6 +450,7 @@ def load_results_from_db(self, queries_filename: str) -> list or dict: self.combined_results = {} self.shodan_processed_results = {} self.censys_processed_results = {} + self.masscan_results = {} else: print("Previous scan for this queries list was successfully loaded from database.") return self.combined_results @@ -442,6 +459,7 @@ def load_results_from_db(self, queries_filename: str) -> list or dict: self.combined_results = self.db.load_last_results() self.shodan_processed_results = self.db.load_last_shodan_results() self.censys_processed_results = self.db.load_last_censys_results() + self.masscan_results = self.db.load_last_masscan_results() print("Results of latest scan was successfully loaded from database.") return 
self.combined_results except GrinderDatabaseLoadResultsError: @@ -455,14 +473,17 @@ def load_results(self, queries_filename: str = "") -> list: :param queries_filename: name of file with queries to load :return: processed search results """ - return self.load_results_from_file() or self.load_results_from_db(queries_filename) + return self.load_results_from_file() \ + or self.load_results_from_db(queries_filename) @exception_handler(expected_exception=GrinderCoreSaveVulnersResultsError) - def save_vulners_results(self, - results: dict, - name: str, - dest_dir=DefaultValues.RESULTS_DIRECTORY, - hosts_results=None) -> None: + def save_vulners_results( + self, + results: dict, + name: str, + dest_dir=DefaultValues.RESULTS_DIRECTORY, + hosts_results=None, + ) -> None: """ Save results from vulners separately from another results :param results: results to save @@ -477,9 +498,11 @@ def save_vulners_results(self, dest_dir=dest_dir, json_file=f"{name.replace(' ', '_')}.json", ) - bypass_list = ["vulners exploits by vulnerabilities", - "vulners by cvss groups", - "hosts groupped by vulnerabilities"] + bypass_list = [ + "vulners exploits by vulnerabilities", + "vulners by cvss groups", + "hosts groupped by vulnerabilities", + ] self.filemanager.write_results_csv( results.values() if name not in bypass_list else results, dest_dir=dest_dir, @@ -490,11 +513,13 @@ def save_vulners_results(self, results, dest_dir=dest_dir, csv_file=f"{name.replace(' ', '_')}.csv", - hosts_results=hosts_results + hosts_results=hosts_results, ) @exception_handler(expected_exception=GrinderCoreSaveVulnersPlotsError) - def save_vulners_plots(self, results: dict or list, name: str, suptitle: str) -> None: + def save_vulners_plots( + self, results: dict or list, name: str, suptitle: str + ) -> None: """ Create plots with vulners results :param results: results to save @@ -505,8 +530,7 @@ def save_vulners_plots(self, results: dict or list, name: str, suptitle: str) -> cprint(f"Create Vulners graphical 
plots for {name}...", "blue", attrs=["bold"]) plots = GrinderPlots() plots.create_pie_chart( - results=results, - suptitle=f"{suptitle}", + results=results, suptitle=f"{suptitle}", ) plots.save_pie_chart( relative_path=DefaultValues.PNG_VULNERS_RESULTS, @@ -566,16 +590,14 @@ def vulners_report(self) -> None: (vulners_exploits_by_cpe, "vulners exploits by software"), (vulners_by_cvss_groups, "vulners by cvss groups"), (vulners_critical_vulnerabilities_hosts, "hosts with critical vulnerabilities"), - (vulners_by_cvss_groups_hosts, "hosts groupped by vulnerabilities") + (vulners_by_cvss_groups_hosts, "hosts groupped by vulnerabilities"), ] # Saver for results, name in named_results_to_save: if not results: continue self.save_vulners_results( - results, - name=name, - hosts_results=self.combined_results, + results, name=name, hosts_results=self.combined_results, ) # Count length @@ -583,25 +605,27 @@ def vulners_report(self) -> None: length_critical_vulnerabilities = len(vulners_critical_vulnerabilities.keys()) length_references_vulnerabilities = len(vulners_exploits_by_cve.keys()) length_exploitable_hosts = len(vulners_exploits_by_cpe.keys()) - length_hosts_with_critical_vulnerabilities = len(vulners_critical_vulnerabilities_hosts.keys()) + length_hosts_with_critical_vulnerabilities = len( + vulners_critical_vulnerabilities_hosts.keys() + ) length_all_hosts = len(self.combined_results) # Set labels and definitions for plots hosts_with_critical_vulnerabilities_comparison = { "Other": length_all_hosts - length_hosts_with_critical_vulnerabilities, - "With Critical Vulnerabilities": length_hosts_with_critical_vulnerabilities + "With Critical Vulnerabilities": length_hosts_with_critical_vulnerabilities, } critical_vulnerabilities_comparison = { "Other": length_vulnerabilities - length_critical_vulnerabilities, - "Critical": length_critical_vulnerabilities + "Critical": length_critical_vulnerabilities, } vulnerabilities_with_exploits_comparison = { "Other": 
length_vulnerabilities - length_references_vulnerabilities, - "Referenced in Exploits": length_references_vulnerabilities + "Referenced in Exploits": length_references_vulnerabilities, } cpes_with_exploits_comparison = { "Other": length_all_hosts - length_exploitable_hosts, - "With Exploits": length_exploitable_hosts + "With Exploits": length_exploitable_hosts, } vulners_cvss_comparison = { key: len(value) for key, value in vulners_by_cvss_groups.items() @@ -639,13 +663,15 @@ def vulners_report(self) -> None: { "results": vulners_cvss_hosts_comparison, "name": "hosts groupped by cvss rating", - "suptitle": "Percentage of nodes divided into groups of CVSS rating vulnerabilities" - } + "suptitle": "Percentage of nodes divided into groups of CVSS rating vulnerabilities", + }, ] for entity_to_save in plots_information_to_save: - self.save_vulners_plots(entity_to_save.get("results"), - name=entity_to_save.get("name"), - suptitle=entity_to_save.get("suptitle")) + self.save_vulners_plots( + entity_to_save.get("results"), + name=entity_to_save.get("name"), + suptitle=entity_to_save.get("suptitle"), + ) @exception_handler(expected_exception=GrinderCoreSaveResultsError) def save_results(self, dest_dir: str = DefaultValues.RESULTS_DIRECTORY) -> None: @@ -658,7 +684,12 @@ def save_results(self, dest_dir: str = DefaultValues.RESULTS_DIRECTORY) -> None: cprint("Save all results...", "blue", attrs=["bold"]) # If all scan results were empty - if not self.combined_results and not self.shodan_processed_results and not self.censys_processed_results: + if ( + not self.combined_results + and not self.shodan_processed_results + and not self.censys_processed_results + and not self.masscan_results + ): return # If some results are exists, but combined results are empty - refresh it elif not self.combined_results: @@ -705,7 +736,7 @@ def save_results(self, dest_dir: str = DefaultValues.RESULTS_DIRECTORY) -> None: @exception_handler(expected_exception=GrinderCoreIsHostExistedError) def 
__is_host_existed(self, ip: str) -> bool: """ - Check if current host is existed in current results. + Check if current host is existed in current results. :param ip: host ip :return: answer to question "Is current host already scanned?" @@ -839,6 +870,40 @@ def __parse_current_host_censys_results( {censys_result_as_dict.get("ip"): censys_result_as_dict} ) + @exception_handler(expected_exception=GrinderCoreHostMasscanResultsError) + def __parse_masscan_results(self, hosts: dict, query: str, product_info: dict) -> None: + """ + Parse raw results from Masscan. Results were received from + MasscanConnector module. + + :param hosts: all hosts information + :param product_info: information about current product + :return: None + """ + for host in hosts.keys(): + ports = ",".join([str(p) for p in hosts.get(host, {}).get("tcp", {}).keys()]) + host_info = HostInfo( + product=product_info.get("product", "Unknown product"), + vendor=product_info.get("vendor", "Unknown vendor"), + query=query, + port=ports, + proto="", + ip=host, + lat="", + lng="", + country="", + organization="", + vulnerabilities=dict( + shodan_vulnerabilities={}, + vulners_vulnerabilities={}, + ), + nmap_scan={}, + scripts=dict(py_script=None, nse_script=None), + ) + masscan_result_as_dict = dict(host_info._asdict()) + self.masscan_results.update({host: masscan_result_as_dict}) + + @exception_handler(expected_exception=GrinderCoreInitDatabaseCallError) def __init_database(self, queries_filename: str) -> None: """ @@ -858,9 +923,12 @@ def __increment_prev_scan_results(self): """ self.shodan_processed_results = self.db.load_all_shodan_results_by_scan_name() self.censys_processed_results = self.db.load_all_censys_results_by_scan_name() + self.masscan_results = self.db.load_all_masscan_results_by_scan_name() self.__force_update_combined_results() if self.combined_results: - print(f"Results from previous scans were loaded: {len(self.combined_results)} hosts") + print( + f"Results from previous scans were 
loaded: {len(self.combined_results)} hosts" + ) @exception_handler(expected_exception=GrinderCoreCloseDatabaseError) def __close_database(self) -> None: @@ -943,6 +1011,54 @@ def __censys_save_to_database(self, query: dict) -> None: query=query, results_count=results_count, results=results_by_query ) + @exception_handler(expected_exception=GrinderCoreMasscanSaveToDatabaseError) + def __masscan_save_to_database(self, query: dict) -> None: + """ + Save current query-based results to database + + :param query: current search query + :return: None + """ + def check_ip(address: str, networks: str) -> bool: + ranges = networks.split(",") + host_ip = ip_address(address) + + for r in ranges: + if "-" in r: + scope = r.split("-") + min_address = ip_address(scope[0]) + max_address = ip_address(scope[1]) + if min_address <= host_ip <= max_address: + return True + + if "/" in r: + net = ip_network(r, False) + if host_ip in net \ + or host_ip == net.broadcast_address \ + or host_ip == net.num_addresses: + return True + + if address == r: + return True + + return False + + results_by_query = list( + filter( + lambda host: check_ip(host.get("ip"), query.get("hosts")), + self.masscan_results.values(), + ) + ) + + results_count = len(results_by_query) if results_by_query else None + + if results_count is None: + return + + self.db.add_masscan_scan_data( + query=query, results_count=results_count, results=results_by_query + ) + @exception_handler(expected_exception=GrinderCoreSaveResultsToDatabaseError) def save_results_to_database(self, close: bool = True): """ @@ -957,6 +1073,8 @@ def save_results_to_database(self, close: bool = True): self.__shodan_save_to_database(query) for query in product_info.get("censys_queries", []) or []: self.__censys_save_to_database(query) + for query in product_info.get("masscan_settings", []) or []: + self.__masscan_save_to_database(query) self.__update_end_time_database() self.__update_results_count( @@ -1015,7 +1133,7 @@ def 
__process_current_product_queries(self, product_info: dict) -> None: adds information about product in database, search hosts with queries and parse them after that. - :param product_info (dict): all information about current product + :param product_info (dict): all information about current product including queries, vendor, confidence etc. :return None: """ @@ -1023,12 +1141,20 @@ def __process_current_product_queries(self, product_info: dict) -> None: # Shodan queries processor len_of_shodan_queries = len(product_info.get("shodan_queries") or []) - for query_index, query_info in enumerate(product_info.get("shodan_queries") or []): - if not self.__is_query_confidence_valid(query_info.get("query_confidence", "") or ""): + for query_index, query_info in enumerate( + product_info.get("shodan_queries") or [] + ): + if not self.__is_query_confidence_valid( + query_info.get("query_confidence", "") or "" + ): continue query = query_info.get("query") - cprint(f"{query_index} / {len_of_shodan_queries} :: " - f"Current Shodan query is: {query or 'Empty query field'}", "blue", attrs=["bold"]) + cprint( + f"{query_index + 1} / {len_of_shodan_queries} :: " + f"Current Shodan query is: {query or 'Empty query field'}", + "blue", + attrs=["bold"], + ) if not query: print("Query field is empty, skip this search") continue @@ -1040,12 +1166,20 @@ def __process_current_product_queries(self, product_info: dict) -> None: # Censys queries processor len_of_censys_queries = len(product_info.get("censys_queries") or []) - for query_index, query_info in enumerate(product_info.get("censys_queries") or []): - if not self.__is_query_confidence_valid(query_info.get("query_confidence", "") or ""): + for query_index, query_info in enumerate( + product_info.get("censys_queries") or [] + ): + if not self.__is_query_confidence_valid( + query_info.get("query_confidence", "") or "" + ): continue query = query_info.get("query") - cprint(f"{query_index} / {len_of_censys_queries} :: " - f"Current 
Censys query is: {query or 'Empty query field'}", "blue", attrs=["bold"]) + cprint( + f"{query_index + 1} / {len_of_censys_queries} :: " + f"Current Censys query is: {query or 'Empty query field'}", + "blue", + attrs=["bold"], + ) if not query: print("Query field is empty, skip this search") continue @@ -1055,6 +1189,47 @@ def __process_current_product_queries(self, product_info: dict) -> None: current_host, query, product_info ) + # Masscan queries processor + len_of_masscan_settings = len(product_info.get("masscan_settings") or []) + for query_index, query_info in enumerate( + product_info.get("masscan_settings") or [] + ): + if not query_info.get("hosts"): + print("Hosts field is empty, skip this search") + continue + + hosts = query_info.get("hosts") + top_ports = int(query_info.get("top-ports") or 0) + ports = query_info.get("ports", DefaultMasscanScanValues.PORTS) + if ports == "" and top_ports == 0: + top_ports = DefaultMasscanScanValues.TOP_PORTS + rate = query_info.get("rate", DefaultMasscanScanValues.RATE) + + cprint( + f"{query_index + 1} / {len_of_masscan_settings} :: " + f"Current Masscan scan is: {hosts}", + "blue", + attrs=["bold"], + ) + + try: + masscan_raw_results = self.masscan_scan( + hosts=hosts, + ports=ports, + top_ports=top_ports, + rate=rate, + ) + self.__parse_masscan_results(masscan_raw_results, hosts, product_info) + except GrinderCoreMasscanScanError as masscan_exception: + if "FAIL: failed to detect MAC address of interface" in str(masscan_exception): + print("│ ", end="") + cprint(f"You are probably using a VPN, but Masscan is not working with one", "yellow") + print(f"└ ", end="") + cprint(f"Skip all Masscan tasks", "yellow") + break + else: + raise masscan_exception + @exception_handler(expected_exception=GrinderCoreTlsScanner) def tls_scan(self, scanner_path: str) -> None: """ @@ -1167,10 +1342,68 @@ def nmap_scan( self.shodan_processed_results[host]["nmap_scan"] = nmap_results.get(host) for host in 
self.censys_processed_results.keys(): self.censys_processed_results[host]["nmap_scan"] = nmap_results.get(host) + for host in self.masscan_results.keys(): + self.masscan_results[host]["nmap_scan"] = nmap_results.get(host) # Trigger to update overall results (shodan + censys as combined results) for host in self.combined_results.keys(): self.combined_results[host]["nmap_scan"] = nmap_results.get(host) + + @timer + @exception_handler(expected_exception=GrinderCoreMasscanScanError) + def masscan_scan( + self, + hosts: str = None, + ports: str = DefaultMasscanScanValues.PORTS, + top_ports: int = DefaultMasscanScanValues.TOP_PORTS, + rate: int = DefaultMasscanScanValues.RATE, + arguments: str = DefaultMasscanScanValues.ARGUMENTS, + sudo: bool = DefaultMasscanScanValues.SUDO, + ) -> dict: + """ + Initiate Masscan scan on hosts + + :param hosts: ip to scan + :param ports: ports to scan + :param top_ports: number of first ports in top ports list + :param rate: packet rate + :param arguments: masscan arguments + :param sudo: sudo if needed + :return: None + """ + print( + f'│ Masscan scan arguments: {arguments or None}, rate "{str(rate)}", ' + f'hosts: "{str(hosts)}", ports: "{str(ports)}", top ports: "{str(top_ports)}"', + ) + + # check if we already root user, + # this is necessary to run grinder in docker + if os.getuid() == 0: + sudo = False + + if ports != "" and top_ports != 0: + ports += "," + ports += ",".join([str(port) for port in TopPorts.MASSCAN_TOP_PORTS[:top_ports]]) + + masscan = MasscanConnector() + + try: + masscan.scan( + host=hosts, + ports=ports, + rate=rate, + arguments=arguments, + sudo=sudo + ) + except MasscanConnectorScanError as masscan_exception: + if "network is unreachable" not in str(masscan_exception): + raise masscan_exception + + print(f"│ Hosts count: {masscan.get_results_count()}") + print(f"└ ", end="") + + return masscan.get_results() + @exception_handler(expected_exception=GrinderCoreVulnersScanError) def vulners_scan( self, @@ -1248,6 
+1481,10 @@ def vulners_scan( self.censys_processed_results[host]["vulnerabilities"].update( {"vulners_vulnerabilities": hosts_vulners.get(host)} ) + for host in self.masscan_results.keys(): + self.masscan_results[host]["vulnerabilities"].update( + {"vulners_vulnerabilities": hosts_vulners.get(host)} + ) # Trigger to update combined results, for example, when shodan and censys results # are empty for host in self.combined_results.keys(): @@ -1292,7 +1529,9 @@ def __filter_queries_by_vendor_confidence(self) -> None: if not self.vendor_confidence: return if not isinstance(self.vendor_confidence, str): - print("Confidence level for vendors is not valid: wrong type of confidence level") + print( + "Confidence level for vendors is not valid: wrong type of confidence level" + ) self.queries_file = [] return if not self.vendor_confidence.lower() in ["firm", "certain", "tentative"]: @@ -1384,12 +1623,14 @@ def __run_nse_scripts(self, workers: int, mute: bool) -> None: if not scripts: continue - cur_position = f"{index}/{results_len}" + cur_position = f"{index + 1}/{results_len}" nse_script = scripts.get("nse_script") if nse_script: nse_script_res = NmapScriptExecutor.run_script(host_info, nse_script) if not nse_script_res: - print(f"[{cur_position}] [NseExecutor: Empty output] Script {nse_script} done for {ip}") + print( + f"[{cur_position}] [NseExecutor: Empty output] Script {nse_script} done for {ip}" + ) else: print( f"[{cur_position}] [NseExecutor: Successful] Script {nse_script} done for {ip}" @@ -1408,24 +1649,33 @@ def __run_py_scripts(self, workers: int, mute: bool) -> None: :return: None """ # Reduce original queries to smaller dict with scripts - py_scripts_per_product = {f"{product.get('vendor', 'unknown')}:{product.get('product', 'unknown')}": - product.get('scripts', {}).get('py_script') - for product in self.queries_file - if product.get('scripts', {}).get('py_script')} + py_scripts_per_product = { + f"{product.get('vendor', 
'unknown')}:{product.get('product', 'unknown')}": product.get( + "scripts", {} + ).get( + "py_script" + ) + for product in self.queries_file + if product.get("scripts", {}).get("py_script") + } # Compare ips to required scripts py_ip_script_mapping = dict() for ip, host_info in self.combined_results.items(): - compatible_script = py_scripts_per_product.get(f"{host_info.get('vendor')}:{host_info.get('product')}") + compatible_script = py_scripts_per_product.get( + f"{host_info.get('vendor')}:{host_info.get('product')}" + ) if not compatible_script: continue py_ip_script_mapping.update({ip: compatible_script}) # Run scripts - py_runner = PyProcessingManager(ip_script_mapping=py_ip_script_mapping, - hosts_info=self.combined_results, - workers=workers, - mute=mute) + py_runner = PyProcessingManager( + ip_script_mapping=py_ip_script_mapping, + hosts_info=self.combined_results, + workers=workers, + mute=mute, + ) py_runner.start() scripts_results = py_runner.get_results() @@ -1439,10 +1689,12 @@ def __run_py_scripts(self, workers: int, mute: bool) -> None: continue @exception_handler(expected_exception=GrinderCoreRunScriptsError) - def run_scripts(self, - queries_filename: str, - workers: int = DefaultScriptCheckerValues.WORKERS, - mute: bool = False) -> None: + def run_scripts( + self, + queries_filename: str, + workers: int = DefaultScriptCheckerValues.WORKERS, + mute: bool = False, + ) -> None: """ Initiate script execution @@ -1494,12 +1746,14 @@ def __separate_filename_wo_extension(original_filepath: str) -> str: @timer @exception_handler(expected_exception=GrinderCoreBatchSearchError) - def batch_search(self, queries_filename: str, not_incremental: bool = False) -> dict: + def batch_search( + self, queries_filename: str, not_incremental: bool = False + ) -> dict: """ Run batch search for all products from input JSON product list file. 
Here we are try to load JSON file with queries for different search systems, also we initialize our database (if it was not initialized - earlier), and we process every product in queries file (parsing, + earlier), and we process every product in queries file (parsing, processing, etc.). Basically it is the main search method in module. :param queries_filename: name of json file with input data @@ -1510,7 +1764,7 @@ def batch_search(self, queries_filename: str, not_incremental: bool = False) -> host_ip: { host_information ... - } + } ... } """ @@ -1544,9 +1798,14 @@ def batch_search(self, queries_filename: str, not_incremental: bool = False) -> len_of_products = len(self.queries_file) for product_index, product_info in enumerate(self.queries_file): - cprint(f"{product_index} / {len_of_products} :: Current product: {product_info.get('product')}", "blue", attrs=["bold"]) + cprint( + f"{product_index + 1} / {len_of_products} :: Current product: {product_info.get('product')}", + "blue", + attrs=["bold"], + ) self.__process_current_product_queries(product_info) # Force create combined results container self.__force_update_combined_results() + return self.combined_results diff --git a/grinder/dbhandling.py b/grinder/dbhandling.py index 0d97eea..1216f53 100644 --- a/grinder/dbhandling.py +++ b/grinder/dbhandling.py @@ -46,6 +46,7 @@ def create_db(self) -> None: - scan_information - basic table with all main information about scan - scan_data - information about product, vendor, script running and confidence - shodan_results/censys_results - for results from backend search engines + - masscan_results - for results from masscan scan :return: None """ with self.connection as db_connection: @@ -113,6 +114,23 @@ def create_db(self) -> None: ) """ ) + db_connection.execute( + """ + CREATE TABLE IF NOT EXISTS + masscan_results( + id INTEGER PRIMARY KEY AUTOINCREMENT, + scan_data_id INTEGER, + scan_information_id INTEGER, + query TEXT, + query_confidence TEXT, + results_count 
INTEGER, + results TEXT, + + FOREIGN KEY (scan_data_id) REFERENCES scan_data(id), + FOREIGN KEY (scan_information_id) REFERENCES scan_information(id) + ) + """ + ) @exception_handler(expected_exception=GrinderDatabaseInitialScanError) def initiate_scan(self, queries_filename: str) -> None: @@ -321,14 +339,53 @@ def add_censys_scan_data( ), ) + @exception_handler(expected_exception=GrinderDatabaseAddScanDataError) + def add_masscan_scan_data( + self, query: dict, results_count: int, results: dict or list + ) -> None: + """ + Add results from masscan for current query + :param query: result for current query + :param results_count: quantity of results + :param results: results itself + :return: None + """ + with self.connection as db_connection: + db_connection.execute( + """ + INSERT OR REPLACE INTO + masscan_results( + scan_data_id, + scan_information_id, + query, + query_confidence, + results_count, + results + ) VALUES ( + (SELECT max(id) FROM scan_data), + (SELECT max(id) FROM scan_information), + ?, + ?, + ?, + json(?) + ) + """, + ( + query.get("hosts"), + query.get("query_confidence"), + results_count, + json_dumps(results), + ), + ) + @exception_handler(expected_exception=GrinderDatabaseLoadResultsError) def load_last_results(self) -> dict: """ Load latest scan results from database, without scan linking. - This function collects last result from censys scan and - last result from shodan scan, and combine it together - with union select. Needed if you only need to load - any last results combination. + This function collects last result from censys scan, last + result from shodan scan and last result from masscan scan, + and combine it together with union select. Needed if you + only need to load any last results combination. 
:return: dict with results """ with self.connection as db_connection: @@ -346,6 +403,12 @@ def load_last_results(self) -> dict: SELECT max(id) FROM scan_information WHERE scan_total_results != 0 ) + UNION SELECT json_extract(results, '$') + FROM masscan_results + WHERE scan_information_id = ( + SELECT max(id) FROM scan_information + WHERE scan_total_results != 0 + ) """ ).fetchall() if not sql_results: @@ -361,11 +424,11 @@ def load_last_results_by_name(self, engine_table: str, scan_name: str = "") -> d """ Load last results with some particular scan that can be passed via 'scan_name' variable. This function returns results - only from one backend system (censys, shodan) at time, + only from one backend system (censys, shodan, masscan) at time, and only the latest _one_. If 'scan_name' is not setted, any last result from censys or shodan scan will be loaded. - :param engine_table: shodan_results, censys_results, etc. + :param engine_table: shodan_results, censys_results, masscan_results, etc. :param scan_name: name of scanning - "servers", "sd-wans", etc. :return: dict with results """ @@ -397,10 +460,10 @@ def load_last_results_by_name(self, engine_table: str, scan_name: str = "") -> d @exception_handler(expected_exception=GrinderDatabaseLoadResultsError) def load_all_results_by_name(self, engine_table: str, scan_name: str = "") -> dict: """ - Load collection of all results from one backend system (censys, shodan). - For exampe, you can load all records from Shodan with 'servers' scan, - and this function will sort only unique hosts from all of the history - of 'servers' scanning + Load collection of all results from one backend system (censys, shodan, + masscan). For example, you can load all records from Shodan with + 'servers' scan, and this function will sort only unique hosts from + all of the history of 'servers' scanning :param engine_table: shodan_results, censys_results, etc. :param scan_name: name of scanning - "servers", "sd-wans", etc. 
:return: dict with results @@ -435,14 +498,15 @@ def load_all_results_by_name(self, engine_table: str, scan_name: str = "") -> di def load_multiple_last_results_by_name(self) -> dict: """ Load last results with some 'scan_name' from multiple - backend systems (shodan + censys at once). This function + backend systems (shodan + censys + masscan at once). This function sort all of the host into one dictionary and returns unique results from last scan of some 'scan_name' :return: dictionary with all results, like "combined" results """ shodan_results = self.load_last_results_by_name(engine_table="shodan_results", scan_name=self.scan_name) censys_results = self.load_last_results_by_name(engine_table="censys_results", scan_name=self.scan_name) - return {**shodan_results, **censys_results} + masscan_results = self.load_last_results_by_name(engine_table="masscan_results", scan_name=self.scan_name) + return {**shodan_results, **censys_results, **masscan_results} @exception_handler(expected_exception=GrinderDatabaseLoadResultsError) def load_last_shodan_results(self) -> dict: @@ -460,6 +524,14 @@ def load_last_censys_results(self) -> dict: """ return self.load_last_results_by_name(engine_table="censys_results") + @exception_handler(expected_exception=GrinderDatabaseLoadResultsError) + def load_last_masscan_results(self) -> dict: + """ + Return latest results from masscan only + :return: dict with masscan results + """ + return self.load_last_results_by_name(engine_table="masscan_results") + @exception_handler(expected_exception=GrinderDatabaseLoadResultsError) def load_last_shodan_results_by_scan_name(self) -> dict: """ @@ -476,6 +548,14 @@ def load_last_censys_results_by_scan_name(self) -> dict: """ return self.load_last_results_by_name(engine_table="censys_results", scan_name=self.scan_name) + @exception_handler(expected_exception=GrinderDatabaseLoadResultsError) + def load_last_masscan_results_by_scan_name(self) -> dict: + """ + Return latest masscan results by some scan 
name (filename.json) + :return: dict with results by name + """ + return self.load_last_results_by_name(engine_table="masscan_results", scan_name=self.scan_name) + @exception_handler(expected_exception=GrinderDatabaseLoadResultsError) def load_all_shodan_results_by_scan_name(self) -> dict: """ @@ -492,6 +572,14 @@ def load_all_censys_results_by_scan_name(self) -> dict: """ return self.load_all_results_by_name(engine_table="censys_results", scan_name=self.scan_name) + @exception_handler(expected_exception=GrinderDatabaseLoadResultsError) + def load_all_masscan_results_by_scan_name(self) -> dict: + """ + Return all combined masscan results by some scan name (filename.json) + :return: dict with results by name + """ + return self.load_all_results_by_name(engine_table="masscan_results", scan_name=self.scan_name) + @exception_handler(expected_exception=GrinderDatabaseCloseError) def close(self) -> None: """ diff --git a/grinder/defaultvalues.py b/grinder/defaultvalues.py index 4ce5b94..76aa6ea 100644 --- a/grinder/defaultvalues.py +++ b/grinder/defaultvalues.py @@ -6,6 +6,7 @@ class DefaultValues: Values that used almost everywhere, most basic default values class """ + SHODAN_API_KEY: str = "YOUR_DEFAULT_API_KEY" CENSYS_API_ID: str = "YOUR_CENSYS_API_ID" CENSYS_API_SECRET: str = "YOUR_CENSYS_API_SECRET" @@ -55,6 +56,7 @@ class DefaultScriptCheckerValues: """ Default values for script scanners """ + WORKERS = 50 @@ -62,6 +64,7 @@ class DefaultTlsParserValues: """ Default values for TLS-Parser """ + PARSED_RESULTS_DIR = "tls_processed_data" FULL_RESULTS_JSON = "tls_scanner_results.json" @@ -81,6 +84,7 @@ class DefaultTlsScannerValues: """ Default values for TLS-Scanner """ + PRODUCT_LIMIT = 50 LENGTH_OF_HOSTS_SUBGROUPS = 100 NMAP_PING_SCAN_ARGS = "-n -sP" @@ -98,6 +102,7 @@ class DefaultVulnersScanValues: """ Default values for Nmap Vulners scan """ + SUDO = False PORTS = None TOP_PORTS = None @@ -106,10 +111,23 @@ class DefaultVulnersScanValues: VULNERS_SCRIPT_PATH = 
"/plugins/vulners.nse" +class DefaultMasscanScanValues: + """ + Default values for Masscan scan itself + """ + + PORTS: str = "" + TOP_PORTS: int = 1000 + RATE: int = 1000 + ARGUMENTS: str = "" + SUDO: bool = True + + class DefaultNmapScanValues: """ Default values for Nmap scan itself """ + PORTS = None TOP_PORTS = None SUDO = False @@ -122,6 +140,7 @@ class DefaultProcessManagerValues: """ Default values for process manager """ + PORTS = None SUDO = False ARGUMENTS = "-Pn -A --open" @@ -132,6 +151,7 @@ class DefaultPlotValues: """ Default plot values """ + PLOT_DEFAULT_AUTOPCT = "%1.1f%%" PLOT_LABEL_FONT_SIZE = 6 PLOT_SUPTITLE_FONT_SIZE = 10 @@ -143,4 +163,121 @@ class DefaultDatabaseValues: """ Default database values """ + DB_NAME = "database.db" + + +class TopPorts: + MASSCAN_TOP_PORTS = [ + "1", "3", "4", "6", "7", "9", "13", "17", "19", "20", "21", "22", "23", + "24", "25", "26", "30", "32", "33", "37", "42", "43", "49", "53", "70", + "79", "80", "81", "82", "83", "84", "85", "88", "89", "90", "99", "100", + "106", "109", "110", "111", "113", "119", "125", "135", "139", "143", "144", + "146", "161", "163", "179", "199", "211", "212", "222", "254", "255", "256", + "259", "264", "280", "301", "306", "311", "340", "366", "389", "406", "407", + "416", "417", "425", "427", "443", "444", "445", "458", "464", "465", "481", + "497", "500", "512", "513", "514", "515", "524", "541", "543", "544", "545", + "548", "554", "555", "563", "587", "593", "616", "617", "625", "631", "636", + "646", "648", "666", "667", "668", "683", "687", "691", "700", "705", "711", + "714", "720", "722", "726", "749", "765", "777", "783", "787", "800", "801", + "808", "843", "873", "880", "888", "898", "900", "901", "902", "903", "911", + "912", "981", "987", "990", "992", "993", "995", "999", "1000", "1001", + "1002", "1007", "1009", "1010", "1011", "1021", "1022", "1023", "1024", + "1025", "1026", "1027", "1028", "1029", "1030", "1031", "1032", "1033", + "1034", "1035", "1036", "1037", 
"1038", "1039", "1040", "1041", "1042", + "1043", "1044", "1045", "1046", "1047", "1048", "1049", "1050", "1051", + "1052", "1053", "1054", "1055", "1056", "1057", "1058", "1059", "1060", + "1061", "1062", "1063", "1064", "1065", "1066", "1067", "1068", "1069", + "1070", "1071", "1072", "1073", "1074", "1075", "1076", "1077", "1078", + "1079", "1080", "1081", "1082", "1083", "1084", "1085", "1086", "1087", + "1088", "1089", "1090", "1091", "1092", "1093", "1094", "1095", "1096", + "1097", "1098", "1099", "1100", "1102", "1104", "1105", "1106", "1107", + "1108", "1110", "1111", "1112", "1113", "1114", "1117", "1119", "1121", + "1122", "1123", "1124", "1126", "1130", "1131", "1132", "1137", "1138", + "1141", "1145", "1147", "1148", "1149", "1151", "1152", "1154", "1163", + "1164", "1165", "1166", "1169", "1174", "1175", "1183", "1185", "1186", + "1187", "1192", "1198", "1199", "1201", "1213", "1216", "1217", "1218", + "1233", "1234", "1236", "1244", "1247", "1248", "1259", "1271", "1272", + "1277", "1287", "1296", "1300", "1301", "1309", "1310", "1311", "1322", + "1328", "1334", "1352", "1417", "1433", "1434", "1443", "1455", "1461", + "1494", "1500", "1501", "1503", "1521", "1524", "1533", "1556", "1580", + "1583", "1594", "1600", "1641", "1658", "1666", "1687", "1688", "1700", + "1717", "1718", "1719", "1720", "1721", "1723", "1755", "1761", "1782", + "1783", "1801", "1805", "1812", "1839", "1840", "1862", "1863", "1864", + "1875", "1900", "1914", "1935", "1947", "1971", "1972", "1974", "1984", + "1998", "1999", "2000", "2001", "2002", "2003", "2004", "2005", "2006", + "2007", "2008", "2009", "2010", "2013", "2020", "2021", "2022", "2030", + "2033", "2034", "2035", "2038", "2040", "2041", "2042", "2043", "2045", + "2046", "2047", "2048", "2049", "2065", "2068", "2099", "2100", "2103", + "2105", "2106", "2107", "2111", "2119", "2121", "2126", "2135", "2144", + "2160", "2161", "2170", "2179", "2190", "2191", "2196", "2200", "2222", + "2251", "2260", "2288", "2301", 
"2323", "2366", "2381", "2382", "2383", + "2393", "2394", "2399", "2401", "2492", "2500", "2522", "2525", "2557", + "2601", "2602", "2604", "2605", "2607", "2608", "2638", "2701", "2702", + "2710", "2717", "2718", "2725", "2800", "2809", "2811", "2869", "2875", + "2909", "2910", "2920", "2967", "2968", "2998", "3000", "3001", "3003", + "3005", "3006", "3007", "3011", "3013", "3017", "3030", "3031", "3052", + "3071", "3077", "3128", "3168", "3211", "3221", "3260", "3261", "3268", + "3269", "3283", "3300", "3301", "3306", "3322", "3323", "3324", "3325", + "3333", "3351", "3367", "3369", "3370", "3371", "3372", "3389", "3390", + "3404", "3476", "3493", "3517", "3527", "3546", "3551", "3580", "3659", + "3689", "3690", "3703", "3737", "3766", "3784", "3800", "3801", "3809", + "3814", "3826", "3827", "3828", "3851", "3869", "3871", "3878", "3880", + "3889", "3905", "3914", "3918", "3920", "3945", "3971", "3986", "3995", + "3998", "4000", "4001", "4002", "4003", "4004", "4005", "4006", "4045", + "4111", "4125", "4126", "4129", "4224", "4242", "4279", "4321", "4343", + "4443", "4444", "4445", "4446", "4449", "4550", "4567", "4662", "4848", + "4899", "4900", "4998", "5000", "5001", "5002", "5003", "5004", "5009", + "5030", "5033", "5050", "5051", "5054", "5060", "5061", "5080", "5087", + "5100", "5101", "5102", "5120", "5190", "5200", "5214", "5221", "5222", + "5225", "5226", "5269", "5280", "5298", "5357", "5405", "5414", "5431", + "5432", "5440", "5500", "5510", "5544", "5550", "5555", "5560", "5566", + "5631", "5633", "5666", "5678", "5679", "5718", "5730", "5800", "5801", + "5802", "5810", "5811", "5815", "5822", "5825", "5850", "5859", "5862", + "5877", "5900", "5901", "5902", "5903", "5904", "5906", "5907", "5910", + "5911", "5915", "5922", "5925", "5950", "5952", "5959", "5960", "5961", + "5962", "5963", "5987", "5988", "5989", "5998", "5999", "6000", "6001", + "6002", "6003", "6004", "6005", "6006", "6007", "6009", "6025", "6059", + "6100", "6101", "6106", "6112", 
"6123", "6129", "6156", "6346", "6389", + "6502", "6510", "6543", "6547", "6565", "6566", "6567", "6580", "6646", + "6666", "6667", "6668", "6669", "6689", "6692", "6699", "6779", "6788", + "6789", "6792", "6839", "6881", "6901", "6969", "7000", "7001", "7002", + "7004", "7007", "7019", "7025", "7070", "7100", "7103", "7106", "7200", + "7201", "7402", "7435", "7443", "7496", "7512", "7625", "7627", "7676", + "7741", "7777", "7778", "7800", "7911", "7920", "7921", "7937", "7938", + "7999", "8000", "8001", "8002", "8007", "8008", "8009", "8010", "8011", + "8021", "8022", "8031", "8042", "8045", "8080", "8081", "8082", "8083", + "8084", "8085", "8086", "8087", "8088", "8089", "8090", "8093", "8099", + "8100", "8180", "8181", "8192", "8193", "8194", "8200", "8222", "8254", + "8290", "8291", "8292", "8300", "8333", "8383", "8400", "8402", "8443", + "8500", "8600", "8649", "8651", "8652", "8654", "8701", "8800", "8873", + "8888", "8899", "8994", "9000", "9001", "9002", "9003", "9009", "9010", + "9011", "9040", "9050", "9071", "9080", "9081", "9090", "9091", "9099", + "9100", "9101", "9102", "9103", "9110", "9111", "9200", "9207", "9220", + "9290", "9415", "9418", "9485", "9500", "9502", "9503", "9535", "9575", + "9593", "9594", "9595", "9618", "9666", "9876", "9877", "9878", "9898", + "9900", "9917", "9929", "9943", "9944", "9968", "9998", "9999", "10000", + "10001", "10002", "10003", "10004", "10009", "10010", "10012", "10024", + "10025", "10082", "10180", "10215", "10243", "10566", "10616", "10617", + "10621", "10626", "10628", "10629", "10778", "11110", "11111", "11967", + "12000", "12174", "12265", "12345", "13456", "13722", "13782", "13783", + "14000", "14238", "14441", "14442", "15000", "15002", "15003", "15004", + "15660", "15742", "16000", "16001", "16012", "16016", "16018", "16080", + "16113", "16992", "16993", "17877", "17988", "18040", "18101", "18988", + "19101", "19283", "19315", "19350", "19780", "19801", "19842", "20000", + "20005", "20031", "20221", 
"20222", "20828", "21571", "22939", "23502", + "24444", "24800", "25734", "25735", "26214", "27000", "27352", "27353", + "27355", "27356", "27715", "28201", "30000", "30718", "30951", "31038", + "31337", "32768", "32769", "32770", "32771", "32772", "32773", "32774", + "32775", "32776", "32777", "32778", "32779", "32780", "32781", "32782", + "32783", "32784", "32785", "33354", "33899", "34571", "34572", "34573", + "35500", "38292", "40193", "40911", "41511", "42510", "44176", "44442", + "44443", "44501", "45100", "48080", "49152", "49153", "49154", "49155", + "49156", "49157", "49158", "49159", "49160", "49161", "49163", "49165", + "49167", "49175", "49176", "49400", "49999", "50000", "50001", "50002", + "50003", "50006", "50300", "50389", "50500", "50636", "50800", "51103", + "51493", "52673", "52822", "52848", "52869", "54045", "54328", "55055", + "55056", "55555", "55600", "56737", "56738", "57294", "57797", "58080", + "60020", "60443", "61532", "61900", "62078", "63331", "64623", "64680", + "65000", "65129", "65389", + ] diff --git a/grinder/errors.py b/grinder/errors.py index ac7a870..e94df41 100644 --- a/grinder/errors.py +++ b/grinder/errors.py @@ -27,6 +27,19 @@ def __str__(self): return f"Error occured in Censys Connector module: {self._error_args}" +class MasscanConnectorException(Exception): + def __init__(self, error_args: Exception or str): + super().__init__(self) + self._error_args = error_args + + @property + def error_args(self): + return self._error_args + + def __str__(self): + return f"Error occured in Masscan Connector module: {self._error_args}" + + class NmapConnectorException(Exception): def __init__(self, error_args: Exception or str): super().__init__(self) @@ -175,7 +188,9 @@ def __init__(self, error_args: Exception or str): super().__init__(error_args) -class PyScriptExecutorOrganizeProcessesError(PyScriptExecutoryProcessingManagerException): +class PyScriptExecutorOrganizeProcessesError( + PyScriptExecutoryProcessingManagerException +): 
def __init__(self, error_args: Exception or str): super().__init__(error_args) @@ -267,6 +282,26 @@ def __init__(self, error_args: Exception or str): super().__init__(error_args) +class MasscanConnectorInitError(MasscanConnectorException): + def __init__(self, error_args: Exception or str): + super().__init__(error_args) + + +class MasscanConnectorScanError(MasscanConnectorException): + def __init__(self, error_args: Exception or str): + super().__init__(error_args) + + +class MasscanConnectorGetResultsCountError(MasscanConnectorException): + def __init__(self, error_args: Exception or str): + super().__init__(error_args) + + +class MasscanConnectorGetResultsError(MasscanConnectorException): + def __init__(self, error_args: Exception or str): + super().__init__(error_args) + + class NmapConnectorInitError(NmapConnectorException): def __init__(self, error_args: Exception or str): super().__init__(error_args) @@ -337,6 +372,11 @@ def __init__(self, error_args: Exception or str): super().__init__(error_args) +class GrinderCoreHostMasscanResultsError(GrinderCoreException): + def __init__(self, error_args: Exception or str): + super().__init__(error_args) + + class GrinderCoreUpdateMapMarkersError(GrinderCoreException): def __init__(self, error_args: Exception or str): super().__init__(error_args) @@ -447,6 +487,11 @@ def __init__(self, error_args: Exception or str): super().__init__(error_args) +class GrinderCoreMasscanSaveToDatabaseError(GrinderCoreException): + def __init__(self, error_args: Exception or str): + super().__init__(error_args) + + class GrinderCoreSaveResultsToDatabaseError(GrinderCoreException): def __init__(self, error_args: Exception or str): super().__init__(error_args) @@ -457,6 +502,11 @@ def __init__(self, error_args: Exception or str): super().__init__(error_args) +class GrinderCoreMasscanScanError(GrinderCoreException): + def __init__(self, error_args: Exception or str): + super().__init__(error_args) + + class 
GrinderCoreTlsScanner(GrinderCoreException): def __init__(self, error_args: Exception or str): super().__init__(error_args) diff --git a/grinder/masscanconnector.py b/grinder/masscanconnector.py new file mode 100644 index 0000000..3debe73 --- /dev/null +++ b/grinder/masscanconnector.py @@ -0,0 +1,86 @@ +#!/usr/bin/env python3 + +import logging + +import masscan + +from grinder.decorators import exception_handler + from grinder.errors import ( + MasscanConnectorInitError, + MasscanConnectorScanError, + MasscanConnectorGetResultsError, + MasscanConnectorGetResultsCountError, +) + + +class MasscanConnector: + @exception_handler(expected_exception=MasscanConnectorInitError) + def __init__(self): + self.masscan = masscan.PortScanner() + self.results: dict = {} + + masscan.logger.setLevel(logging.CRITICAL) + + @exception_handler(expected_exception=MasscanConnectorScanError) + def scan( + self, + host: str, + rate: int or None = None, + arguments: str = "", + ports: str = "", + sudo: bool = False, + ) -> None: + """ + The basic Masscan caller. This is the "lowest" function in terms + of Grinder Framework, all calls here are going to the python-masscan + library. In this function we just put the right arguments, parameters + and other things to call Masscan. + :param host: ip of the host to scan + :param rate: packet rate argument for Masscan + :param arguments: arguments for Masscan + :param ports: ports to scan with Masscan + :param sudo: is sudo required for the Masscan scan? 
+ :return: None + """ + + if rate: + arguments += f" --rate {rate}" + + # If user defined both arguments and ports, + # let's scan with the defined ports + # and the given arguments: + if arguments and ports: + self.masscan.scan(hosts=host, arguments=arguments, ports=ports, sudo=sudo) + + # Else if ports are not defined, let's + # scan with default ports + elif arguments: + self.masscan.scan(hosts=host, arguments=arguments, sudo=sudo) + + # Else if arguments are not defined, let's + # scan with default arguments + elif ports: + self.masscan.scan(hosts=host, arguments="", ports=ports, sudo=sudo) + + # If neither arguments nor ports are set, + # make a simple scan + else: + self.masscan.scan(hosts=host, arguments="", sudo=sudo) + + self.results = {host: self.masscan[host] for host in self.masscan.all_hosts} + + @exception_handler(expected_exception=MasscanConnectorGetResultsError) + def get_results(self) -> dict: + """ + Return Masscan scan results + :return: dictionary with results {host: info} + """ + return self.results + + @exception_handler(expected_exception=MasscanConnectorGetResultsCountError) + def get_results_count(self) -> int: + """ + Return quantity of results + :return: quantity of results + """ + return len(self.results) diff --git a/grinder/nmapprocessmanager.py b/grinder/nmapprocessmanager.py index 5193a0d..cb7b6f3 100644 --- a/grinder/nmapprocessmanager.py +++ b/grinder/nmapprocessmanager.py @@ -87,7 +87,7 @@ def run(self) -> None: print( f"⭕ " - f"Current scan host ({index}/{self.quantity}): " + f"Current scan host ({index + 1}/{self.quantity}): " f"{host_ip}:{port_postfix} " f"(started at: {str(datetime.now().strftime('%H:%M:%S'))})" ) diff --git a/grinder/tlsscanner.py b/grinder/tlsscanner.py index 0a1ffa3..b1cb60e 100644 --- a/grinder/tlsscanner.py +++ b/grinder/tlsscanner.py @@ -125,7 +125,7 @@ def sort_alive_hosts(self) -> None: groups_len = len(groups) for index, group in enumerate(groups): - print(f"│ Do pingscan for {self.n} hosts 
({index}/{groups_len})") + print(f"│ Do pingscan for {self.n} hosts ({index + 1}/{groups_len})") group_ips = [ip for ip in group if ip] hosts_in_nmap_format = " ".join(group_ips) nm.scan( @@ -327,7 +327,7 @@ def start_tls_scan( for index, host_port in enumerate(self.alive_hosts_with_ports.items()): host, port = host_port cprint( - f"Start TLS scan for {index} from {alive_hosts_quantity} hosts", + f"Start TLS scan for {index + 1} from {alive_hosts_quantity} hosts", "blue", attrs=["bold"], ) diff --git a/grinder/vulnersconnector.py b/grinder/vulnersconnector.py index f4889cd..948281a 100644 --- a/grinder/vulnersconnector.py +++ b/grinder/vulnersconnector.py @@ -292,7 +292,7 @@ def get_exploits_for_vulnerabilities(self) -> dict: continue print( f" - Found {len(cve_references)} exploits for {cve_without_filter} " - f"({index}/{length_of_right_filters}, total CVEs: {len(exploits.keys())})" + f"({index + 1}/{length_of_right_filters}, total CVEs: {len(exploits.keys())})" ) if not cve_references: continue diff --git a/queries/masscan_example.json b/queries/masscan_example.json new file mode 100644 index 0000000..2252a90 --- /dev/null +++ b/queries/masscan_example.json @@ -0,0 +1,22 @@ +[ + { + "vendor": "Masscan scan", + "product": "Masscan scan", + "shodan_queries": [], + "censys_queries": [], + "masscan_settings": [ + { + "hosts": "87.250.250.96/27", + "ports": "1-1024", + "rate": "1000", + "top-ports": "10" + } + ], + "scripts": { + "py_script": "test.py", + "nse_script": "test.nse" + }, + "vendor_confidence": "certain" + + } +] diff --git a/requirements.txt b/requirements.txt index c4db4a0..4861c6f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -18,7 +18,7 @@ Jinja2==2.10.3 kiwisolver==1.1.0 lazy-object-proxy==1.4.2 MarkupSafe==1.1.1 -matplotlib==3.1.1 +matplotlib==3.2.1 mccabe==0.6.1 more-itertools==7.2.0 netaddr==0.7.19 @@ -36,6 +36,7 @@ pytest==5.2.1 pytest-cov==2.8.1 pytest-mock==1.11.1 python-dateutil==2.8.0 +python-masscan==0.1.6 python-nmap==0.6.1 
repoze.lru==0.7 requests==2.22.0 diff --git a/tests/test_database.py b/tests/test_database.py index ddf7fe0..425e615 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -106,6 +106,7 @@ def test_database_existing_tables(connection: Connection_instance) -> None: ("scan_data",), ("shodan_results",), ("censys_results",), + ("masscan_results",), ] ) @@ -200,6 +201,29 @@ def test_database_existing_censys_results_columns( ) +def test_database_existing_masscan_results_columns( + connection: Connection_instance +) -> None: + """ + Check column names of 'masscan_results' table + :param connection: sqlite3.Connection object + :return: None + """ + assert sorted( + connection.execute("PRAGMA table_info(masscan_results)").fetchall() + ) == sorted( + [ + (0, "id", "INTEGER", 0, None, 1), + (1, "scan_data_id", "INTEGER", 0, None, 0), + (2, "scan_information_id", "INTEGER", 0, None, 0), + (3, "query", "TEXT", 0, None, 0), + (4, "query_confidence", "TEXT", 0, None, 0), + (5, "results_count", "INTEGER", 0, None, 0), + (6, "results", "TEXT", 0, None, 0), + ] + ) + + def test_create_database_error() -> None: """ Check if database creating will fail @@ -536,6 +560,76 @@ def test_add_censys_scan_data_success(connection: Connection_instance) -> None: assert loads(censys_data_results[6]) == [{"ip": "66.66.66.66"}] +def test_add_masscan_scan_data_error(connection: Connection_instance) -> None: + """ + Check if we can properly handle errors that will be raised + with add scan data method + :param connection: sqlite3.Connection object + :return: None + """ + connection_backup = db.connection + db.connection = None + + def add_scan_data(): + db.add_masscan_scan_data( + query={}, + results_count=0, + results=[], + ) + + with raises(GrinderDatabaseAddScanDataError): + add_scan_data() + with raises(GrinderDatabaseException): + add_scan_data() + db.connection = connection_backup + + +def test_add_masscan_scan_data_success(connection: Connection_instance) -> None: + """ + This 
test checks if we can successfully put scan data + results into database (for Masscan in this case) + :param connection: sqlite3.Connection object + :return: None + """ + scan_data_values = [ + { + "query": {"hosts": "88.88.88.88"}, + "results_count": 1, + "results": [{"ip": "88.88.88.88"}] + }, + { + "query": {"hosts": "99.99.99.99"}, + "results_count": 2, + "results": [{"ip": "99.99.99.99"}] + }, + { + "query": {"hosts": "10.10.10.10"}, + "results_count": 3, + "results": [{"ip": "10.10.10.10"}] + } + ] + for scan_data_value in scan_data_values: + db.add_masscan_scan_data( + **scan_data_value + ) + masscan_data_results = connection.execute( + """ + SELECT * FROM masscan_results + WHERE id = ( + SELECT max(id) + FROM masscan_results + ) + """ + ).fetchall()[0] + assert isinstance(masscan_data_results[0], int) + assert isinstance(masscan_data_results[1], int) + assert isinstance(masscan_data_results[2], int) + assert masscan_data_results[3] == "10.10.10.10" + assert masscan_data_results[4] is None + assert masscan_data_results[5] == 3 + assert loads(masscan_data_results[6]) == [{"ip": "10.10.10.10"}] + + def test_load_last_results_error() -> None: """ Check if we can correctly catch exceptions @@ -565,7 +659,10 @@ def test_load_last_results_success() -> None: "33.33.33.33": {"ip": "33.33.33.33"}, "44.44.44.44": {"ip": "44.44.44.44"}, "55.55.55.55": {"ip": "55.55.55.55"}, - "66.66.66.66": {"ip": "66.66.66.66"} + "66.66.66.66": {"ip": "66.66.66.66"}, + "88.88.88.88": {"ip": "88.88.88.88"}, + "99.99.99.99": {"ip": "99.99.99.99"}, + "10.10.10.10": {"ip": "10.10.10.10"}, } @@ -589,7 +686,7 @@ def test_load_last_results_by_name_error() -> None: def test_load_last_results_by_name_success() -> None: """ Check if we can successfully load all last results - by name from different backend engines (Shodan, Censys) + by name from different backend engines (Shodan, Censys, Masscan) :return: None """ assert db.load_last_results_by_name( @@ -606,6 +703,13 @@ def 
test_load_last_results_by_name_success() -> None: "55.55.55.55": {"ip": "55.55.55.55"}, "66.66.66.66": {"ip": "66.66.66.66"} } + assert db.load_last_results_by_name( + scan_name="pytest", engine_table="masscan_results" + ) == { + "88.88.88.88": {"ip": "88.88.88.88"}, + "99.99.99.99": {"ip": "99.99.99.99"}, + "10.10.10.10": {"ip": "10.10.10.10"} + } assert ( db.load_last_results_by_name( scan_name="not_exists", engine_table="censys_results" @@ -665,6 +769,22 @@ def test_load_all_results_by_name_censys_success() -> None: } +def test_load_all_results_by_name_masscan_success() -> None: + """ + This test checks if we can successfully + load latest scan results from Masscan + :return: None + """ + assert db.load_all_results_by_name( + scan_name="pytest", engine_table="masscan_results" + ) == { + "88.88.88.88": {"ip": "88.88.88.88"}, + "99.99.99.99": {"ip": "99.99.99.99"}, + "10.10.10.10": {"ip": "10.10.10.10"} + } + + + def test_load_multiple_last_results_by_name_error() -> None: """ This test checks if we will catch proper @@ -684,7 +804,7 @@ def test_load_multiple_last_results_by_name_error() -> None: def test_load_multiple_last_resuls_by_name_success() -> None: """ This test checks if we can successfully load - _all_ latest results from Shodan and Censys + _all_ latest results from Shodan, Censys and Masscan that connected with last scan. 
:return: None """ @@ -694,7 +814,10 @@ def test_load_multiple_last_resuls_by_name_success() -> None: "33.33.33.33": {"ip": "33.33.33.33"}, "44.44.44.44": {"ip": "44.44.44.44"}, "55.55.55.55": {"ip": "55.55.55.55"}, - "66.66.66.66": {"ip": "66.66.66.66"} + "66.66.66.66": {"ip": "66.66.66.66"}, + "88.88.88.88": {"ip": "88.88.88.88"}, + "99.99.99.99": {"ip": "99.99.99.99"}, + "10.10.10.10": {"ip": "10.10.10.10"} } @@ -708,10 +831,13 @@ def test_custom_database_getters_handlers_error() -> None: possible_functions = [ db.load_last_shodan_results, db.load_last_censys_results, + db.load_last_masscan_results, db.load_last_shodan_results_by_scan_name, db.load_last_censys_results_by_scan_name, + db.load_last_masscan_results_by_scan_name, db.load_all_shodan_results_by_scan_name, db.load_all_censys_results_by_scan_name, + db.load_all_masscan_results_by_scan_name, ] for function in possible_functions: connection_backup = db.connection @@ -744,6 +870,11 @@ def test_custom_database_getters_handlers_success() -> None: "55.55.55.55": {"ip": "55.55.55.55"}, "66.66.66.66": {"ip": "66.66.66.66"} } + assert db.load_last_masscan_results() == { + "88.88.88.88": {"ip": "88.88.88.88"}, + "99.99.99.99": {"ip": "99.99.99.99"}, + "10.10.10.10": {"ip": "10.10.10.10"} + } def test_initiate_one_more_scan_results() -> None: @@ -767,6 +898,7 @@ def test_initiate_one_more_scan_results() -> None: ) db.add_shodan_scan_data(**another_results) db.add_censys_scan_data(**another_results) + db.add_masscan_scan_data(**another_results) db.update_results_count(total_products=42, total_results=1337) db.update_end_time() assert db.load_last_shodan_results_by_scan_name() == { @@ -775,12 +907,18 @@ def test_initiate_one_more_scan_results() -> None: assert db.load_last_censys_results_by_scan_name() == { "77.77.77.77": {"ip": "77.77.77.77"} } + assert db.load_last_masscan_results_by_scan_name() == { + "77.77.77.77": {"ip": "77.77.77.77"} + } assert db.load_all_shodan_results_by_scan_name() == { "77.77.77.77": 
{"ip": "77.77.77.77"} } assert db.load_all_censys_results_by_scan_name() == { "77.77.77.77": {"ip": "77.77.77.77"} } + assert db.load_all_masscan_results_by_scan_name() == { + "77.77.77.77": {"ip": "77.77.77.77"} + } assert db.load_all_results_by_name(engine_table="censys_results") == { "44.44.44.44": {"ip": "44.44.44.44"}, "55.55.55.55": {"ip": "55.55.55.55"}, @@ -793,6 +931,12 @@ def test_initiate_one_more_scan_results() -> None: "33.33.33.33": {"ip": "33.33.33.33"}, "77.77.77.77": {"ip": "77.77.77.77"} } + assert db.load_all_results_by_name(engine_table="masscan_results") == { + "88.88.88.88": {"ip": "88.88.88.88"}, + "99.99.99.99": {"ip": "99.99.99.99"}, + "10.10.10.10": {"ip": "10.10.10.10"}, + "77.77.77.77": {"ip": "77.77.77.77"} + } def test_change_scan_name_results() -> None: @@ -810,6 +954,9 @@ def test_change_scan_name_results() -> None: assert db.load_all_results_by_name(engine_table="shodan_results", scan_name="another_test") == { "77.77.77.77": {"ip": "77.77.77.77"} } + assert db.load_all_results_by_name(engine_table="masscan_results", scan_name="another_test") == { + "77.77.77.77": {"ip": "77.77.77.77"} + } assert db.load_all_results_by_name(engine_table="censys_results", scan_name="pytest") == { "44.44.44.44": {"ip": "44.44.44.44"}, "55.55.55.55": {"ip": "55.55.55.55"}, @@ -820,3 +967,8 @@ def test_change_scan_name_results() -> None: "22.22.22.22": {"ip": "22.22.22.22"}, "33.33.33.33": {"ip": "33.33.33.33"} } + assert db.load_all_results_by_name(engine_table="masscan_results", scan_name="pytest") == { + "88.88.88.88": {"ip": "88.88.88.88"}, + "99.99.99.99": {"ip": "99.99.99.99"}, + "10.10.10.10": {"ip": "10.10.10.10"} + } diff --git a/tests/test_masscanconnector.py b/tests/test_masscanconnector.py new file mode 100644 index 0000000..8c7cec3 --- /dev/null +++ b/tests/test_masscanconnector.py @@ -0,0 +1,171 @@ +#!/usr/bin/env python3 + +from unittest.mock import patch + +from pytest import raises + +from grinder.defaultvalues import 
DefaultMasscanScanValues +from grinder.errors import ( + MasscanConnectorInitError, + MasscanConnectorScanError, + MasscanConnectorGetResultsError, + MasscanConnectorGetResultsCountError, +) +from grinder.masscanconnector import MasscanConnector + + +class MasscanTestDefaultValues: + HOST = "8.8.8.8" + PORTS = "53" + + +def setup_module() -> None: + """ + Initialize MasscanConnector for various tests + :return: + """ + + global mc + mc = MasscanConnector() + + +def test_masscanconnector_init() -> None: + """ + Check if we can successfully create new MasscanConnector instance + :return: + """ + MasscanConnector() + + +def test_masscanconnector_init_error() -> None: + """ + Raise MasscanConnectorInitError and check output of it + :return: + """ + with patch( + "grinder.masscanconnector.MasscanConnector.__init__", + side_effect=MasscanConnectorInitError("test"), + ): + with raises(MasscanConnectorInitError) as init_error: + MasscanConnector() + assert "Error occured in Masscan Connector module: test" == str( + init_error.value + ) + + +def test_masscanconnector_scan_ip() -> None: + """ + Check if we can successfully scan 8.8.8.8 host + :return: + """ + + mc.scan( + host=MasscanTestDefaultValues.HOST, + rate=DefaultMasscanScanValues.RATE, + arguments=DefaultMasscanScanValues.ARGUMENTS, + ports=str(MasscanTestDefaultValues.PORTS), + sudo=DefaultMasscanScanValues.SUDO, + ) + + assert ( + mc.get_results() + .get(MasscanTestDefaultValues.HOST) + .get("tcp") + .get(int(MasscanTestDefaultValues.PORTS), False) + ) + + +def test_masscanconnector_scan_error() -> None: + """ + Raise MasscanConnectorScanError and check output of it + :return: + """ + with patch( + "grinder.masscanconnector.MasscanConnector.scan", + side_effect=MasscanConnectorScanError("test"), + ): + with raises(MasscanConnectorScanError) as scan_error: + mc.scan( + host=MasscanTestDefaultValues.HOST, + ports=MasscanTestDefaultValues.PORTS, + rate=DefaultMasscanScanValues.RATE, + 
sudo=DefaultMasscanScanValues.SUDO, + ) + assert "Error occured in Masscan Connector module: test" == str( + scan_error.value + ) + + +def test_masscanconnector_scan_without_any_args() -> None: + """ + Test MasscanConnector scan running without any args + :return: + """ + with raises(MasscanConnectorScanError) as scan_error: + mc.scan(host="", arguments="", ports="", sudo=DefaultMasscanScanValues.SUDO) + assert "FAIL: target IP address list empty" in str(scan_error.value) + + +def test_masscanconnector_scan_bad_argument() -> None: + """ + Test MasscanConnector scan running with bad arguments + :return: + """ + + with raises(MasscanConnectorScanError) as scan_error: + mc.scan( + host=MasscanTestDefaultValues.HOST, + ports=MasscanTestDefaultValues.PORTS, + rate=DefaultMasscanScanValues.RATE, + arguments="bad-argument", + ) + assert "FAIL: unknown command-line parameter" in str(scan_error.value) + + +def test_masscanconnector_get_results() -> None: + """ + Check if we can successfully get MasscanConnector scan results + :return: + """ + assert mc.get_results() + + +def test_masscanconnector_get_results_error() -> None: + """ + Raise MasscanConnectorGetResultsError and check output of it + :return: + """ + with patch( + "grinder.masscanconnector.MasscanConnector.get_results", + side_effect=MasscanConnectorGetResultsError("test"), + ): + with raises(MasscanConnectorGetResultsError) as get_results_error: + mc.get_results() + assert "Error occured in Masscan Connector module: test" == str( + get_results_error.value + ) + + +def test_masscanconnector_get_results_count() -> None: + """ + Check if we can successfully get the count of + MasscanConnector scan results + :return: + """ + assert mc.get_results_count() + + +def test_masscanconnector_get_results_count_error() -> None: + """ + Raise MasscanConnectorGetResultsCountError and check output of it + :return: + """ + with patch( + "grinder.masscanconnector.MasscanConnector.get_results_count", + 
side_effect=MasscanConnectorGetResultsCountError("test"), + ): + with raises(MasscanConnectorGetResultsCountError) as get_results_count_error: + mc.get_results_count() + assert "Error occured in Masscan Connector module: test" == str( + get_results_count_error.value + )