diff --git a/Abuse_IPDB.py b/Abuse_IPDB.py
new file mode 100644
index 0000000..54bcef5
--- /dev/null
+++ b/Abuse_IPDB.py
@@ -0,0 +1,57 @@
+import requests
+from prettytable import PrettyTable
+import socket
+from Colors import get_color
+
+# Resolve a URL's host to its IP address
+def resolve_url_to_ip(url):
+    try:
+        domain = url.split('//')[-1].split('/')[0]
+        ip_address = socket.gethostbyname(domain)
+        return ip_address
+    except socket.gaierror:
+        return None
+
+
+def check_ip_reputation(api_key, ip_address):
+    url = 'https://api.abuseipdb.com/api/v2/check'
+    headers = {
+        'Accept': 'application/json',
+        'Key': api_key
+    }
+    params = {
+        'ipAddress': ip_address,
+        'maxAgeInDays': '90'
+    }
+    response = requests.get(url, headers=headers, params=params)
+    if response.status_code == 200:
+        result = response.json()
+        data = result['data']
+
+        # Create a table
+        table = PrettyTable()
+        table.field_names = ["Attribute", "Value"]
+        table.add_row(["IP Address", data['ipAddress']])
+        table.add_row(["Is Public", data['isPublic']])
+        table.add_row(["IP Version", data['ipVersion']])
+        table.add_row([f"{get_color('RED')}Is Whitelisted{get_color('RESET')}", data['isWhitelisted']])
+        table.add_row(["Abuse Confidence Score", data['abuseConfidenceScore']])
+        table.add_row(["Country Code", data['countryCode']])
+        table.add_row(["ISP", data['isp']])
+        table.add_row(["Domain", data['domain']])
+        table.add_row(["Is Tor", data['isTor']])
+        table.add_row([f"{get_color('RED')}Total Reports{get_color('RESET')}", data['totalReports']])
+        table.add_row(["Num Distinct Users", data['numDistinctUsers']])
+        table.add_row(["Last Reported At", data['lastReportedAt']])
+
+        return table
+    else:
+        # Return the error so callers don't end up printing None
+        return f"Error: HTTP {response.status_code} - {response.reason}"
+
+def check_IP_AbuseIPDB(api_key, url):
+    ip_address = resolve_url_to_ip(url)
+    if ip_address:
+        print(f"Resolved IP Address: {ip_address}")
+        return check_ip_reputation(api_key, ip_address)
+    else:
+        return "Unable to resolve IP address."
\ No newline at end of file
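A minimal usage sketch for this module, assuming a valid AbuseIPDB key (the key string and target URL below are placeholders):

    from Abuse_IPDB import check_IP_AbuseIPDB

    # Placeholder key and URL; substitute real values
    table = check_IP_AbuseIPDB("API KEY HERE", "https://example.com/login")
    print(table)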
diff --git a/AsciiArt.py b/AsciiArt.py
new file mode 100644
index 0000000..12c9868
--- /dev/null
+++ b/AsciiArt.py
@@ -0,0 +1,61 @@
+from Colors import get_color
+
+def logo():
+    lg = """
+─────────█▄██▄█
+█▄█▄█▄█▄█▐█┼██▌█▄█▄█▄█▄█
+███┼█████▐████▌█████┼███
+█████████▐████▌█████████
+"""
+    print(lg)
+
+def Serpent():
+    lg = """
+ ▄▄▀█▄───▄───────▄
+ ▀▀▀██──███─────███
+ ░▄██▀░█████░░░█████░░
+ ███▀▄███░███░███░███░▄
+ ▀█████▀░░░▀███▀░░░▀██▀
+ """
+    print(lg)
+
+def EYS():
+    lg = f"""{get_color('ORANGE')}
+⠀⠀⠀⠀⠀⠀⢀⠀⠀⠀⠀⠀⠀⢠⡆⠀⠀⠀⠀⠀⠀⡀⠀⠀⠀⠀⠀⠀⠀⠀
+⠀⠀⠀⠀⠀⠀⠈⣷⣄⠀⠀⠀⠀⣾⣷⠀⠀⠀⠀⣠⣾⠃⠀⠀⠀⠀⠀⠀⠀⠀
+⠀⠀⠀⠀⠀⠀⠀⢿⠿⠃⠀⠀⠀⠉⠉⠁⠀⠀⠐⠿⡿⠀⠀⠀⠀⠀⠀⠀⠀⠀
+⠀⠀⠀⠀⠀⠀⠀⠀⠀⣀⣠⣤⣤⣶⣶⣶⣤⣤⣄⣀⡀⠀⠀⠀⠀⠀⠀⠀⠀⠀
+⠀⠀⠀⠀⠀⢀⣤⣶⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣷⣦⣄⠀⠀⠀⠀⠀⠀
+⠀⠀⠀⣠⣶⣿⣿⡿⣿⣿⣿⡿⠋⠉⠀⠀⠉⠙⢿⣿⣿⡿⣿⣿⣷⣦⡀⠀⠀⠀
+⠀⢀⣼⣿⣿⠟⠁⢠⣿⣿⠏⠀⠀⢠⣤⣤⡀⠀⠀⢻⣿⣿⡀⠙⢿⣿⣿⣦⠀⠀
+⣰⣿⣿⡟⠁⠀⠀⢸⣿⣿⠀⠀⠀⢿⣿⣿⡟⠀⠀⠈⣿⣿⡇⠀⠀⠙⣿⣿⣷⡄
+⠈⠻⣿⣿⣦⣄⠀⠸⣿⣿⣆⠀⠀⠀⠉⠉⠀⠀⠀⣸⣿⣿⠃⢀⣤⣾⣿⣿⠟⠁
+⠀⠀⠈⠻⣿⣿⣿⣶⣿⣿⣿⣦⣄⠀⠀⠀⢀⣠⣾⣿⣿⣿⣾⣿⣿⡿⠋⠁⠀⠀
+⠀⠀⠀⠀⠀⠙⠻⢿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⠿⠛⠁⠀⠀⠀⠀⠀
+⠀⠀⠀⠀⠀⠀⠀⠀⠈⠉⠛⠛⠿⠿⠿⠿⠿⠿⠛⠋⠉⠀⠀⠀⠀⠀⠀⠀⠀⠀
+⠀⠀⠀⠀⠀⠀⠀⢰⣷⡦⠀⠀⠀⢀⣀⣀⠀⠀⠀⢴⣾⡇⠀⠀⠀⠀⠀⠀⠀⠀
+⠀⠀⠀⠀⠀⠀⠀⣸⠟⠁⠀⠀⠀⠘⣿⡇⠀⠀⠀⠀⠙⢷⠀⠀⠀⠀⠀⠀⠀⠀
+⠀⠀⠀⠀⠀⠀⠀⠁⠀⠀⠀⠀⠀⠀⠻⠀⠀⠀⠀⠀⠀⠈⠀⠀⠀⠀⠀⠀⠀⠀
+{get_color('RESET')}"""
+    print(lg)
+
+
+def MGGL():
+    lg = """
+⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⢀⣀⣀⣀⣀⣀⡀⠀⠀⠀⠀⠀⠀⠀
+⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⢀⣴⠾⠛⢉⣉⣉⣉⡉⠛⠷⣦⣄⠀⠀⠀⠀
+⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⢀⣴⠋⣠⣴⣿⣿⣿⣿⣿⡿⣿⣶⣌⠹⣷⡀⠀⠀
+⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⣼⠁⣴⣿⣿⣿⣿⣿⣿⣿⣿⣆⠉⠻⣧⠘⣷⠀⠀
+⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⢰⡇⢰⣿⣿⣿⣿⣿⣿⣿⣿⣿⡿⠀⠀⠈⠀⢹⡇⠀
+⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⢸⡇⢸⣿⠛⣿⣿⣿⣿⣿⣿⡿⠃⠀⠀⠀⠀⢸⡇⠀
+⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠈⣷⠀⢿⡆⠈⠛⠻⠟⠛⠉⠀⠀⠀⠀⠀⠀⣾⠃⠀
+⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠸⣧⡀⠻⡄⠀⠀⠀⠀⠀⠀⠀⠀⠀⢀⣼⠃⠀⠀
+⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⢼⠿⣦⣄⠀⠀⠀⠀⠀⠀⠀⣀⣴⠟⠁⠀⠀⠀
+⠀⠀⠀⠀⠀⠀⠀⠀⣠⣾⣿⣦⠀⠀⠈⠉⠛⠓⠲⠶⠖⠚⠋⠉⠀⠀⠀⠀⠀⠀
+⠀⠀⠀⠀⠀⠀⣠⣾⣿⣿⠟⠁⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
+⠀⠀⠀⠀⣠⣾⣿⣿⠟⠁⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
+⠀⠀⠀⣾⣿⣿⠟⠁⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀NoPhish⠀⠀⠀
+⠀⢀⣄⠈⠛⠁⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
+⠀⠀⠁⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
+"""
+    print(lg)
\ No newline at end of file
diff --git a/Colors.py b/Colors.py
new file mode 100644
index 0000000..ce42dce
--- /dev/null
+++ b/Colors.py
@@ -0,0 +1,12 @@
+def get_color(color_name):
+    colors = {
+        "RESET": "\033[0m",
+        "RED": "\033[91m",
+        "GREEN": "\033[92m",
+        "YELLOW": "\033[93m",
+        "BLUE": "\033[94m",
+        "PURPLE": "\033[95m",
+        "CYAN": "\033[96m",
+        "ORANGE": "\033[38;5;208m"
+    }
+    return colors.get(color_name.upper(), "\033[0m")
\ No newline at end of file
diff --git a/GoogleSafeBrowsing.py b/GoogleSafeBrowsing.py
new file mode 100644
index 0000000..e0a8e96
--- /dev/null
+++ b/GoogleSafeBrowsing.py
@@ -0,0 +1,37 @@
+import requests
+from prettytable import PrettyTable
+from Colors import get_color
+
+# Google Safe Browsing
+def check_with_google_safe_browsing(url, api_key):
+    api_url = "https://safebrowsing.googleapis.com/v4/threatMatches:find"
+    payload = {
+        "client": {
+            "clientId": "NoPhish",
+            "clientVersion": "1.0"
+        },
+        "threatInfo": {
+            "threatTypes": ["MALWARE", "SOCIAL_ENGINEERING"],
+            "platformTypes": ["ANY_PLATFORM"],
+            "threatEntryTypes": ["URL"],
+            "threatEntries": [{"url": url}]
+        }
+    }
+    params = {'key': api_key}
+    response = requests.post(api_url, params=params, json=payload)
+    if response.status_code == 200:
+        matches = response.json()
+        return matches
+    else:
+        return None
+
+def display_google_safe_browsing_results(url, api_key):
+    result = check_with_google_safe_browsing(url, api_key)
+    if result is None:
+        # Distinguish an API failure from a clean verdict
+        print("Google Safe Browsing request failed; no verdict available.")
+    elif result.get('matches'):
+        table = PrettyTable()
+        table.field_names = [f"{get_color('RED')}Threat Type{get_color('RESET')}", "Platform Type", "URL", "Cache Duration"]
+        for match in result['matches']:
+            table.add_row([match['threatType'], match['platformType'], match['threat']['url'], match['cacheDuration']])
+        print(table)
+    else:
+        print("No threats detected by Google Safe Browsing.")
\ No newline at end of file
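A hedged smoke test for the Safe Browsing wrapper; the key is a placeholder, and the test page is one of the URLs Google publishes for exactly this purpose (assumed still live):

    from GoogleSafeBrowsing import display_google_safe_browsing_results

    # Placeholder key; testsafebrowsing.appspot.com hosts known-flagged test pages
    display_google_safe_browsing_results("http://testsafebrowsing.appspot.com/s/phishing.html", "API KEY HERE")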
diff --git a/NoPhish.py b/NoPhish.py
new file mode 100644
index 0000000..97446df
--- /dev/null
+++ b/NoPhish.py
@@ -0,0 +1,111 @@
+from Colors import get_color
+from UrlCheck import checkURL, check_ip_reputation, abuse_ipdb_api
+import os
+from AsciiArt import logo, Serpent, EYS, MGGL
+
+def Introduction():
+    clear_screen()
+    Intro = f"""{get_color('BLUE')}
+▒█▄░▒█ █▀▀█ ▒█▀▀█ ░▀░ █▀▀ █░░█
+▒█▒█▒█ █░░█ ▒█▄▄█ ▀█▀ ▀▀█ █▀▀█
+▒█░░▀█ ▀▀▀▀ ▒█░░░ ▀▀▀ ▀▀▀ ▀░░▀
+{get_color('RESET')}\
+By: CmdSNR
+Version: 1.5 [ BETA ]
+{get_color('RED')}NOTE: Some APIs might have daily limits.{get_color('RESET')}{get_color('GREEN')}\nUse the "manual" command for everything you need to know about using the scanner.{get_color('RESET')}
+Use "help" to display the help commands again.
+Use "clear" to clear the screen.
+    """
+    print(Intro)
+    CommandLine()
+
+
+"""
+------------------- ESSENTIALS -------------------
+"""
+
+def Mann():
+    mann = ['Get Started', 'Fix Errors', 'Required APIs & How To Get Them']
+    print(f"{get_color('GREEN')}For the Manual, use numbers to select a specific option.{get_color('RESET')}")
+    for index, content in enumerate(mann):
+        print(f'{index + 1}. {content}\n')
+    try:
+        user_input = int(input('Select option: '))
+        # match/case requires Python 3.10+
+        match user_input:
+            case 1:
+                print('Visit https://www.nrelm.com/nophish for full details.')
+            case 2:
+                print("Visit: https://github.com/sytaxus/NoPhish/issues | if you don't see your issue there, please open a ticket and be as specific as possible.")
+            case 3:
+                print('Visit https://www.nrelm.com/nophish for full details.\n'
+                      'Required APIs:\n\n'
+                      '1. Google Safe Browsing API: https://developers.google.com/safe-browsing/v4/get-started\n\n'
+                      '2. Rapid API (Exerra): https://rapidapi.com/Exerra/api/exerra-phishing-check/\n\n'
+                      '3. IpQualityScore API: https://www.ipqualityscore.com/documentation/proxy-detection-api/overview\n\n'
+                      '4. AbuseIPDB API: https://www.abuseipdb.com/api.html\n\n'
+                      '5. urlscan.io API: https://urlscan.io/docs/api/\n\n'
+                      '6. VirusTotal API: https://www.virustotal.com/\n')
+    except Exception as e:
+        print(f'Error: {e}')
+
+
+def CommandLine():
+    commands_Available = ['Scan For Phishing', 'Manual', 'Check IP Reputation', 'Help', 'Clear', 'Exit']
+
+    command_keys = {
+        'manual': 2,
+        'help': 4,
+        'clear': 5,
+        'exit': 6
+    }
+
+    print('Select the option of choice.')
+    for index, command in enumerate(commands_Available):
+        print(f"{index + 1}. {command}")
+
+    while True:
+        userinput = input(f"{get_color('BLUE')}NoPhish >{get_color('RESET')} ").strip().lower()
+        if userinput.isdigit():
+            choice = int(userinput)
+        elif userinput in command_keys:
+            choice = command_keys[userinput]
+        else:
+            print("Invalid command. Please enter a number or a recognized command.")
+            continue
+
+        if choice == 1:
+            clear_screen()
+            logo()
+            print(checkURL())
+        elif choice == 2:
+            clear_screen()
+            MGGL()
+            Mann()
+        elif choice == 3:
+            clear_screen()
+            EYS()
+            ip_address = input('Enter IP Address to check: ')
+            result = check_ip_reputation(abuse_ipdb_api, ip_address)
+            print(result)
+        elif choice == 4:
+            # Reprint the menu rather than calling Introduction(), which would
+            # recurse back into CommandLine()
+            clear_screen()
+            print('Select the option of choice.')
+            for index, command in enumerate(commands_Available):
+                print(f"{index + 1}. {command}")
+        elif choice == 5:
+            clear_screen()
+        elif choice == 6:
+            print("Exiting...")
+            break
+        else:
+            print("Invalid option. Please select a valid number or command.")
+
+def clear_screen():
+    # Check if the operating system is Windows
+    if os.name == 'nt':
+        _ = os.system('cls')
+    # Otherwise, it's assumed to be Unix/Linux
+    else:
+        _ = os.system('clear')
+
+
+if __name__ == "__main__":
+    Introduction()
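An illustrative session, with output abridged (the tables printed depend on which API keys are configured):

    $ python NoPhish.py
    NoPhish > 1
    Enter URL to scan: example.com
    Checking with Exerra Phishing Check...
    ...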
diff --git a/RapidAPIs.py b/RapidAPIs.py
new file mode 100644
index 0000000..a4cd165
--- /dev/null
+++ b/RapidAPIs.py
@@ -0,0 +1,47 @@
+from prettytable import PrettyTable
+import requests
+from Colors import get_color
+
+"""
+EXERRA API
+
+The endpoint requires a full http:// or https:// URL.
+"""
+
+def format_url_for_exerra(url):
+    if not url.startswith(('http://', 'https://')):
+        url = 'https://' + url
+    return url
+
+def check_exerra_phishing(original_url, api_key):
+    url = format_url_for_exerra(original_url)
+    api_url = "https://exerra-phishing-check.p.rapidapi.com/"
+    querystring = {"url": url}
+    headers = {
+        "X-RapidAPI-Key": api_key,
+        "X-RapidAPI-Host": "exerra-phishing-check.p.rapidapi.com"
+    }
+
+    response = requests.get(api_url, headers=headers, params=querystring)
+    if response.status_code != 200:
+        # Surface the failure instead of parsing an error payload
+        return f"Exerra check failed: HTTP {response.status_code}"
+    data = response.json()
+
+    exerra_table = PrettyTable()
+    exerra_table.field_names = ["Attribute", "Value"]
+
+    status = data.get('status', 'N/A')
+    is_scam = data.get('data', {}).get('isScam', 'N/A')
+    domain = data.get('data', {}).get('domain', 'N/A')
+    detection_type = data.get('data', {}).get('detection', {}).get('type', 'N/A')
+
+    exerra_table.add_row(["Status", status])
+    exerra_table.add_row(["Domain", domain])
+    exerra_table.add_row([f"{get_color('RED')}Is Scam?{get_color('RESET')}", is_scam])
+    exerra_table.add_row(["Detection Type", detection_type])
+
+    return exerra_table
+
+"""
+openSQUAT API
+
+"""
+
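Calling the Exerra check on its own looks like this; the RapidAPI key is a placeholder:

    from RapidAPIs import check_exerra_phishing

    # Placeholder key; RapidAPI issues real ones per subscription
    print(check_exerra_phishing("example.com", "API KEY HERE"))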
diff --git a/UrlCheck.py b/UrlCheck.py
new file mode 100644
index 0000000..985af19
--- /dev/null
+++ b/UrlCheck.py
@@ -0,0 +1,122 @@
+from VirusTotalCheck import virus_total_urlanalysis, get_analysis_report
+from Abuse_IPDB import check_ip_reputation, check_IP_AbuseIPDB
+from GoogleSafeBrowsing import display_google_safe_browsing_results
+from prettytable import PrettyTable
+from Colors import get_color
+from ipqualityscore import check_ipqualityscore
+from urlio import urlscan_io_analysis
+from RapidAPIs import check_exerra_phishing
+import time
+from cmdsnrChecker import analyze_website
+
+"""
+CHANGE THIS -----------------------
+"""
+abuse_ipdb_api = "API KEY HERE"
+
+def checkURL():
+    # APIs
+    """
+    CHANGE THESE -------------------------
+    """
+    ipscore_api = "API KEY HERE"
+    api_key_urlscan = "API KEY HERE"
+    api_key_gs = "API KEY HERE"
+    api_key_virustotal = "API KEY HERE"
+    api_ExerraPhish = "API KEY HERE"
+
+    space = "======" * 12
+
+    url = input('Enter URL to scan: ')
+
+    # Start with the fast RapidAPI check
+    print(space)
+    print("Checking with Exerra Phishing Check...\n")
+    exerra_result_table = check_exerra_phishing(url, api_ExerraPhish)
+    print(exerra_result_table)
+
+    time.sleep(5)
+
+    # IPQualityScore
+    print(space)
+    print('Checking with IPQualityScore...\n')
+    data = check_ipqualityscore(url, ipscore_api)
+
+    if data.get("success", True):  # Default to True for backward compatibility
+        ipqs_table = PrettyTable()
+        ipqs_table.field_names = ["Attribute", "Value"]
+        for key, value in data.items():
+            ipqs_table.add_row([key, value])
+        print(ipqs_table)
+    else:
+        print(data["message"])
+
+    # AbuseIPDB
+    print(space)
+    time.sleep(5)
+    print("Checking IP reputation with AbuseIPDB...\n")
+    abuse_ipdb_result = check_IP_AbuseIPDB(abuse_ipdb_api, url)
+    print(abuse_ipdb_result)
+    print(space)
+
+    # GSB
+    for i in range(5, -1, -1):
+        print(f"\rNext Scan With GSB in: {i}", end=" ", flush=True)
+        time.sleep(1)
+    print(f"\n{get_color('BLUE')}Google Safe Browsing (GSB) Verdict:{get_color('RESET')}")
+    display_google_safe_browsing_results(url, api_key_gs)
+    print(space)
+
+    # urlscan.io
+    print(f"\nPerforming {get_color('BLUE')}urlScan.io{get_color('RESET')} scan:\n")
+    urlscan_io_analysis(url, api_key_urlscan)
+
+    print(space)
+
+    # analyze_website prints its own report and returns None
+    analyze_website(url)
+
+    print(space)
+
+    # VirusTotal
+    for i in range(5, -1, -1):
+        print(f"\rPerforming VirusTotal URL analysis in: {i}", end=" ", flush=True)
+        time.sleep(1)
+    print(f"\n{get_color('BLUE')}Virus Total Verdict:{get_color('RESET')}")
+
+    url_id = virus_total_urlanalysis(url, api_key_virustotal)
+    if url_id:
+        report = get_analysis_report(url_id, api_key_virustotal)
+        if isinstance(report, dict) and 'data' in report:
+            stats = report['data']['attributes']['stats']
+            malicious = stats['malicious']
+            suspicious = stats['suspicious']
+            harmless = stats['harmless']
+            undetected = stats['undetected']
+            total_scans = malicious + suspicious + harmless + undetected
+
+            vt_table = PrettyTable()
+            vt_table.field_names = ["Category", "Count"]
+            vt_table.add_row([f"{get_color('RED')}Malicious{get_color('RESET')}", f"{malicious} (out of {total_scans})"])
+            vt_table.add_row([f"{get_color('ORANGE')}Suspicious{get_color('RESET')}", suspicious])
+            vt_table.add_row(["Harmless", harmless])
+            vt_table.add_row(["Undetected", undetected])
+            if malicious >= 7:
+                print(f"\nNOTE: Even though {harmless} anti-virus engines did not flag it, since the number of malicious reports is 7 or more, there is a high chance of it being an actual phishing or malicious page.")
+            elif harmless == 0 and malicious == 0:
+                print('There might have been an error while scanning with VirusTotal; try scanning again and see if you get any results. If not, the results should be accurate.')
+
+            return vt_table
+        else:
+            return "Analysis result unavailable."
+    else:
+        return "Error submitting URL for analysis."
+
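Editing the keys in place works, but one alternative is reading them from the environment; a minimal sketch, noting that the variable names here are hypothetical, not something the project defines:

    import os

    # Hypothetical variable names; export them yourself before running
    abuse_ipdb_api = os.getenv("ABUSEIPDB_API_KEY", "API KEY HERE")
    ipscore_api = os.getenv("IPQS_API_KEY", "API KEY HERE")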
diff --git a/VirusTotalCheck.py b/VirusTotalCheck.py
new file mode 100644
index 0000000..776da82
--- /dev/null
+++ b/VirusTotalCheck.py
@@ -0,0 +1,38 @@
+import requests
+
+
+"""
+URLs are normalized to http:// before submission.
+"""
+
+def format_url_for_virustotal(url):
+    if url.startswith('https://'):
+        url = 'http://' + url[len('https://'):]
+    elif not url.startswith('http://'):
+        url = 'http://' + url
+    return url
+
+def virus_total_urlanalysis(original_url, api_key):
+    url = format_url_for_virustotal(original_url)
+    headers = {
+        'x-apikey': api_key
+    }
+    response = requests.post('https://www.virustotal.com/api/v3/urls', headers=headers, data={'url': url})
+    if response.status_code == 200:
+        result = response.json()
+        url_id = result['data']['id']
+        return url_id
+    else:
+        print(f"Error submitting URL for analysis: {response.status_code}")
+        return None
+
+def get_analysis_report(url_id, api_key):
+    headers = {
+        'x-apikey': api_key
+    }
+    response = requests.get(f'https://www.virustotal.com/api/v3/analyses/{url_id}', headers=headers)
+    if response.status_code == 200:
+        result = response.json()
+        return result
+    else:
+        return f"Error: Code Response: {response.status_code}"
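The two-step flow in miniature (placeholder key; a just-submitted analysis may still be queued, which is why UrlCheck.py counts down before fetching the report):

    from VirusTotalCheck import virus_total_urlanalysis, get_analysis_report

    api_key = "API KEY HERE"  # placeholder
    url_id = virus_total_urlanalysis("example.com", api_key)
    if url_id:
        print(get_analysis_report(url_id, api_key))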
diff --git a/cmdsnrChecker.py b/cmdsnrChecker.py
new file mode 100644
index 0000000..ecf7f6f
--- /dev/null
+++ b/cmdsnrChecker.py
@@ -0,0 +1,111 @@
+from requests_html import HTMLSession
+import whois
+from datetime import datetime
+import ssl
+import socket
+from Colors import get_color
+import time
+
+suspicious_points = 0
+
+
+def cmdsnr_intro():
+    print(f"The cmdsnr Checker uses {get_color('BLUE')}Age, Certificates, Page Parsing (and more..){get_color('RESET')} techniques.\nAt the end you will receive the total number of 'suspicious' points gathered.")
+
+def check_ssl_certificate(domain):
+    global suspicious_points
+    context = ssl.create_default_context()
+    try:
+        # Timeout keeps unreachable hosts from hanging the check
+        with socket.create_connection((domain, 443), timeout=10) as sock:
+            with context.wrap_socket(sock, server_hostname=domain) as ssock:
+                certificate = ssock.getpeercert()
+
+        country_name = state_or_province = locality = organization = common_name = dns_names = ""
+
+        # Parsing Subject
+        for subject in certificate['subject']:
+            for key, value in subject:
+                if key == 'countryName':
+                    country_name = value
+                elif key == 'stateOrProvinceName':
+                    state_or_province = value
+                elif key == 'localityName':
+                    locality = value
+                elif key == 'organizationName':
+                    organization = value
+                elif key == 'commonName':
+                    common_name = value
+
+        dns_entries = [entry[1] for entry in certificate.get('subjectAltName', ())]
+        dns_names = ", ".join(dns_entries)
+
+        print(f"{get_color('BLUE')}Valid SSL certificate found{get_color('RESET')} for {domain}.")
+        print("\nInfos:")
+        print(f"Country Name: {country_name}")
+        print(f"State or Province: {state_or_province}")
+        print(f"Locality (City): {locality}")
+        print(f"Organization: {organization}")
+        print(f"Common Name: {common_name}")
+        print(f"DNS Names: {dns_names}\n\n")
+        return True
+    except Exception as e:
+        print(f"SSL check failed for {domain}: {e}")
+        suspicious_points += 2
+        return False
+
+def analyze_website(url):
+    global suspicious_points
+    suspicious_points = 0  # Reset between runs so repeated scans don't accumulate points
+    cmdsnr_intro()
+    if not (url.startswith('http://') or url.startswith('https://')):
+        url = 'https://' + url
+    time.sleep(5)
+    domain_name = url.split("//")[-1].split("/")[0]
+    is_certificate_valid = check_ssl_certificate(domain_name)
+    if is_certificate_valid:
+        print(f"SSL certificate has been {get_color('BLUE')}found{get_color('RESET')} for {domain_name}")
+    else:
+        print(f"{get_color('RED')}No SSL certificate found{get_color('RESET')} for {domain_name}")
+    time.sleep(5)
+    try:
+        domain_info = whois.whois(domain_name)
+        creation_date = domain_info.creation_date
+        if isinstance(creation_date, list):
+            creation_date = creation_date[0]
+        age_years = (datetime.now() - creation_date).days / 365.25
+        print(f"Domain {domain_name} is approximately {age_years:.2f} years old.")
+        if age_years < 1:
+            print(f"Domain is less than a year old. {get_color('RED')}+ 3 Suspicious.{get_color('RESET')}\n")
+            suspicious_points += 3
+    except Exception:
+        print(f"Error retrieving WHOIS data for {domain_name}, a suspicious point will be added.\n")
+        suspicious_points += 1
+    time.sleep(5)
+    if len(url) >= 20:
+        print(f"Link is 20 characters or longer. {get_color('RED')}+ Suspicious{get_color('RESET')}\n")
+        suspicious_points += 1
+    else:
+        print(f"{get_color('BLUE')}Passed{get_color('RESET')} character Test\n")
+    session = HTMLSession()
+    try:
+        r = session.get(url, timeout=30)
+        r.html.render(sleep=1, timeout=30)
+        password_fields = r.html.find('input[type="password"]')
+        if password_fields:
+            print(f"The website seems to have a login feature. {get_color('RED')}+ Suspicious{get_color('RESET')}\n")
+            suspicious_points += 2
+        else:
+            print("No obvious login feature found.\n")
+    except Exception as e:
+        print(f"An error occurred while trying to analyze the website: {e}\nA suspicious point will be added.\n")
+        suspicious_points += 1
+    finally:
+        session.close()
+    print(f"Suspicious Points (cmdsnr Checker): {get_color('RED')}{suspicious_points}{get_color('RESET')}\n")
+    time.sleep(3)
+    if suspicious_points >= 5:
+        print(f"The website gathered {suspicious_points} suspicious points. Deemed {get_color('RED')}possibly not safe{get_color('RESET')}.\n")
+    elif suspicious_points >= 3:
+        print(f"There might be some risk involved in visiting {url}, as it has gathered {suspicious_points} suspicious points.\nIf there is a login feature found / involved, it is generally not recommended to share any of your personal information.\nIf some of the checks failed to execute, such as the WHOIS or login check, that should raise suspicion as well.")
+    else:
+        print(f"{get_color('BLUE')}No direct threats found by cmdsnr Checker.{get_color('RESET')}\n")
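The certificate helper can be exercised on its own; this sketch assumes outbound access to port 443 and uses example.com purely as an illustration:

    from cmdsnrChecker import check_ssl_certificate

    check_ssl_certificate("example.com")  # prints subject details, returns True on success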
diff --git a/ipqualityscore.py b/ipqualityscore.py
new file mode 100644
index 0000000..e000f43
--- /dev/null
+++ b/ipqualityscore.py
@@ -0,0 +1,40 @@
+import requests
+from urllib.parse import urlparse
+from Colors import get_color
+
+
+def get_domain_from_url(url):
+    parsed_url = urlparse(url)
+    domain = parsed_url.netloc or parsed_url.path
+    domain = domain.split('@')[-1]  # Remove any user info
+    domain = domain.split(':')[0]   # Remove any port info
+    domain = domain.rstrip('/')     # Remove trailing slash if present
+    return domain
+
+def check_ipqualityscore(original_url, api_key):
+    domain = get_domain_from_url(original_url)
+    api_url = f"https://ipqualityscore.com/api/json/url/{api_key}/{domain}"
+
+    try:
+        response = requests.get(api_url, timeout=30)
+        if response.status_code == 200:
+            data = response.json()
+            # Extract only the required attributes
+            filtered_data = {
+                f"{get_color('RED')}Unsafe{get_color('RESET')}": data.get('unsafe'),
+                f"{get_color('RED')}Malware{get_color('RESET')}": data.get('malware'),
+                f"{get_color('RED')}Phishing{get_color('RESET')}": data.get('phishing'),
+                f"{get_color('RED')}Risk Score (From 0 to 100){get_color('RESET')}": data.get('risk_score'),
+                "Category": data.get('category'),
+                "Domain": data.get('domain'),
+                "IP Address": data.get('ip_address'),
+                f"{get_color('RED')}Suspicious{get_color('RESET')}": data.get('suspicious'),
+                "Country Code": data.get('country_code'),
+            }
+            return filtered_data
+        else:
+            return {"Error": f"Received a {response.status_code} status code from the API."}
+    except Exception as e:
+        return {"Error": f"An exception occurred: {str(e)}."}
+
+
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..b8ea117
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,4 @@
+requests
+prettytable
+requests_html
+python-whois
\ No newline at end of file
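How the domain extraction behaves on a messy URL (values illustrative):

    from ipqualityscore import get_domain_from_url

    get_domain_from_url("https://user@sub.example.com:8443/path")  # -> "sub.example.com"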
diff --git a/urlio.py b/urlio.py
new file mode 100644
index 0000000..1117bf7
--- /dev/null
+++ b/urlio.py
@@ -0,0 +1,90 @@
+import requests
+import json
+import time
+from Colors import get_color
+
+def urlscan_io_analysis(url, api_key_urlscan):
+    submit_url = 'https://urlscan.io/api/v1/scan/'
+    headers = {'API-Key': api_key_urlscan, 'Content-Type': 'application/json'}
+    data = {'url': url, 'visibility': 'public'}
+    response = requests.post(submit_url, headers=headers, data=json.dumps(data))
+
+    if response.status_code == 200:
+        response_data = response.json()
+        scan_uuid = response_data['uuid']
+        print(f"URLScan.io scan submitted successfully. Scan UUID: {scan_uuid}")
+
+        max_retries = 10
+        attempt = 0
+        while attempt < max_retries:
+            attempt += 1
+            print(f"\rWaiting for URLScan.io results. Attempt {attempt}/{max_retries}...", end='', flush=True)
+            time.sleep(10)  # Wait for 10 seconds before checking again
+            result_url = f'https://urlscan.io/api/v1/result/{scan_uuid}/'
+            result_response = requests.get(result_url)
+
+            if result_response.status_code == 200:
+                result_data = result_response.json()
+                page_country = result_data.get('page', {}).get('country', 'Country not available')
+                server_ip = result_data.get('page', {}).get('ip', 'IP not available')
+                print(f"\nServer IP: {server_ip}, Page Country: {page_country}")
+
+                verdicts = result_data.get('verdicts', {})
+                if 'overall' in verdicts:
+                    overall_verdict = verdicts['overall']
+                    is_malicious = overall_verdict.get('malicious', False)
+                    print(f"{get_color('RED')}The website is deemed malicious by URLScan.io.{get_color('RESET')}" if is_malicious else "The website is not deemed malicious (or has no classification) by URLScan.io.")
+                else:
+                    print("The website has no classification by URLScan.io.")
+                return
+            elif attempt == max_retries:
+                print("\nFinal attempt to retrieve results failed. Moving on.")
+                return
+            else:
+                continue
+
+    else:
+        print(f"\nFailed to submit URL for scanning to URLScan.io. Status code: {response.status_code}")
+
+
+# Standalone test harness; it duplicates urlscan_io_analysis and is not wired into the scanner.
+def urlIO():
+    api_key = 'API KEY HERE'  # Never commit a real key here
+    url_to_scan = 'http://joicedate.com/'
+    submit_url = 'https://urlscan.io/api/v1/scan/'
+    headers = {'API-Key': api_key, 'Content-Type': 'application/json'}
+    data = {'url': url_to_scan, 'visibility': 'public'}
+
+    response = requests.post(submit_url, headers=headers, data=json.dumps(data))
+
+    if response.status_code == 200:
+        response_data = response.json()
+        scan_uuid = response_data['uuid']
+        print(f"Scan submitted successfully. Scan UUID: {scan_uuid}")
+        max_retries = 6
+        attempt = 0
+        while attempt < max_retries:
+            print(f"Attempt {attempt + 1}/{max_retries}: Waiting for the scan to complete...")
+            time.sleep(10)
+            result_url = f'https://urlscan.io/api/v1/result/{scan_uuid}/'
+            result_response = requests.get(result_url)
+            if result_response.status_code == 200:
+                result_data = result_response.json()
+                page_country = result_data.get('page', {}).get('country', 'Country not available')
+                server_ip = result_data.get('page', {}).get('ip', 'IP not available')
+                print(f"Server IP: {server_ip}, Page Country: {page_country}")
+                if 'verdicts' in result_data and 'overall' in result_data['verdicts']:
+                    overall_verdict = result_data['verdicts']['overall']
+                    if 'malicious' in overall_verdict and overall_verdict['malicious']:
+                        print("The website is deemed malicious.")
+                    else:
+                        print("The website is not deemed malicious based on the overall verdict.")
+                break
+            else:
+                print("Scan is not finished yet or another error occurred.")
+            attempt += 1
+            if attempt == max_retries:
+                print("Exceeded maximum retry attempts without successful scan completion.")
+    else:
+        print("Failed to submit URL for scanning")
+        print(response.text)
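An end-to-end sketch of the urlscan.io flow with a placeholder key; note that visibility 'public' submits the URL to urlscan's public index, so avoid scanning anything sensitive:

    from urlio import urlscan_io_analysis

    urlscan_io_analysis("https://example.com", "API KEY HERE")  # placeholder key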