diff --git a/tools/pe-sieve32.exe b/tools/pe-sieve32.exe
index 9d0bb984..2ede93ad 100644
Binary files a/tools/pe-sieve32.exe and b/tools/pe-sieve32.exe differ
diff --git a/tools/pe-sieve64.exe b/tools/pe-sieve64.exe
index ae21b525..b9ece7ff 100644
Binary files a/tools/pe-sieve64.exe and b/tools/pe-sieve64.exe differ
diff --git a/tools/vt-checker-hosts.py b/tools/vt-checker-hosts.py
deleted file mode 100644
index c68bff3d..00000000
--- a/tools/vt-checker-hosts.py
+++ /dev/null
@@ -1,653 +0,0 @@
-#!/usr/bin/env python2.7
-"""Checks IPs and Hosts read from an input file on Virustotal"""
-from __future__ import print_function
-
-__AUTHOR__ = 'Florian Roth'
-__VERSION__ = "0.2 July 2017"
-
-"""
-Install dependencies with:
-pip install simplejson colorama IPy pickle pycurl
-"""
-
-import simplejson, json
-import signal
-import urllib
-import urllib2
-import pycurl
-import StringIO
-import urlparse
-import platform
-import time
-import re
-import os
-import sys
-import traceback
-import subprocess
-import argparse
-import socket
-import pickle
-from IPy import IP
-from colorama import init, Fore, Back, Style
-
-URLS = {'ip': r'https://www.virustotal.com/vtapi/v2/ip-address/report',
-        'domain': r'https://www.virustotal.com/vtapi/v2/domain/report'}
-API_KEY = '-'
-WAIT_TIME = 15  # Public API allows 4 request per minute, so we wait 15 secs by default
-IP_WHITE_LIST = ['1.0.0.127', '127.0.0.1']
-OWNER_WHITE_LIST = ['Google Inc.', 'Facebook, Inc.', 'CloudFlare, Inc.', 'Microsoft Corporation',
-                    'Akamai Technologies, Inc.']  # not yet used
-DOMAIN_WHITE_LIST = ['sourceforge.net']
-RES_TARGETS = {'ip': 'hostname', 'domain': 'ip_address'}
-
-
-def fetch_ip_and_domains(line):
-    """
-    Extracts IPs and Domains from a log line
-    """
-    domains = []
-    # Modify line to easily extract IPs and Domains from reports
-    # get 183.200.23[.]213
-    mod_line = line.replace("[", "").replace("]", "")
-    ip_pattern = r'\b(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\b'
-    ips = re.findall(ip_pattern, mod_line)
-    domain_pattern = r'\b(?=.{4,253}$)(((?!-)[a-zA-Z0-9-]{1,63}(?
0: - - # Predefine Rating - rating = "clean" - - # Other Info - owner = response_dict.get("as_owner") - country = response_dict.get("country") - - # WHITE LIST CHECKS - if owner: - for owl in OWNER_WHITE_LIST: - if owl in owner: - print_highlighted("Owner white-listed - skipping this host OWNER: %s" % owner) - - # Resolutions - if 'resolutions' in response_dict: - resolution_list = response_dict['resolutions'] - for i, res in enumerate(resolution_list): - resolutions.append({'target': res[RES_TARGETS[cat]], 'last_resolved': res['last_resolved']}) - if i < max_items: - print_highlighted("HOST: {0} LAST_RESOLVED: {1}".format(res[RES_TARGETS[cat]], - res['last_resolved'])) - else: - if "hosts" not in shown_messages: - sys.stdout.write("Others found: ") - shown_messages["hosts"] = True - sys.stdout.write(".") - # Add the IP to the elements - if args.recursive: - new_value = res[RES_TARGETS[cat]] - if is_ip(new_value): - elements.append({'value': new_value, 'type': 'ip'}) - else: - elements.append({'value': new_value, 'type': 'domain'}) - if "hosts" in shown_messages: - sys.stdout.write("\n") - - # URL matches - if 'detected_urls' in response_dict: - detected_urls = response_dict['detected_urls'] - for i, url in enumerate(detected_urls): - positives_url = url['positives'] - total_url = url['total'] - urls.append({'url': url['url'], 'positives': positives_url, 'total': total_url}) - if i < max_items and args.download: - print_highlighted("URL: {0} POSITIVES: {1} TOTAL: {2}".format(url['url'], - positives_url, - total_url)) - else: - if "urls" not in shown_messages: - sys.stdout.write("Others found: ") - shown_messages["urls"] = True - sys.stdout.write(".") - positives += positives_url - total += total_url - # Download URL - if args.download: - download_url(value, url['url']) - if "urls" in shown_messages: - sys.stdout.write("\n") - - # Samples - if 'detected_communicating_samples' in response_dict: - samples_list = response_dict['detected_communicating_samples'] - for i, sample in enumerate(samples_list): - positives_sample = sample['positives'] - total_sample = sample['total'] - date = sample['date'] - sha256 = sample['sha256'] - samples.append({'sample': sha256, 'positives': positives_sample, 'total': total_sample, - 'date': date}) - if i < max_items: - print_highlighted("SAMPLE: {0} POSITIVES: {1} TOTAL: {2} " - "DATE: {3}".format(sha256, positives_sample, total_sample, date)) - else: - if "samples" not in shown_messages: - sys.stdout.write("Others found: ") - shown_messages["samples"] = True - sys.stdout.write(".") - sample_positives += positives_sample - sample_total += total_sample - if "samples" in shown_messages: - sys.stdout.write("\n") - - # Calculations ------------------------------------------------------------------------------------- - # Rating - # Calculate ratio - if positives > 0 and total > 0: - ratio = (float(positives) / float(total)) * 100 - # Set rating - if ratio > 3 and rating == "clean": - rating = "suspicious" - if ratio > 10 and (rating == "clean" or rating == "suspicious"): - rating = "malicious" - - # Type - res_color = Back.GREEN - if rating == "suspicious": - res_color = Back.YELLOW - if rating == "malicious": - res_color = Back.RED - - # Result ------------------------------------------------------------------------------------------- - result = "%s / %s" % (positives, total) - print_highlighted("COUNTRY: {0} OWNER: {1}".format(country, owner)) - print_highlighted("POSITIVES: %s RATING: %s" % (result, rating), hl_color=res_color) - - else: - # Print the 
highlighted result line - print_highlighted("POSITIVES: %s RATING: %s" % (result, rating), hl_color=res_color) - - # CSV OUTPUT ------------------------------------------------------------------------------------------- - # Add to log file - if not nocsv: - # Hosts string - targets = [] - for r in resolutions: - targets.append(r['target']) - targets_value = ', '.join(targets) - # Malicious samples - mal_samples = [] - for s in samples: - if s['positives'] > 3: - mal_samples.append(s['sample']) - samples_value = ', '.join(mal_samples) - # urls = ', '.join("%s=%r" % (key,val) for (key,val) in urls.iteritems()) - # samples = ', '.join("%s=%r" % (key,val) for (key,val) in samples.iteritems()) - result_line = "{0};{1};{2};{3};{4};{5};{6};{7}\n".format(value, rating, owner, country, - positives, total, - samples_value, targets_value) - with open(result_file, "a") as fh_results: - fh_results.write(result_line) - - # Add to cache ----------------------------------------------------------------------------------------- - cache[value] = result - - # Wait ------------------------------------------------------------------------------------------------- - # Wait some time for the next request - time.sleep(WAIT_TIME) - - -def download_url(host_id, url): - """ - Downloads an URL and stores the response to a directory with named as the host/IP - :param host_id: - :param url: - :return: - """ - output = StringIO.StringIO() - header = StringIO.StringIO() - - print("[>] Trying to download URL: %s" % url) - # Download file - try: - # 'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)') - c = pycurl.Curl() - c.setopt(c.URL, url) - c.setopt(pycurl.CONNECTTIMEOUT, 10) - c.setopt(pycurl.TIMEOUT, 180) - c.setopt(pycurl.FOLLOWLOCATION, 1) - c.setopt(pycurl.USERAGENT, 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0)') - c.setopt(c.WRITEFUNCTION, output.write) - c.setopt(c.HEADERFUNCTION, header.write) - c.perform() - # Header parsing - header_info = header_function(header.getvalue()) - except Exception as e: - if args.debug: - traceback.print_exc() - print_highlighted("[-] Error MESSAGE: %s" % str(e)) - - # Write File - if c.getinfo(c.RESPONSE_CODE) == 200: - # Check folder - out_path = os.path.join(os.path.abspath(args.o), host_id) - if not os.path.exists(out_path): - os.makedirs(out_path) - # Output file name - r = urlparse.urlparse(url) - filename = r.path.split("/")[-1] - if filename == "": - if r.path != "/": - out_path = os.path.join(out_path, r.path.lstrip("/")) - filename = "index.dat" - out_filename = os.path.join(os.path.abspath(out_path), filename) - # Write file - try: - with open(out_filename, 'wb') as f: - f.write(output.getvalue()) - print_highlighted("[+] Successfully saved to FILE: %s" % out_filename) - except Exception as e: - if args.debug: - traceback.print_exc() - print("[-] Failed to write file %s (use --debug for more info)" % out_filename) - else: - try: - print_highlighted("[i] Response CODE: %s MIME_TYPE: %s SIZE: %s" % ( - str(c.getinfo(c.RESPONSE_CODE)), - header_info['content-type'], - header_info['content-length']) - ) - except Exception as e: - print_highlighted("[-] Response CODE: %s" % str(c.getinfo(c.RESPONSE_CODE))) - - output.close() - - -def header_function(header_raw): - """ - Process header info - Example from pycurl quick start guide http://pycurl.io/docs/latest/quickstart.html - :param header_line: - :return: - """ - headers = {} - header_lines = header_raw.splitlines() - - for header_line in header_lines: - - # HTTP standard specifies that headers 
are encoded in iso-8859-1. - # On Python 2, decoding step can be skipped. - # On Python 3, decoding step is required. - header_line = header_line.decode('iso-8859-1') - - # Header lines include the first status line (HTTP/1.x ...). - # We are going to ignore all lines that don't have a colon in them. - # This will botch headers that are split on multiple lines... - if ':' not in header_line: - continue - - # Break the header line into header name and value. - name, value = header_line.split(':', 1) - - # Remove whitespace that may be present. - # Header lines include the trailing newline, and there may be whitespace - # around the colon. - name = name.strip() - value = value.strip() - - # Header names are case insensitive. - # Lowercase name here. - name = name.lower() - - # Now we can actually record the header name and value. - headers[name] = value - - return headers - - -def signal_handler(signal, frame): - print("\n[+] Saving {0} cache entries to file {1}".format(len(cache), args.c)) - saveCache(cache, args.c) - sys.exit(0) - - -if __name__ == '__main__': - - signal.signal(signal.SIGINT, signal_handler) - init(autoreset=False) - - print(Style.RESET_ALL + Fore.WHITE + Back.BLUE) - print(" ".ljust(80)) - print(" _ ________ _______ __ ".ljust(80)) - print(" | | / /_ __/ / ___/ / ___ ____/ /_____ ____ ".ljust(80)) - print(" | |/ / / / / /__/ _ \/ -_) __/ '_/ -_) __/ ".ljust(80)) - print(" |___/ /_/ \___/_//_/\\__/\__/_/\_\\__/_/ ".ljust(80)) - print(" ".ljust(80)) - print(" IP and Domain Version ".ljust(80)) - print((" " + __AUTHOR__ + " - " + __VERSION__ + "").ljust(80)) - print(" ".ljust(80) + Style.RESET_ALL) - print(Style.RESET_ALL + " ") - - parser = argparse.ArgumentParser(description='Virustotal Online Checker (IP/Domain)') - parser.add_argument('-f', help='File to process (hash line by line OR csv with hash in each line - auto-detects ' - 'position and comment)', metavar='path', default='') - parser.add_argument('-m', help='Maximum number of items (urls, hosts, samples) to show', metavar='max-items', - default=10) - parser.add_argument('-c', help='Name of the cache database file (default: vt-check-db.pkl)', metavar='cache-db', - default='vt-check-db.pkl') - parser.add_argument('--nocache', action='store_true', help='Do not use the load the cache db (vt-check-cache.pkl)', - default=False) - parser.add_argument('--nocsv', action='store_true', help='Do not write a CSV with the results', default=False) - parser.add_argument('--recursive', action='store_true', help='Process the resolved IPs as well', default=False) - parser.add_argument('--download', action='store_true', - help='Try to download the URLs (directories with host/ip names)', default=False) - parser.add_argument('-o', help='Store the downloads to the given directory', metavar='output-folder', - default='./') - parser.add_argument('--dups', action='store_true', help='Do not skip duplicate hashes', default=False) - parser.add_argument('--noresolve', action='store_true', help='Do not perform DNS resolve test on found domain ' - 'names', default=False) - parser.add_argument('--ping', action='store_true', help='Perform ping check on IPs (speeds up process if many ' - 'public but internally routed IPs appear in text file)', - default=False) - parser.add_argument('--debug', action='store_true', default=False, help='Debug output') - - args = parser.parse_args() - - # Check API Key - if API_KEY == '': - print("[E] No API Key set") - print(" Include your API key in the header section of the script (API_KEY)\n") - print(" More info:") 
- print(" https://www.virustotal.com/en/faq/#virustotal-api\n") - sys.exit(1) - - # Check input file - if args.f == '': - print("[E] Please provide an input file with '-f inputfile'\n") - parser.print_help() - sys.exit(1) - if not os.path.exists(args.f): - print("[E] Cannot find input file {0}".format(args.f)) - sys.exit(1) - - # Caches - cache = {} - # Trying to load cache from pickle dump - if not args.nocache: - cache, success = loadCache(args.c) - if success: - print("[+] {0} cache entries read from cache database: {1}".format(len(cache), args.c)) - else: - print("[-] No cache database found") - print("[+] Analyzed IPs/domains will be written to cache database: {0}".format(args.c)) - print("[+] You can always interrupt the scan by pressing CTRL+C without losing the scan state") - - # Open input file - try: - with open(args.f, 'r') as fh_input: - lines = fh_input.readlines() - except Exception as e: - print("[E] Cannot read input file") - sys.exit(1) - - # Result file - if not args.nocsv: - result_file = "check-results_{0}.csv".format(os.path.splitext(os.path.basename(args.f))[0]) - if os.path.exists(result_file): - print("[+] Found results CSV from previous run: {0}".format(result_file)) - print("[+] Appending results to file: {0}".format(result_file)) - else: - print("[+] Writing results to new file: {0}".format(result_file)) - try: - with open(result_file, 'w') as fh_results: - fh_results.write( - "IP;Rating;Owner;Country Code;Log Line No;Positives;Total;Malicious Samples;Hosts\n") - except Exception as e: - print("[E] Cannot write CSV export file: {0}".format(result_file)) - - # Process the input lines - elements = process_lines(lines, args.debug) - - # Process elements - process_elements(elements, result_file, int(args.m), args.nocsv, args.dups, args.noresolve, args.ping, - args.debug) - - # Write Cache - print("\n[+] Saving {0} cache entries to file {1}".format(len(cache), args.c)) - saveCache(cache, args.c) - - print(Style.RESET_ALL) diff --git a/tools/vt-checker.py b/tools/vt-checker.py deleted file mode 100644 index 15208415..00000000 --- a/tools/vt-checker.py +++ /dev/null @@ -1,426 +0,0 @@ -#!/usr/bin/env python2.7 -"""Checks Hashes read from an input file on Virustotal""" -from __future__ import print_function - -__AUTHOR__ = 'Florian Roth' -__VERSION__ = "0.10 September 2017" - -""" -Modified by Hannah Ward: clean up, removal of simplejson, urllib2 with requests - -Install dependencies with: -pip install requests bs4 colorama -""" - -import requests -import time -import re -import os -import signal -import sys -import pickle -from bs4 import BeautifulSoup -import traceback -import argparse -from colorama import init, Fore, Back, Style - -URL = r'https://www.virustotal.com/vtapi/v2/file/report' -VENDORS = ['Microsoft', 'Kaspersky', 'McAfee', 'CrowdStrike', 'TrendMicro', - 'ESET-NOD32', 'Symantec', 'F-Secure', 'Sophos', 'GData'] -API_KEY = '-' -WAIT_TIME = 15 # Public API allows 4 request per minute, so we wait 15 secs by default - - -def fetch_hash(line): - pattern = r'(? 
0: - hash = hash_search[-1] - rest = ' '.join(re.sub('({0}|;|,|:)'.format(hash), ' ', line).strip().split()) - - return hash, rest - return '', '' - - -def print_highlighted(line, hl_color=Back.WHITE): - """ - Print a highlighted line - """ - # Tags - colorer = re.compile('(HARMLESS|SIGNED|MS_SOFTWARE_CATALOGUE)', re.VERBOSE) - line = colorer.sub(Fore.BLACK + Back.GREEN + r'\1' + Style.RESET_ALL + ' ', line) - colorer = re.compile('(SIG_REVOKED)', re.VERBOSE) - line = colorer.sub(Fore.BLACK + Back.RED + r'\1' + Style.RESET_ALL + ' ', line) - colorer = re.compile('(SIG_EXPIRED)', re.VERBOSE) - line = colorer.sub(Fore.BLACK + Back.YELLOW + r'\1' + Style.RESET_ALL + ' ', line) - # Extras - colorer = re.compile('(\[!\])', re.VERBOSE) - line = colorer.sub(Fore.BLACK + Back.CYAN + r'\1' + Style.RESET_ALL + ' ', line) - # Standard - colorer = re.compile('([A-Z_]{2,}:)\s', re.VERBOSE) - line = colorer.sub(Fore.BLACK + hl_color + r'\1' + Style.RESET_ALL + ' ', line) - print(line) - - -def process_permalink(url, debug=False): - """ - Requests the HTML page for the sample and extracts other useful data - that is not included in the public API - """ - headers = {'User-Agent': 'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)', - 'Referrer': 'https://www.virustotal.com/en/'} - info = {'filenames': ['-'], 'firstsubmission': '-', 'harmless': False, 'signed': False, 'revoked': False, - 'expired': False, 'mssoft': False, 'imphash': '-', 'filetype': '-'} - try: - source_code = requests.get(url, headers=headers) - # Extract info from source code - soup = BeautifulSoup(source_code.text, 'html.parser') - # Get file names - elements = soup.find_all('td') - for i, row in enumerate(elements): - text = row.text.strip() - if text == "File names": - file_names = elements[i + 1].text.strip().split("\n") - info['filenames'] = filter(None, map(lambda file: file.strip(), file_names)) - # Get file names - elements = soup.find_all('div') - for i, row in enumerate(elements): - text = row.text.strip() - if text.startswith('File type'): - info['filetype'] = elements[i].text[10:].strip() - # Get additional information - elements = soup.findAll("div", {"class": "enum"}) - for i, row in enumerate(elements): - text = row.text.strip() - if 'First submission' in text: - first_submission_raw = elements[i].text.strip().split("\n") - info['firstsubmission'] = first_submission_raw[1].strip() - if 'imphash' in text: - info['imphash'] = elements[i].text.strip().split("\n")[-1].strip() - # Harmless - if "Probably harmless!" in source_code: - info['harmless'] = True - # Signed - if "Signed file, verified signature" in source_code: - info['signed'] = True - # Revoked - if "revoked by its issuer" in source_code: - info['revoked'] = True - # Expired - if "Expired certificate" in source_code: - info['expired'] = True - # Microsoft Software - if "This file belongs to the Microsoft Corporation software catalogue." 
in source_code: - info['mssoft'] = True - except Exception as e: - if debug: - traceback.print_exc() - finally: - # Return the info dictionary - return info - - -def saveCache(cache, fileName): - """ - Saves the cache database as pickle dump to a file - :param cache: - :param fileName: - :return: - """ - with open(fileName, 'wb') as fh: - pickle.dump(cache, fh, pickle.HIGHEST_PROTOCOL) - - -def loadCache(fileName): - """ - Load cache database as pickle dump from file - :param fileName: - :return: - """ - try: - with open(fileName, 'rb') as fh: - return pickle.load(fh), True - except Exception as e: - # traceback.print_exc() - return {}, False - - -def removeNonAsciiDrop(string): - nonascii = "error" - # print "CON: ", string - try: - # Generate a new string without disturbing characters and allow new lines - nonascii = "".join(i for i in string if (ord(i) < 127 and ord(i) > 31) or ord(i) == 10 or ord(i) == 13) - except Exception as e: - traceback.print_exc() - pass - return nonascii - - -def signal_handler(signal, frame): - print("\n[+] Saving {0} cache entries to file {1}".format(len(cache), args.c)) - saveCache(cache, args.c) - sys.exit(0) - - -def process_lines(lines, result_file, nocsv=False, dups=False, debug=False): - """ - Process the input file line by line - """ - - # Some statistics that could help find similarities - imphashes = {} - - for line in lines: - - # Skip comments - if line.startswith("#"): - continue - - # Remove line break - line.rstrip("\n\r") - - # Get all hashes in line - # ... and the rest of the line as comment - hashVal, comment = fetch_hash(line) - - # If no hash found - if hashVal == '': - continue - - # Cache - if hashVal in cache: - if dups: - # Colorized head of each hash check - print_highlighted("\nHASH: {0} COMMENT: {1}".format(hashVal, comment)) - print_highlighted("RESULT: %s (from cache)" % cache[hashVal]) - continue - else: - # Colorized head of each hash check - print_highlighted("\nHASH: {0} COMMENT: {1}".format(hashVal, comment)) - - # Prepare VT API request - parameters = {"resource": hashVal, "apikey": API_KEY} - success = False - while not success: - try: - response_dict = requests.get(URL, params=parameters).json() - success = True - except Exception as e: - if debug: - traceback.print_exc() - # print "Error requesting VT results" - pass - - # Process results - result = "- / -" - virus = "-" - last_submitted = "-" - first_submitted = "-" - filenames = "-" - filetype = "-" - rating = "unknown" - positives = 0 - res_color = Back.CYAN - md5 = "-" - sha1 = "-" - sha256 = "-" - imphash = "-" - harmless = "" - signed = "" - revoked = "" - expired = "" - mssoft = "" - vendor_result_string = "-" - - if response_dict.get("response_code") > 0: - # Hashes - md5 = response_dict.get("md5") - sha1 = response_dict.get("sha1") - sha256 = response_dict.get("sha256") - # AV matches - positives = response_dict.get("positives") - total = response_dict.get("total") - last_submitted = response_dict.get("scan_date") - # Virus Name - scans = response_dict.get("scans") - virus_names = [] - vendor_results = [] - for vendor in VENDORS: - if vendor in scans: - if scans[vendor]["result"]: - virus_names.append("{0}: {1}".format(vendor, scans[vendor]["result"])) - vendor_results.append(scans[vendor]["result"]) - else: - vendor_results.append("-") - else: - vendor_results.append("-") - vendor_result_string = ";".join(vendor_results) - if len(virus_names) > 0: - virus = " / ".join(virus_names) - # Type - rating = "clean" - res_color = Back.GREEN - if positives > 0: - rating 
= "suspicious" - res_color = Back.YELLOW - if positives > 10: - rating = "malicious" - res_color = Back.RED - # Get more information with permalink - if debug: - print("[D] Processing permalink {0}".format(response_dict.get("permalink"))) - info = process_permalink(response_dict.get("permalink"), debug) - # File Names - filenames = removeNonAsciiDrop(", ".join(info['filenames'][:5]).replace(';', '_')) - first_submitted = info['firstsubmission'] - # Other info - filetype = info['filetype'] - imphash = info['imphash'] - if imphash != "-": - if imphash in imphashes: - print_highlighted("[!] Imphash seen in %d samples " - "https://totalhash.cymru.com/search/?hash:%s" % - (imphashes[imphash], imphash), hl_color=res_color) - imphashes[imphash] += 1 - else: - imphashes[imphash] = 1 - # Result - result = "%s / %s" % (response_dict.get("positives"), response_dict.get("total")) - print_highlighted("VIRUS: {0}".format(virus)) - print_highlighted("FILENAMES: {0}".format(filenames)) - print_highlighted("FILE_TYPE: {2} FIRST_SUBMITTED: {0} LAST_SUBMITTED: {1}".format( - first_submitted, last_submitted, filetype)) - - # Permalink analysis results - if info['harmless']: - harmless = " HARMLESS" - if info['signed']: - signed = " SIGNED" - if info['revoked']: - revoked = " SIG_REVOKED" - if info['expired']: - expired = " SIG_EXPIRED" - if info["mssoft"]: - mssoft = "MS_SOFTWARE_CATALOGUE" - - # Print the highlighted result line - print_highlighted("RESULT: %s %s%s%s%s%s" % (result, harmless, signed, revoked, expired, mssoft), - hl_color=res_color) - - # Add to log file - if not nocsv: - result_line = "{0};{1};{2};{3};{4};{5};{6};{7};" \ - "{8};{9};{10};{11};{12};{13};{14};{15};{16};{17}\n".format(hashVal, rating, comment, - positives, - virus, filenames, - first_submitted, - last_submitted, - filetype, - md5, sha1, sha256, imphash, - harmless.lstrip(' '), - signed.lstrip(' '), - revoked.lstrip(' '), - expired.lstrip(' '), - vendor_result_string) - with open(result_file, "a") as fh_results: - fh_results.write(result_line) - - # Add to hash cache - cache[hashVal] = result - - # Wait some time for the next request - time.sleep(WAIT_TIME) - - -if __name__ == '__main__': - - signal.signal(signal.SIGINT, signal_handler) - init(autoreset=False) - - print(Style.RESET_ALL) - print(Fore.WHITE + Back.BLUE) - print(" ".ljust(80)) - print(" _ ________ _______ __ ".ljust(80)) - print(" | | / /_ __/ / ___/ / ___ ____/ /_____ ____ ".ljust(80)) - print(" | |/ / / / / /__/ _ \/ -_) __/ '_/ -_) __/ ".ljust(80)) - print(" |___/ /_/ \___/_//_/\\__/\__/_/\_\\__/_/ ".ljust(80)) - print(" ".ljust(80)) - print((" " + __AUTHOR__ + " - " + __VERSION__ + "").ljust(80)) - print(" ".ljust(80) + Style.RESET_ALL) - print(Style.RESET_ALL + " ") - - parser = argparse.ArgumentParser(description='Virustotal Online Checker') - parser.add_argument('-f', help='File to process (hash line by line OR csv with hash in each line - auto-detects ' - 'position and comment)', metavar='path', default='') - parser.add_argument('-c', help='Name of the cache database file (default: vt-hash-db.pkl)', metavar='cache-db', - default='vt-hash-db.pkl') - parser.add_argument('--nocache', action='store_true', help='Do not use cache database file', default=False) - parser.add_argument('--nocsv', action='store_true', help='Do not write a CSV with the results', default=False) - parser.add_argument('--dups', action='store_true', help='Do not skip duplicate hashes', default=False) - parser.add_argument('--debug', action='store_true', default=False, help='Debug output') - 
-    args = parser.parse_args()
-
-    # Check API Key
-    if API_KEY == '':
-        print("[E] No API Key set")
-        print(" Include your API key in the header section of the script (API_KEY)\n")
-        print(" More info:")
-        print(" https://www.virustotal.com/en/faq/#virustotal-api\n")
-        sys.exit(1)
-
-    # Check input file
-    if args.f == '':
-        print("[E] Please provide an input file with '-f inputfile'\n")
-        parser.print_help()
-        sys.exit(1)
-    if not os.path.exists(args.f):
-        print("[E] Cannot find input file {0}".format(args.f))
-        sys.exit(1)
-
-    # Caches
-    cache = {}
-    # Trying to load cache from pickle dump
-    if not args.nocache:
-        cache, success = loadCache(args.c)
-        if success:
-            print("[+] {0} cache entries read from cache database: {1}".format(len(cache), args.c))
-        else:
-            print("[-] No cache database found")
-        print("[+] Analyzed hashes will be written to cache database: {0}".format(args.c))
-        print("[+] You can always interrupt the scan by pressing CTRL+C without losing the scan state")
-
-    # Open input file
-    try:
-        with open(args.f, 'rU') as fh:
-            lines = fh.readlines()
-    except Exception as e:
-        print("[E] Cannot read input file ")
-        sys.exit(1)
-
-    # Result file
-    # Result file
-    if not args.nocsv:
-        result_file = "check-results_{0}.csv".format(os.path.splitext(os.path.basename(args.f))[0])
-        if os.path.exists(result_file):
-            print("[+] Found results CSV from previous run: {0}".format(result_file))
-            print("[+] Appending results to file: {0}".format(result_file))
-        else:
-            print("[+] Writing results to new file: {0}".format(result_file))
-            try:
-                with open(result_file, 'w') as fh_results:
-                    fh_results.write("Lookup Hash;Rating;Comment;Positives;Virus;File Names;First Submitted;"
-                                     "Last Submitted;MD5;SHA1;SHA256;ImpHash;Harmless;Signed;Revoked;Expired;"
-                                     "{0}\n".format(";".join(VENDORS)))
-            except Exception as e:
-                print("[E] Cannot write export file {0}".format(result_file))
-
-    # Process the input lines
-    process_lines(lines, result_file, args.nocsv, args.dups, args.debug)
-
-    # Write Cache
-    print("\n[+] Saving {0} cache entries to file {1}".format(len(cache), args.c))
-    saveCache(cache, args.c)
-
-    print(Style.RESET_ALL)