diff --git a/cobra/__version__.py b/cobra/__version__.py
index d24f86a7..466335a4 100644
--- a/cobra/__version__.py
+++ b/cobra/__version__.py
@@ -29,3 +29,24 @@
     python {m} -t {tg} -f json -o http://push.to.com/api
     python {m} -H 127.0.0.1 -P 8888
 """.format(m='cobra.py', td='tests/vulnerabilities', tg='https://github.com/ethicalhack3r/DVWA')
+
+__introduction_git__ = """
+This script pushes your targets to the Cobra API
+Please write cobra_ip and secret_key in the config file when you want to scan a specified git address
+Please write gitlab_url, private_token, cobra_ip and secret_key when you want to scan all gitlab projects
+"""
+
+__epilog_git__ = """Usage:
+    python {m} -a
+    python {m} -a -r cvi-190001,cvi-190002
+    python {m} -a -f json -o /tmp/report.json
+
+    python {m} -t {td}
+    python {m} -t {td},{td1}
+    python {m} -t {td},{td1} -d
+    python {m} -t {td} -r cvi-190001,cvi-190002
+    python {m} -t {td} -f json -o /tmp/report.json
+    python {m} -t {tg} -f json -o feei@feei.cn
+    python {m} -t {tg} -f json -o http://push.to.com/api
+""".format(m='git_projects.py', td='tests/vulnerabilities', td1='tests/dvwa',
+           tg='https://github.com/ethicalhack3r/DVWA')
diff --git a/cobra/cve.py b/cobra/cve.py
index 6567e59a..b4f3680b 100644
--- a/cobra/cve.py
+++ b/cobra/cve.py
@@ -18,6 +18,7 @@
 import gzip
 import xml.etree.cElementTree as eT
 import multiprocessing
+import subprocess
 from .config import project_directory, Config, config_path
 from .log import logger
 from .dependencies import Dependencies
@@ -283,6 +284,24 @@
     for t in threads:
         t.join()
     end_time = datetime.datetime.now()
+    for afile in files:
+        param = ['file', afile]
+        p = subprocess.Popen(param, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        res_out, res_err = p.communicate()
+
+        res_out = res_out.decode('utf-8')
+        res_err = res_err.decode('utf-8')
+
+        if 'HTML' in res_out:
+            os.remove(afile)
+            afile_name = os.path.split(afile)[1]
+            year = afile_name.split('.')[0]
+            url = "https://static.nvd.nist.gov/feeds/xml/cve/2.0/nvdcve-2.0-" + str(year) + ".xml.gz"
+            try:
+                urlretrieve(url, afile)
+            except IOError:
+                logger.warning('[CVE] Failed to download {}'.format(afile))
+
     logger.info("All CVE xml file already download success, use time:%ds" % (end_time - start_time).seconds)
     return files
 
@@ -292,11 +311,17 @@
     start_time = datetime.datetime.now()
     logger.info("Start decompress rule files, Please wait a moment....")
     for gz_file in gz_files:
-        f_name = gz_file.replace(".gz", "")
-        g_file = gzip.GzipFile(gz_file)
-        open(f_name, "wb+").write(g_file.read())
-        g_file.close()
-        os.remove(gz_file)
+        if os.path.exists(gz_file):
+            f_name = gz_file.replace(".gz", "")
+
+            try:
+                g_file = gzip.GzipFile(gz_file, "rb")
+                open(f_name, "wb+").write(g_file.read())
+                g_file.close()
+            except IOError:
+                logger.warning('[CVE] Failed to decompress {}'.format(gz_file))
+
+            os.remove(gz_file)
     end_time = datetime.datetime.now()
     logger.info("Decompress success, use time:%ds" % (end_time - start_time).seconds)
     return True
diff --git a/git_projects.py b/git_projects.py
index 4bcf9da4..d0adbc30 100644
--- a/git_projects.py
+++ b/git_projects.py
@@ -15,8 +15,10 @@
 import requests
 import re
 import threading
+import argparse
 from cobra.log import logger
 from cobra.config import code_path, Config
+from cobra.__version__ import __epilog_git__, __introduction_git__
 
 try:
     # Python 3
@@ -25,40 +27,77 @@
     # Python 2
     import Queue as queue
 
-
 git_urls = []
 
 
-def start():
+def start(target, format, output, rules, dels, all):
+    """
+    push the target(s) to the cobra API
+    :param target: git address or list of targets to push
+    :param format: report format: json, csv or xml
+    :param output: report output: STREAM, FILE, HTTP API URL or MAIL
+    :param rules: rule ids to scan with, e.g: CVI-100001,cvi-190001
+    :param dels: delete the target directory after the scan
+    :param all: push all gitlab projects instead of a single target
+    :return:
+    """
     url = Config('git', 'gitlab_url').value
     private_token = Config('git', 'private_token').value
     cobra_ip = Config('git', 'cobra_ip').value
     key = Config('cobra', 'secret_key').value
     threads = []
-    pages = get_pages(url, private_token)
-    q_pages = queue.Queue(pages)
     result_path = code_path + '/result_sid'
-    fi = open(result_path, 'w+')
-    for i in range(int(pages)):
-        q_pages.put(i + 1)
+    fi = open(result_path, 'a+')
 
-    for i in range(10):
-        thread = threading.Thread(target=get_git_urls, args=(url, private_token, q_pages, fi))
-        thread.start()
-        threads.append(thread)
+    try:
+        if all is False and target != '':
+            if isinstance(target, list):
+                for tar in target:
+                    fi.write(tar + '\n')
+            else:
+                fi.write(target + '\n')
 
-    for thread in threads:
-        thread.join()
+            res = push_to_api(target, cobra_ip, key, fi, format, output, rules, dels)
 
-    res = push_to_api(git_urls, cobra_ip, key, fi)
+        elif all is True and target == '':
+            pages = get_pages(url, private_token)
+            q_pages = queue.Queue(pages)
 
-    if res:
-        logger.info("Git push success: {}".format(len(git_urls)))
-    else:
-        logger.info("Git push fail")
+            for i in range(int(pages)):
+                q_pages.put(i + 1)
+
+            for i in range(10):
+                thread = threading.Thread(target=get_git_urls, args=(url, private_token, q_pages, fi))
+                thread.start()
+                threads.append(thread)
+
+            for thread in threads:
+                thread.join()
+
+            res = push_to_api(git_urls, cobra_ip, key, fi, format, output, rules, dels)
+
+        else:
+            res = False
+
+        if res:
+            logger.info("[GIT-PRO] Git push success")
+            logger.info("[GIT-PRO] All projects have been pushed")
+        else:
+            logger.warning("[GIT-PRO] Git push failed")
+
+        fi.close()
+
+    except requests.exceptions.MissingSchema:
+        logger.warning('[GIT-PRO] Please write gitlab_url and private_token in the config file')
 
-    fi.close()
-    logger.info("All projects have been pushed")
+    except requests.exceptions.ConnectionError:
+        logger.warning('[GIT-PRO] Please check that cobra_ip and gitlab_url are correct')
+
+    except requests.exceptions.InvalidSchema:
+        logger.warning('[GIT-PRO] Please add http:// before the cobra_ip or gitlab_url')
+
+    except Exception as e:
+        logger.warning('[GIT-PRO] {}'.format(e))
 
 
 def get_git_urls(url, private_token, q_pages, fi):
@@ -90,39 +129,69 @@
                 git_urls.append(request_url)
 
         elif r.status_code == 404:
-            logger.warning("page %d 404" % page)
+            logger.warning("[GIT-PRO] page %d 404" % page)
         else:
-            logger.warning("page %d is %d" % page, r.status_code)
+            logger.warning("[GIT-PRO] page %d is %d" % (page, r.status_code))
 
         q_pages.task_done()
 
 
 def request_target(target_url, params=None, header=None, method="get"):
+    """
+    send a GET or POST request to the target url
+    :param target_url:
+    :param params:
+    :param header:
+    :param method:
+    :return:
+    """
     if method == "get":
         response = requests.get(url=target_url, params=params, headers=header)
         return response
+
     if method == "post":
         response = requests.post(url=target_url, data=json.dumps(params), headers=header)
         return response
 
 
-def push_to_api(urls, cobra_ip, key, fi):
+def push_to_api(urls, cobra_ip, key, fi, format, output, rules, dels):
+    """
+    :param urls:
+    :param cobra_ip:
+    :param key:
+    :param fi:
+    :param format:
+    :param output:
+    :param rules:
+    :param dels:
+    :return:
+    """
     headers = {"Content-Type": "application/json"}
     url = cobra_ip + "/api/add"
-    payload = {"key": key, "target": urls, "dels": True, "rule": "cvi-190009"}
+    payload = {"key": key, "target": urls, "dels": dels, "formatter": format, "output": output,
+               "rule": rules}
     r = request_target(url, payload, headers, method="post")
+
     if r.status_code == 200:
         fi.write(str(r.json()) + '\n')
-        logger.info(r.json())
+        logger.info('[GIT-PRO] ' + str(r.json()))
         return True
+
     elif r.status_code == 404:
-        logger.info("The page is 404")
+        logger.info("[GIT-PRO] The page returned 404")
+
     else:
-        logger.info(r.json())
+        logger.info('[GIT-PRO] ' + str(r.json()))
     return False
 
 
 def get_pages(url, private_token):
+    """
+    get the number of gitlab project pages
+    :param url:
+    :param private_token:
+    :return:
+    """
     params = {"private_token": private_token}
     response = request_target(url, params)
     res = response.headers['link'].split(",")
@@ -132,5 +201,42 @@
     return pages
 
 
+def _check_rule_name(name):
+    return re.match(r'^(cvi|CVI)-\d{6}(\.xml)?', name.strip()) is not None
+
+
 if __name__ == '__main__':
-    start()
+    special_rules = []
+
+    parser = argparse.ArgumentParser(prog='git_projects', epilog=__epilog_git__, description=__introduction_git__, formatter_class=argparse.RawDescriptionHelpFormatter)
+    parser.add_argument('-t', '--target', dest='target', action='store', default='', metavar='', help='The git address or target list, e.g: tests/vulnerabilities,https://github.com/ethicalhack3r/DVWA')
+    parser.add_argument('-f', '--format', dest='format', action='store', default='json', metavar='', choices=['json', 'csv', 'xml'], help='report output format')
+    parser.add_argument('-o', '--output', dest='output', action='store', default='', metavar='', help='report output STREAM, FILE, HTTP API URL, MAIL')
+    parser.add_argument('-r', '--rule', dest='rules', action='store', default=None, metavar='', help='specify rules, e.g: CVI-100001,cvi-190001')
+    parser.add_argument('-d', '--dels', dest='dels', action='store_true', default=False, help='delete the target directory after the scan')
+    parser.add_argument('-a', '--all', dest='all', action='store_true', default=False, help='push all git projects from gitlab')
+    args = parser.parse_args()
+
+    if args.target == '' and args.all is False:
+        parser.print_help()
+        exit()
+
+    if ',' in args.target:
+        targets = args.target.split(',')
+    else:
+        targets = args.target
+
+    try:
+        if ',' in args.rules:
+            rules = args.rules.split(',')
+            for rule in rules:
+                if _check_rule_name(rule) is False:
+                    logger.critical('[GIT-PRO] Invalid rule name (e.g: CVI-110001): {sr}'.format(sr=rule))
+        else:
+            if _check_rule_name(args.rules) is False:
+                logger.critical('[GIT-PRO] Invalid rule name (e.g: CVI-110001): {sr}'.format(sr=args.rules))
+
+    except TypeError:
+        logger.info('[GIT-PRO] No rules specified, Cobra will use all rules to scan')
+
+    start(targets, args.format, args.output, args.rules, args.dels, args.all)
diff --git a/tests/test_cve_parse.py b/tests/test_cve_parse.py
index 79bd0240..5c9f4cf8 100644
--- a/tests/test_cve_parse.py
+++ b/tests/test_cve_parse.py
@@ -112,7 +112,8 @@ def test_download_rule_gz():
     files = download_rule_gz()
     assert isinstance(files, list)
     for file_ in files:
-        os.remove(file_)
+        if os.path.exists(file_):
+            os.remove(file_)
 
 
 def test_un_gz():
@@ -120,7 +121,8 @@
     res = un_gz(files)
     assert res is True
     for year in range(2002, datetime.datetime.now().year+1):
-        os.remove(project_directory+"/rules/%d.xml" % year)
+        if os.path.exists(project_directory+"/rules/%d.xml" % year):
+            os.remove(project_directory+"/rules/%d.xml" % year)
 
 
 def test_rule_single():