

Merge pull request #999 from BlBana/master
Improved the git_projects script: it now accepts command-line arguments to scan a single target or multiple targets, specify one or more rules, set the report output format and output method, and choose whether to delete the project after scanning.
FeeiCN authored Apr 2, 2018
2 parents df11d17 + a961418 commit 9fb4197
Showing 4 changed files with 189 additions and 35 deletions.
21 changes: 21 additions & 0 deletions cobra/__version__.py
@@ -29,3 +29,24 @@
python {m} -t {tg} -f json -o http://push.to.com/api
python {m} -H 127.0.0.1 -P 8888
""".format(m='cobra.py', td='tests/vulnerabilities', tg='https://github.com/ethicalhack3r/DVWA')

__introduction_git__ = """
This script pushes your targets to the Cobra API
Please set cobra_ip and secret_key in the config file when you want to scan specified git addresses
Please set gitlab_url, private_token, cobra_ip and secret_key when you want to scan all GitLab projects
"""

__epilog_git__ = """Usage:
python {m} -a
python {m} -a -r cvi-190001,cvi-190002
python {m} -a -f json -o /tmp/report.json
python {m} -t {td}
python {m} -t {td},{td1}
python {m} -t {td},{td1} -d
python {m} -t {td} -r cvi-190001,cvi-190002
python {m} -t {td} -f json -o /tmp/report.json
python {m} -t {tg} -f json -o [email protected]
python {m} -t {tg} -f json -o http://push.to.com/api
""".format(m='git_projcets.py', td='tests/vulnerabilities', td1='tests/dvwa',
tg='https://github.com/ethicalhack3r/DVWA')
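
The __introduction_git__ text above expects these values in Cobra's config file. A minimal sketch of the entries git_projects.py reads, assuming the Config class reads an INI-style file; the layout and all values are placeholder assumptions, only the section/key pairs come from the code:

# A hedged sketch of the config entries git_projects.py reads. The (section, key)
# pairs are taken from the script; the INI layout and values are assumptions:
#
#   [cobra]
#   secret_key = your_secret_key
#
#   [git]
#   gitlab_url = http://your.gitlab.host/api/v4/projects
#   private_token = your_private_token
#   cobra_ip = http://127.0.0.1:5000
#
from cobra.config import Config

cobra_ip = Config('git', 'cobra_ip').value             # address of a running Cobra API server
secret_key = Config('cobra', 'secret_key').value       # key expected by /api/add
gitlab_url = Config('git', 'gitlab_url').value         # GitLab projects API endpoint (only needed with -a)
private_token = Config('git', 'private_token').value   # GitLab token (only needed with -a)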
35 changes: 30 additions & 5 deletions cobra/cve.py
Expand Up @@ -18,6 +18,7 @@
import gzip
import xml.etree.cElementTree as eT
import multiprocessing
import subprocess
from .config import project_directory, Config, config_path
from .log import logger
from .dependencies import Dependencies
@@ -283,6 +284,24 @@ def download_rule_gz():
for t in threads:
t.join()
end_time = datetime.datetime.now()
for afile in files:
param = ['file', afile]
p = subprocess.Popen(param, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
res_out, res_err = p.communicate()

res_out = res_out.decode('utf-8')
res_err = res_err.decode('utf-8')

if 'HTML' in res_out:
os.remove(afile)
afile_name = os.path.split(afile)[1]
year = afile_name.split('.')[0]
url = "https://static.nvd.nist.gov/feeds/xml/cve/2.0/nvdcve-2.0-" + str(year) + ".xml.gz"
try:
urlretrieve(url, afile)
except IOError:
logger.warning('[CVE] Failed to download {}'.format(afile))

logger.info("All CVE xml file already download success, use time:%ds" % (end_time - start_time).seconds)
return files

@@ -292,11 +311,17 @@ def un_gz(gz_files):
start_time = datetime.datetime.now()
logger.info("Start decompress rule files, Please wait a moment....")
for gz_file in gz_files:
f_name = gz_file.replace(".gz", "")
g_file = gzip.GzipFile(gz_file)
open(f_name, "wb+").write(g_file.read())
g_file.close()
os.remove(gz_file)
if os.path.exists(gz_file):
f_name = gz_file.replace(".gz", "")

try:
g_file = gzip.GzipFile(gz_file, "rb")
open(f_name, "wb+").write(g_file.read())
g_file.close()
except IOError:
logger.warning('[CVE] Failed to decompress {}'.format(gz_file))

os.remove(gz_file)
end_time = datetime.datetime.now()
logger.info("Decompress success, use time:%ds" % (end_time - start_time).seconds)
return True
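
For clarity, a standalone sketch of the validate-and-retry step added to download_rule_gz() above: it assumes the `file` utility is on PATH, the feed URL pattern is the one used in the change, and the path passed in is a placeholder.

import os
import subprocess

try:
    from urllib.request import urlretrieve  # Python 3
except ImportError:
    from urllib import urlretrieve  # Python 2


def redownload_if_html(afile):
    # Ask `file` what the downloaded feed really is; an HTML answer means the
    # server returned an error page instead of a gzip archive.
    p = subprocess.Popen(['file', afile], shell=False,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    res_out, _ = p.communicate()
    if 'HTML' in res_out.decode('utf-8'):
        os.remove(afile)
        year = os.path.split(afile)[1].split('.')[0]  # feed files are named <year>.xml.gz
        url = 'https://static.nvd.nist.gov/feeds/xml/cve/2.0/nvdcve-2.0-{0}.xml.gz'.format(year)
        urlretrieve(url, afile)  # fetch the feed again

# usage (placeholder path): redownload_if_html('rules/2018.xml.gz')

A pure-Python alternative would be to sniff the gzip magic bytes (\x1f\x8b) at the start of the file instead of shelling out to `file`.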
162 changes: 134 additions & 28 deletions git_projects.py
@@ -15,8 +15,10 @@
import requests
import re
import threading
import argparse
from cobra.log import logger
from cobra.config import code_path, Config
from cobra.__version__ import __epilog_git__, __introduction_git__

try:
# Python 3
@@ -25,40 +27,77 @@
# Python 2
import Queue as queue


git_urls = []


def start():
def start(target, format, output, rules, dels, all):
"""
start push target to api
:param target:
:param format:
:param output:
:param rules:
:param dels:
:param all:
:return:
"""
url = Config('git', 'gitlab_url').value
private_token = Config('git', 'private_token').value
cobra_ip = Config('git', 'cobra_ip').value
key = Config('cobra', 'secret_key').value
threads = []
pages = get_pages(url, private_token)
q_pages = queue.Queue(pages)
result_path = code_path + '/result_sid'
fi = open(result_path, 'w+')
for i in range(int(pages)):
q_pages.put(i + 1)
fi = open(result_path, 'a+')

for i in range(10):
thread = threading.Thread(target=get_git_urls, args=(url, private_token, q_pages, fi))
thread.start()
threads.append(thread)
try:
if all is False and target != '':
if isinstance(target, list):
for tar in target:
fi.write(tar + '\n')
else:
fi.write(target + '\n')

for thread in threads:
thread.join()
res = push_to_api(target, cobra_ip, key, fi, format, output, rules, dels)

res = push_to_api(git_urls, cobra_ip, key, fi)
elif all is True and target == '':
pages = get_pages(url, private_token)
q_pages = queue.Queue(pages)

if res:
logger.info("Git push success: {}".format(len(git_urls)))
else:
logger.info("Git push fail")
for i in range(int(pages)):
q_pages.put(i + 1)

for i in range(10):
thread = threading.Thread(target=get_git_urls, args=(url, private_token, q_pages, fi))
thread.start()
threads.append(thread)

for thread in threads:
thread.join()

res = push_to_api(git_urls, cobra_ip, key, fi, format, output, rules, dels)

else:
res = False

if res:
logger.info("[GIT-PRO] Git push success")
logger.info("[GIT-PRO] All projects have been pushed")
else:
logger.warning("[GIT-PRO] Git push fail")

fi.close()

except requests.exceptions.MissingSchema:
logger.warning('[GIT-PRO] Please set gitlab_url and private_token in the config file')

fi.close()
logger.info("All projects have been pushed")
except requests.exceptions.ConnectionError:
logger.warning('[GIT-PRO] Please check that cobra_ip and gitlab_url are correct')

except requests.exceptions.InvalidSchema:
logger.warning('[GIT-PRO] Please add http:// before the cobra_ip or gitlab_url')

except Exception as e:
logger.warning('[GIT-PRO] {}'.format(e))


def get_git_urls(url, private_token, q_pages, fi):
@@ -90,39 +129,69 @@ def get_git_urls(url, private_token, q_pages, fi):
git_urls.append(request_url)

elif r.status_code == 404:
logger.warning("page %d 404" % page)
logger.warning("[GIT-PRO] page %d 404" % page)

else:
logger.warning("page %d is %d" % page, r.status_code)
logger.warning("[GIT-PRO] page %d is %d" % page, r.status_code)
q_pages.task_done()


def request_target(target_url, params=None, header=None, method="get"):
"""
start request
:param target_url:
:param params:
:param header:
:param method:
:return:
"""
if method == "get":
response = requests.get(url=target_url, params=params, headers=header)
return response

if method == "post":
response = requests.post(url=target_url, data=json.dumps(params), headers=header)
return response


def push_to_api(urls, cobra_ip, key, fi):
def push_to_api(urls, cobra_ip, key, fi, format, output, rules, dels):
"""
:param urls:
:param cobra_ip:
:param key:
:param fi:
:param format:
:param output:
:param rules:
:param dels:
:return:
"""
headers = {"Content-Type": "application/json"}
url = cobra_ip + "/api/add"
payload = {"key": key, "target": urls, "dels": True, "rule": "cvi-190009"}
payload = {"key": key, "target": urls, "dels": dels, "formatter": format, "output": output,
"rule": rules}
r = request_target(url, payload, headers, method="post")

if r.status_code == 200:
fi.write(str(r.json()) + '\n')
logger.info(r.json())
logger.info('[GIT-PRO] ' + str(r.json()))
return True

elif r.status_code == 404:
logger.info("The page is 404")
logger.info("[GIT-PRO] The page is 404")

else:
logger.info(r.json())
logger.info('[GIT-PRO] ' + str(r.json()))
return False
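
To make the API contract concrete, here is a hedged sketch of the request push_to_api() ends up sending, assuming a Cobra API server at http://127.0.0.1:5000 (placeholder); the payload keys come from the code above and all values are illustrative.

import json
import requests

payload = {
    "key": "your_secret_key",                             # [cobra] secret_key from the config
    "target": ["https://github.com/ethicalhack3r/DVWA"],  # one or more git addresses
    "dels": False,                                        # delete the cloned project after scanning
    "formatter": "json",                                  # json / csv / xml
    "output": "/tmp/report.json",                         # stream, file path, HTTP API URL or mail
    "rule": "cvi-190001,cvi-190002",                      # comma-separated rule ids, or None for all rules
}
r = requests.post("http://127.0.0.1:5000/api/add",
                  data=json.dumps(payload),
                  headers={"Content-Type": "application/json"})
print(r.status_code, r.json())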


def get_pages(url, private_token):
"""
get the pages num
:param url:
:param private_token:
:return:
"""
params = {"private_token": private_token}
response = request_target(url, params)
res = response.headers['link'].split(",")
@@ -132,5 +201,42 @@ def get_pages(url, private_token):
return pages
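
get_pages() reads the page count from the pagination headers of the GitLab projects API. As an illustration, GitLab returns an RFC 5988 Link header; below is a hedged sketch of pulling the last page number out of such a header (the header value is made up):

import re

link = ('<http://gitlab.example.com/api/v4/projects?page=2&per_page=20>; rel="next", '
        '<http://gitlab.example.com/api/v4/projects?page=1&per_page=20>; rel="first", '
        '<http://gitlab.example.com/api/v4/projects?page=57&per_page=20>; rel="last"')

# The entry tagged rel="last" carries the total number of pages.
last_entry = [part for part in link.split(",") if 'rel="last"' in part][0]
pages = int(re.search(r'[?&]page=(\d+)', last_entry).group(1))
print(pages)  # 57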


def _check_rule_name(name):
return re.match(r'^(cvi|CVI)-\d{6}(\.xml)?', name.strip()) is not None
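
A quick illustration of what the rule-name check accepts; the regex is copied from _check_rule_name above so the snippet is self-contained, and the inputs are hypothetical:

import re

def _check_rule_name(name):
    # same pattern as the helper above
    return re.match(r'^(cvi|CVI)-\d{6}(\.xml)?', name.strip()) is not None

print(_check_rule_name('CVI-110001'))      # True
print(_check_rule_name('cvi-190001.xml'))  # True
print(_check_rule_name('CVI-1001'))        # False (fewer than six digits)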


if __name__ == '__main__':
start()
special_rules = []

parser = argparse.ArgumentParser(prog='git_projects', epilog=__epilog_git__, description=__introduction_git__, formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('-t', '--target', dest='target', action='store', default='', metavar='<target>', help='The git address or git list, e.g: test/vul/v.php,test/vul/v.java')
parser.add_argument('-f', '--format', dest='format', action='store', default='json', metavar='<format>', choices=['json', 'csv', 'xml'], help='report output format')
parser.add_argument('-o', '--output', dest='output', action='store', default='', metavar='<output>', help='report output STREAM, FILE, HTTP API URL, MAIL')
parser.add_argument('-r', '--rule', dest='rules', action='store', default=None, metavar='<rule_id>', help='specifies rules e.g: CVI-100001,cvi-190001')
parser.add_argument('-d', '--dels', dest='dels', action='store_true', default=False, help='delete the target directory after scanning')
parser.add_argument('-a', '--all', dest='all', action='store_true', default=False, help='Git push all git-projects from gitlab')
args = parser.parse_args()

if args.target == '' and args.all is False:
parser.print_help()
exit()

if ',' in args.target:
targets = args.target.split(',')
else:
targets = args.target

try:
if ',' in args.rules:
rules = args.rules.split(',')
for rule in rules:
if _check_rule_name(rule) is False:
logger.critical('[GIT-PRO] Invalid special rule name (e.g. CVI-110001): {sr}'.format(sr=rule))
else:
if _check_rule_name(args.rules) is False:
logger.critical('[GIT-PRO] Invalid special rule name (e.g. CVI-110001): {sr}'.format(sr=args.rules))

except TypeError:
logger.info('[GIT-PRO] No rules specified, Cobra will use all rules to scan')

start(targets, args.format, args.output, args.rules, args.dels, args.all)
6 changes: 4 additions & 2 deletions tests/test_cve_parse.py
@@ -112,15 +112,17 @@ def test_download_rule_gz():
files = download_rule_gz()
assert isinstance(files, list)
for file_ in files:
os.remove(file_)
if os.path.exists(file_):
os.remove(file_)


def test_un_gz():
files = download_rule_gz()
res = un_gz(files)
assert res is True
for year in range(2002, datetime.datetime.now().year+1):
os.remove(project_directory+"/rules/%d.xml" % year)
if os.path.exists(project_directory+"/rules/%d.xml" % year):
os.remove(project_directory+"/rules/%d.xml" % year)


def test_rule_single():
