diff --git a/blockstack_zones/__init__.py b/blockstack_zones/__init__.py
index 558e299..dcfaadb 100644
--- a/blockstack_zones/__init__.py
+++ b/blockstack_zones/__init__.py
@@ -1,3 +1,4 @@
-from parse_zone_file import parse_zone_file
-from make_zone_file import make_zone_file
-from exceptions import InvalidLineException
+# flake8: noqa
+from .parse_zone_file import parse_zone_file
+from .make_zone_file import make_zone_file
+from .exceptions import InvalidLineException
diff --git a/blockstack_zones/configs.py b/blockstack_zones/configs.py
index 817746e..7e7bad5 100644
--- a/blockstack_zones/configs.py
+++ b/blockstack_zones/configs.py
@@ -28,4 +28,4 @@
 {spf}\n\
 \n\
 {uri}\n\
-"""
\ No newline at end of file
+"""
diff --git a/blockstack_zones/exceptions.py b/blockstack_zones/exceptions.py
index 0776e4b..73cbdb3 100644
--- a/blockstack_zones/exceptions.py
+++ b/blockstack_zones/exceptions.py
@@ -1,2 +1,2 @@
 class InvalidLineException(Exception):
-    pass
\ No newline at end of file
+    pass
diff --git a/blockstack_zones/make_zone_file.py b/blockstack_zones/make_zone_file.py
index 81ea73f..e512d8d 100644
--- a/blockstack_zones/make_zone_file.py
+++ b/blockstack_zones/make_zone_file.py
@@ -35,7 +35,7 @@ def make_zone_file(json_zone_file_input, origin=None, ttl=None, template=None):
     # careful...
     json_zone_file = copy.deepcopy(json_zone_file_input)
     if origin is not None:
-        json_zone_file['$origin'] = origin 
+        json_zone_file['$origin'] = origin
 
     if ttl is not None:
         json_zone_file['$ttl'] = ttl
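Reviewer note on blockstack_zones/__init__.py: Python 3 drops implicit relative imports, so the bare `from parse_zone_file import ...` form only resolves on Python 2; the explicit dot-prefixed form works on both interpreters. A minimal usage sketch of the unchanged public API follows (record shapes borrowed from the sample data in the tests; not part of the diff):

    # Usage sketch (reviewer annotation, not part of the diff): with the explicit
    # relative imports above, the package imports identically on 2.x and 3.x.
    from blockstack_zones import make_zone_file, parse_zone_file

    zone_data = {
        "$origin": "EXAMPLE.COM.",
        "$ttl": 3600,
        "a": [{"name": "@", "ip": "127.0.0.1"}],
    }
    text = make_zone_file(zone_data)   # dict -> zone file text
    parsed = parse_zone_file(text)     # zone file text -> dict
    print(parsed["a"][0]["ip"])        # expected: 127.0.0.1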
diff --git a/blockstack_zones/parse_zone_file.py b/blockstack_zones/parse_zone_file.py
index aeb3b37..17fc1f6 100644
--- a/blockstack_zones/parse_zone_file.py
+++ b/blockstack_zones/parse_zone_file.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python 
+#!/usr/bin/python
 
 """
 Known limitations:
@@ -10,13 +10,10 @@
     'TXT', 'SRV', 'SPF', 'URI'
 """
 
-import copy
-import datetime
-import time
 import argparse
 from collections import defaultdict
 
-from .configs import SUPPORTED_RECORDS, DEFAULT_TEMPLATE
+from .configs import SUPPORTED_RECORDS  # flake8: noqa
 from .exceptions import InvalidLineException
@@ -73,7 +70,8 @@ def make_parser():
     make_rr_subparser(subparsers, "MX", [("preference", str), ("host", str)])
     make_rr_subparser(subparsers, "TXT", [("txt", str)])
     make_rr_subparser(subparsers, "PTR", [("host", str)])
-    make_rr_subparser(subparsers, "SRV", [("priority", int), ("weight", int), ("port", int), ("target", str)])
+    make_rr_subparser(subparsers, "SRV", [("priority", int), ("weight", int), ("port", int),
+                                          ("target", str)])
     make_rr_subparser(subparsers, "SPF", [("data", str)])
     make_rr_subparser(subparsers, "URI", [("priority", int), ("weight", int), ("target", str)])
@@ -131,11 +129,11 @@ def tokenize_line(line):
             continue
         elif c == ';':
             if not escape:
-                # comment 
+                # comment
                 ret.append(tokbuf)
                 tokbuf = ""
                 break
-            
+
         # normal character
         tokbuf += c
         escape = False
@@ -173,7 +171,7 @@ def remove_comments(text):
     lines = text.split("\n")
     for line in lines:
         if len(line) == 0:
-            continue 
+            continue
 
         line = serialize(tokenize_line(line))
         ret.append(line)
@@ -185,7 +183,7 @@ def flatten(text):
     """
     Flatten the text:
     * make sure each record is on one line.
-    * remove parenthesis 
+    * remove parenthesis
     """
 
     lines = text.split("\n")
@@ -193,10 +191,10 @@ def flatten(text):
     tokens = []
     for l in lines:
         if len(l) == 0:
-            continue 
+            continue
 
         l = l.replace("\t", " ")
-        tokens += filter(lambda x: len(x) > 0, l.split(" ")) + ['']
+        tokens += list(filter(lambda x: len(x) > 0, l.split(" "))) + ['']
 
     # find (...) and turn it into a single line ("capture" it)
     capturing = False
@@ -210,7 +208,7 @@ def flatten(text):
             if len(captured) > 0:
                 flattened.append(" ".join(captured))
                 captured = []
-            continue 
+            continue
 
         if tok.startswith("("):
             # begin grouping
@@ -220,7 +218,7 @@ def flatten(text):
         if capturing and tok.endswith(")"):
             # end grouping. next end-of-line will turn this sequence into a flat line
             tok = tok.rstrip(")")
-            capturing = False 
+            capturing = False
 
         captured.append(tok)
@@ -257,7 +255,7 @@ def remove_class(text):
 
 def add_default_name(text):
     """
-    Go through each line of the text and ensure that 
+    Go through each line of the text and ensure that
     a name is defined.  Use '@' if there is none.
     """
     global SUPPORTED_RECORDS
@@ -271,7 +269,7 @@ def add_default_name(text):
 
         if tokens[0] in SUPPORTED_RECORDS and not tokens[0].startswith("$"):
             # add back the name
-            tokens = ['@'] + tokens 
+            tokens = ['@'] + tokens
 
         ret.append(serialize(tokens))
@@ -280,7 +278,7 @@ def parse_line(parser, record_token, parsed_records):
     """
-    Given the parser, capitalized list of a line's tokens, and the current set of records 
+    Given the parser, capitalized list of a line's tokens, and the current set of records
     parsed so far, parse it into a dictionary.
 
     Return the new set of parsed records.
@@ -303,7 +301,7 @@ def parse_line(parser, record_token, parsed_records):
         rr, unmatched = parser.parse_known_args(record_token)
         assert len(unmatched) == 0, "Unmatched fields: %s" % unmatched
     except (SystemExit, AssertionError, InvalidLineException):
-        # invalid argument 
+        # invalid argument
         raise InvalidLineException(line)
 
     record_dict = rr.__dict__
@@ -320,7 +318,7 @@ def parse_line(parser, record_token, parsed_records):
     assert record_type is not None, "Unknown record type in %s" % rr
 
     # clean fields
-    for field in record_dict.keys():
+    for field in list(record_dict.keys()):
         if record_dict[field] is None:
             del record_dict[field]
@@ -329,7 +327,7 @@ def parse_line(parser, record_token, parsed_records):
     # special record-specific fix-ups
     if record_type == 'PTR':
         record_dict['fullname'] = record_dict['name'] + '.' + current_origin
-    
+
     if len(record_dict) > 0:
         if record_type.startswith("$"):
             # put the value directly
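Reviewer note on the parse_zone_file.py changes above: on Python 3, filter() returns a lazy iterator (so it cannot be concatenated with a list), and dict.keys() returns a view (so entries cannot be deleted while iterating it). Wrapping both in list() keeps the Python 2 behavior. A self-contained illustration, not part of the diff:

    # Why the two list() wrappers introduced above are needed on Python 3.
    line = "@  IN  A   127.0.0.1"
    tokens = []
    # filter() is lazy on Python 3; materialize it before list concatenation.
    tokens += list(filter(lambda x: len(x) > 0, line.split(" "))) + ['']

    record = {"name": "@", "ttl": None, "ip": "127.0.0.1"}
    # dict.keys() is a view on Python 3; copy it before deleting entries.
    for field in list(record.keys()):
        if record[field] is None:
            del record[field]
    print(tokens, record)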
""" if data is None: - return None + return None data_dup = copy.deepcopy(data) - for i in xrange(0, len(data_dup)): + for i in range(0, len(data_dup)): data_dup[i][field] = '"%s"' % data_dup[i][field] data_dup[i][field] = data_dup[i][field].replace(";", "\;") @@ -107,15 +108,15 @@ def process_rr(data, record_type, record_keys, field, template): assert type(data) == list, "Data must be a list" record = "" - for i in xrange(0, len(data)): + for i in range(0, len(data)): for record_key in record_keys: assert record_key in data[i].keys(), "Missing '%s'" % record_key record_data = [] - record_data.append( str(data[i].get('name', '@')) ) + record_data.append(str(data[i].get('name', '@'))) if data[i].get('ttl') is not None: - record_data.append( str(data[i]['ttl']) ) + record_data.append(str(data[i]['ttl'])) record_data.append(record_type) record_data += [str(data[i][record_key]) for record_key in record_keys] @@ -193,6 +194,6 @@ def process_uri(data, template): """ Replace {uri} in templtae with the serialized URI records """ - # quote target + # quote target data_dup = quote_field(data, "target") return process_rr(data_dup, "URI", ["priority", "weight", "target"], "{uri}", template) diff --git a/circle.yml b/circle.yml new file mode 100644 index 0000000..3f7dfe2 --- /dev/null +++ b/circle.yml @@ -0,0 +1,4 @@ +dependencies: + override: + - pip install tox tox-pyenv + - pyenv local 2.7.10 3.4.3 3.5.0 diff --git a/setup.py b/setup.py index 57d4fcb..960cf69 100755 --- a/setup.py +++ b/setup.py @@ -19,6 +19,7 @@ packages=find_packages(), zip_safe=False, install_requires=[ + 'future' ], classifiers=[ 'Intended Audience :: Developers', diff --git a/tests/zonefile_forward.json b/tests/data/zonefile_forward.json similarity index 100% rename from tests/zonefile_forward.json rename to tests/data/zonefile_forward.json diff --git a/tests/zonefile_forward.txt b/tests/data/zonefile_forward.txt similarity index 100% rename from tests/zonefile_forward.txt rename to tests/data/zonefile_forward.txt diff --git a/tests/zonefile_reverse.json b/tests/data/zonefile_reverse.json similarity index 100% rename from tests/zonefile_reverse.json rename to tests/data/zonefile_reverse.json diff --git a/tests/zonefile_reverse.txt b/tests/data/zonefile_reverse.txt similarity index 100% rename from tests/zonefile_reverse.txt rename to tests/data/zonefile_reverse.txt diff --git a/tests/zonefile_reverse_ipv6.json b/tests/data/zonefile_reverse_ipv6.json similarity index 100% rename from tests/zonefile_reverse_ipv6.json rename to tests/data/zonefile_reverse_ipv6.json diff --git a/tests/zonefile_reverse_ipv6.txt b/tests/data/zonefile_reverse_ipv6.txt similarity index 100% rename from tests/zonefile_reverse_ipv6.txt rename to tests/data/zonefile_reverse_ipv6.txt diff --git a/test_sample_data.py b/tests/test_sample_data.py similarity index 68% rename from test_sample_data.py rename to tests/test_sample_data.py index 6919059..efa20f6 100644 --- a/test_sample_data.py +++ b/tests/test_sample_data.py @@ -77,33 +77,35 @@ "minimum": 86400 }, "ns": [ - { "host": "NS1.NAMESERVER.NET." }, - { "host": "NS2.NAMESERVER.NET." 
diff --git a/circle.yml b/circle.yml
new file mode 100644
index 0000000..3f7dfe2
--- /dev/null
+++ b/circle.yml
@@ -0,0 +1,4 @@
+dependencies:
+  override:
+    - pip install tox tox-pyenv
+    - pyenv local 2.7.10 3.4.3 3.5.0
diff --git a/setup.py b/setup.py
index 57d4fcb..960cf69 100755
--- a/setup.py
+++ b/setup.py
@@ -19,6 +19,7 @@
     packages=find_packages(),
     zip_safe=False,
     install_requires=[
+        'future'
     ],
     classifiers=[
         'Intended Audience :: Developers',
diff --git a/tests/zonefile_forward.json b/tests/data/zonefile_forward.json
similarity index 100%
rename from tests/zonefile_forward.json
rename to tests/data/zonefile_forward.json
diff --git a/tests/zonefile_forward.txt b/tests/data/zonefile_forward.txt
similarity index 100%
rename from tests/zonefile_forward.txt
rename to tests/data/zonefile_forward.txt
diff --git a/tests/zonefile_reverse.json b/tests/data/zonefile_reverse.json
similarity index 100%
rename from tests/zonefile_reverse.json
rename to tests/data/zonefile_reverse.json
diff --git a/tests/zonefile_reverse.txt b/tests/data/zonefile_reverse.txt
similarity index 100%
rename from tests/zonefile_reverse.txt
rename to tests/data/zonefile_reverse.txt
diff --git a/tests/zonefile_reverse_ipv6.json b/tests/data/zonefile_reverse_ipv6.json
similarity index 100%
rename from tests/zonefile_reverse_ipv6.json
rename to tests/data/zonefile_reverse_ipv6.json
diff --git a/tests/zonefile_reverse_ipv6.txt b/tests/data/zonefile_reverse_ipv6.txt
similarity index 100%
rename from tests/zonefile_reverse_ipv6.txt
rename to tests/data/zonefile_reverse_ipv6.txt
diff --git a/test_sample_data.py b/tests/test_sample_data.py
similarity index 68%
rename from test_sample_data.py
rename to tests/test_sample_data.py
index 6919059..efa20f6 100644
--- a/test_sample_data.py
+++ b/tests/test_sample_data.py
@@ -77,33 +77,35 @@
         "minimum": 86400
     },
     "ns": [
-        { "host": "NS1.NAMESERVER.NET." },
-        { "host": "NS2.NAMESERVER.NET." }
+        {"host": "NS1.NAMESERVER.NET."},
+        {"host": "NS2.NAMESERVER.NET."}
     ],
     "a": [
-        { "name": "@", "ip": "127.0.0.1" },
-        { "name": "www", "ip": "127.0.0.1" },
-        { "name": "mail", "ip": "127.0.0.1" }
+        {"name": "@", "ip": "127.0.0.1"},
+        {"name": "www", "ip": "127.0.0.1"},
+        {"name": "mail", "ip": "127.0.0.1"}
     ],
     "aaaa": [
-        { "ip": "::1" },
-        { "name": "mail", "ip": "2001:db8::1" }
+        {"ip": "::1"},
+        {"name": "mail", "ip": "2001:db8::1"}
     ],
     "cname": [
-        { "name": "mail1", "alias": "mail" },
-        { "name": "mail2", "alias": "mail" }
+        {"name": "mail1", "alias": "mail"},
+        {"name": "mail2", "alias": "mail"}
    ],
     "mx": [
-        { "preference": 0, "host": "mail1" },
-        { "preference": 10, "host": "mail2" }
+        {"preference": 0, "host": "mail1"},
+        {"preference": 10, "host": "mail2"}
     ],
     "txt": [
-        { "name": "txt1", "txt": "hello" },
-        { "name": "txt2", "txt": "world" }
+        {"name": "txt1", "txt": "hello"},
+        {"name": "txt2", "txt": "world"}
     ],
     "srv": [
-        { "name": "_xmpp-client._tcp", "target": "jabber", "priority": 10, "weight": 0, "port": 5222 },
-        { "name": "_xmpp-server._tcp", "target": "jabber", "priority": 10, "weight": 0, "port": 5269 }
+        {"name": "_xmpp-client._tcp", "target": "jabber", "priority": 10,
+         "weight": 0, "port": 5222},
+        {"name": "_xmpp-server._tcp", "target": "jabber", "priority": 10,
+         "weight": 0, "port": 5269}
     ]
 },
 "sample_3": {
@@ -119,25 +121,25 @@
         "minimum": 86400
     },
     "ns": [
-        { "host": "NS1.NAMESERVER.NET." },
-        { "host": "NS2.NAMESERVER.NET." }
+        {"host": "NS1.NAMESERVER.NET."},
+        {"host": "NS2.NAMESERVER.NET."}
     ],
     "a": [
-        { "name": "@", "ip": "127.0.0.1" },
-        { "name": "www", "ip": "127.0.0.1" },
-        { "name": "mail", "ip": "127.0.0.1" }
+        {"name": "@", "ip": "127.0.0.1"},
+        {"name": "www", "ip": "127.0.0.1"},
+        {"name": "mail", "ip": "127.0.0.1"}
     ],
     "aaaa": [
-        { "ip": "::1" },
-        { "name": "mail", "ip": "2001:db8::1" }
+        {"ip": "::1"},
+        {"name": "mail", "ip": "2001:db8::1"}
     ],
-    "cname":[
-        { "name": "mail1", "alias": "mail" },
-        { "name": "mail2", "alias": "mail" }
+    "cname": [
+        {"name": "mail1", "alias": "mail"},
+        {"name": "mail2", "alias": "mail"}
     ],
-    "mx":[
-        { "preference": 0, "host": "mail1" },
-        { "preference": 10, "host": "mail2" }
+    "mx": [
+        {"preference": 0, "host": "mail1"},
+        {"preference": 10, "host": "mail2"}
     ]
 }
-}
\ No newline at end of file
+}
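Reviewer note on the unit-test changes below: the stdlib test helper lives at `test.test_support` on Python 2 but `test.support` on Python 3, hence the guarded import, and `unicode` is gone on Python 3, so the type checks switch to `(str, bytes)`. A minimal sketch, not part of the diff:

    # Sketch of the guarded import and the unicode-free isinstance check.
    try:
        from test import support                    # Python 3 location
    except ImportError:
        from test import test_support as support    # Python 2 location

    # support.run_unittest(...) then drives the suite, as in tests/unit_tests.py.
    value = "www.example.com."
    print(isinstance(value, (str, bytes)))          # True on 2.x and 3.x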
diff --git a/unit_tests.py b/tests/unit_tests.py
similarity index 79%
rename from unit_tests.py
rename to tests/unit_tests.py
index e01d466..7b78f2a 100644
--- a/unit_tests.py
+++ b/tests/unit_tests.py
@@ -1,10 +1,16 @@
 import json
-import traceback
 import unittest
-from test import test_support
+from builtins import str, bytes
+try:
+    from test import support
+except ImportError:
+    # Try to import from the Python2 path
+    from test import test_support as support
+
 from blockstack_zones import make_zone_file, parse_zone_file
 from test_sample_data import zone_files, zone_file_objects
 
+
 class ZoneFileTests(unittest.TestCase):
     def setUp(self):
         pass
@@ -15,8 +21,8 @@ def tearDown(self):
     def test_zone_file_creation_1(self):
         json_zone_file = zone_file_objects["sample_1"]
         zone_file = make_zone_file(json_zone_file)
-        print zone_file
-        self.assertTrue(isinstance(zone_file, (unicode, str)))
+        print(zone_file)
+        self.assertTrue(isinstance(zone_file, (str, bytes)))
         self.assertTrue("$ORIGIN" in zone_file)
         self.assertTrue("$TTL" in zone_file)
         self.assertTrue("@ 1D URI" in zone_file)
@@ -24,8 +30,8 @@ def test_zone_file_creation_1(self):
     def test_zone_file_creation_2(self):
         json_zone_file = zone_file_objects["sample_2"]
         zone_file = make_zone_file(json_zone_file)
-        print zone_file
-        self.assertTrue(isinstance(zone_file, (unicode, str)))
+        print(zone_file)
+        self.assertTrue(isinstance(zone_file, (str, bytes)))
         self.assertTrue("$ORIGIN" in zone_file)
         self.assertTrue("$TTL" in zone_file)
         self.assertTrue("@ IN SOA" in zone_file)
@@ -33,15 +39,15 @@ def test_zone_file_creation_2(self):
     def test_zone_file_creation_3(self):
         json_zone_file = zone_file_objects["sample_3"]
         zone_file = make_zone_file(json_zone_file)
-        print zone_file
-        self.assertTrue(isinstance(zone_file, (unicode, str)))
+        print(zone_file)
+        self.assertTrue(isinstance(zone_file, (str, bytes)))
         self.assertTrue("$ORIGIN" in zone_file)
         self.assertTrue("$TTL" in zone_file)
         self.assertTrue("@ IN SOA" in zone_file)
 
     def test_zone_file_parsing_1(self):
         zone_file = parse_zone_file(zone_files["sample_1"])
-        print json.dumps(zone_file, indent=2)
+        print(json.dumps(zone_file, indent=2))
         self.assertTrue(isinstance(zone_file, dict))
         self.assertTrue("a" in zone_file)
         self.assertTrue("cname" in zone_file)
@@ -51,7 +57,7 @@ def test_zone_file_parsing_1(self):
 
     def test_zone_file_parsing_2(self):
         zone_file = parse_zone_file(zone_files["sample_2"])
-        #print json.dumps(zone_file, indent=2)
+        # print(json.dumps(zone_file, indent=2))
         self.assertTrue(isinstance(zone_file, dict))
         self.assertTrue("a" in zone_file)
         self.assertTrue("cname" in zone_file)
@@ -60,7 +66,7 @@ def test_zone_file_parsing_2(self):
 
     def test_zone_file_parsing_3(self):
         zone_file = parse_zone_file(zone_files["sample_3"])
-        #print json.dumps(zone_file, indent=2)
+        # print(json.dumps(zone_file, indent=2))
         self.assertTrue(isinstance(zone_file, dict))
         self.assertTrue("soa" in zone_file)
         self.assertTrue("mx" in zone_file)
@@ -70,8 +76,9 @@ def test_zone_file_parsing_3(self):
         self.assertTrue("$ttl" in zone_file)
         self.assertTrue("$origin" in zone_file)
 
+
 def test_main():
-    test_support.run_unittest(
+    support.run_unittest(
         ZoneFileTests
     )
 
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 0000000..ac853c5
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,25 @@
+# Tox (http://tox.testrun.org/) is a tool for running tests
+# in multiple virtualenvs. This configuration file will run the
+# test suite on all supported python versions. To use it, "pip install tox"
+# and then run "tox" from this directory.
+
+[tox]
+envlist = style, py27, py34, py35
+
+[testenv]
+deps = nose
+commands = nosetests
+# commands = py.test --cov-report=html --cov=blockstack_zones {posargs}
+
+[testenv:style]
+basepython=python
+deps = pylint
+       flake8
+commands=pylint blockstack_zones {posargs: -E}
+         flake8 blockstack_zones tests
+
+[flake8]
+max-line-length = 100
+
+[pytest]
+norecursedirs = .* build *.egg venv
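For completeness, a tox-free way to exercise the same suite locally (a sketch assuming the tests/ layout introduced by the renames above; the tox entry points remain the canonical way to run the matrix):

    # Rough local equivalent of the "commands = nosetests" tox entry.
    import unittest

    if __name__ == "__main__":
        suite = unittest.defaultTestLoader.discover("tests", pattern="unit_tests.py")
        unittest.TextTestRunner(verbosity=2).run(suite)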