From 88d4aa043c5a305adecf28348b193b60a0836633 Mon Sep 17 00:00:00 2001 From: janpisl Date: Sun, 12 Aug 2018 20:16:12 +0200 Subject: [PATCH 01/21] Allow storing outputs in database Implement functionality for storing output data from run processes in a PostgreSQL or SQLite database --- .gitignore | 2 + default-sample.cfg | 15 +- pywps/configuration.py | 3 + pywps/inout/formats/__init__.py | 83 +++-- pywps/inout/outputs.py | 21 +- pywps/inout/storage/__init__.py | 49 +++ pywps/inout/storage/db/__init__.py | 154 ++++++++ pywps/inout/storage/db/pg.py | 74 ++++ pywps/inout/storage/db/sqlite.py | 31 ++ pywps/inout/{storage.py => storage/file.py} | 45 +-- tests/data/other/corn.csv | 106 ++++++ tests/data/other/test.txt | 2 + tests/test_formats.py | 1 + tests/test_storage.py | 371 ++++++++++++++++++++ 14 files changed, 887 insertions(+), 70 deletions(-) create mode 100644 pywps/inout/storage/__init__.py create mode 100644 pywps/inout/storage/db/__init__.py create mode 100644 pywps/inout/storage/db/pg.py create mode 100644 pywps/inout/storage/db/sqlite.py rename pywps/inout/{storage.py => storage/file.py} (81%) create mode 100644 tests/data/other/corn.csv create mode 100644 tests/data/other/test.txt create mode 100644 tests/test_storage.py diff --git a/.gitignore b/.gitignore index f2cf27245..6de62bd55 100644 --- a/.gitignore +++ b/.gitignore @@ -16,3 +16,5 @@ docs/_build *.orig .coverage +example.db +*.cfg diff --git a/default-sample.cfg b/default-sample.cfg index 4d96cc327..db45784a9 100644 --- a/default-sample.cfg +++ b/default-sample.cfg @@ -28,9 +28,10 @@ maxrequestsize=3mb url=http://localhost:5000/wps outputurl=http://localhost:5000/outputs/ outputpath=outputs -workdir=workdir +workdir=/tmp maxprocesses=10 parallelprocesses=2 +store_type=db [processing] mode=default @@ -44,3 +45,15 @@ format=%(asctime)s] [%(levelname)s] file=%(pathname)s line=%(lineno)s module=%(m [grass] gisbase=/usr/local/grass-7.3.svn/ + + +[db] +db_type=pg +dbname=dbname +user=username +password=password +host=localhost +port=5432 +dblocation=/tmp/db.sqlite +schema_name=test_schema + diff --git a/pywps/configuration.py b/pywps/configuration.py index 6ac8351b0..5771434ad 100755 --- a/pywps/configuration.py +++ b/pywps/configuration.py @@ -130,6 +130,9 @@ def load_configuration(cfgfiles=None): CONFIG.add_section('grass') CONFIG.set('grass', 'gisbase', '') + + CONFIG.add_section('db') + if not cfgfiles: cfgfiles = _get_default_config_files_location() diff --git a/pywps/inout/formats/__init__.py b/pywps/inout/formats/__init__.py index 9fbf51f56..07c02446e 100644 --- a/pywps/inout/formats/__init__.py +++ b/pywps/inout/formats/__init__.py @@ -16,11 +16,22 @@ from pywps.validator.mode import MODE from pywps.validator.base import emptyvalidator - _FORMATS = namedtuple('FORMATS', 'GEOJSON, JSON, SHP, GML, GEOTIFF, WCS,' 'WCS100, WCS110, WCS20, WFS, WFS100,' 'WFS110, WFS20, WMS, WMS130, WMS110,' - 'WMS100, TEXT, NETCDF, LAZ, LAS') + 'WMS100, TEXT, CSV, NETCDF, LAZ, LAS') + +# this should be Enum type (only compatible with Python 3) +class DATA_TYPE(object): + VECTOR = 0 + RASTER = 1 + OTHER = 2 + + def is_valid_datatype(data_type): + + known_values = [datatype for datatype in DATA_TYPE] + if data_type not in known_values: + raise Exception("Unknown data type") class Format(object): @@ -36,7 +47,7 @@ class Format(object): :param str extension: file extension """ - def __init__(self, mime_type, + def __init__(self, mime_type, data_type=None, schema=None, encoding=None, validate=emptyvalidator, mode=MODE.SIMPLE, extension=None): @@ 
-47,12 +58,14 @@ def __init__(self, mime_type, self._encoding = None self._schema = None self._extension = None + self._data_type = None self.mime_type = mime_type self.encoding = encoding self.schema = schema self.validate = validate self.extension = extension + self.data_type = data_type @property def mime_type(self): @@ -62,6 +75,23 @@ def mime_type(self): return self._mime_type + + @property + def data_type(self): + """Get format data type + """ + + return self._data_type + + + @data_type.setter + def data_type(self, data_type): + """Set format encoding + """ + + self._data_type = data_type + + @mime_type.setter def mime_type(self, mime_type): """Set format mime type @@ -143,7 +173,8 @@ def json(self): 'mime_type': self.mime_type, 'encoding': self.encoding, 'schema': self.schema, - 'extension': self.extension + 'extension': self.extension, + 'data_type': self.data_type } @json.setter @@ -156,30 +187,32 @@ def json(self, jsonin): self.encoding = jsonin['encoding'] self.schema = jsonin['schema'] self.extension = jsonin['extension'] + self.data_type = jsonin['data_type'] FORMATS = _FORMATS( - Format('application/vnd.geo+json', extension='.geojson'), - Format('application/json', extension='.json'), - Format('application/x-zipped-shp', extension='.zip'), - Format('application/gml+xml', extension='.gml'), - Format('image/tiff; subtype=geotiff', extension='.tiff'), - Format('application/xogc-wcs', extension='.xml'), - Format('application/x-ogc-wcs; version=1.0.0', extension='.xml'), - Format('application/x-ogc-wcs; version=1.1.0', extension='.xml'), - Format('application/x-ogc-wcs; version=2.0', extension='.xml'), - Format('application/x-ogc-wfs', extension='.xml'), - Format('application/x-ogc-wfs; version=1.0.0', extension='.xml'), - Format('application/x-ogc-wfs; version=1.1.0', extension='.xml'), - Format('application/x-ogc-wfs; version=2.0', extension='.xml'), - Format('application/x-ogc-wms', extension='.xml'), - Format('application/x-ogc-wms; version=1.3.0', extension='.xml'), - Format('application/x-ogc-wms; version=1.1.0', extension='.xml'), - Format('application/x-ogc-wms; version=1.0.0', extension='.xml'), - Format('text/plain', extension='.txt'), - Format('application/x-netcdf', extension='.nc'), - Format('application/octet-stream', extension='.laz'), - Format('application/octet-stream', extension='.las'), + Format('application/vnd.geo+json', extension='.geojson', data_type=DATA_TYPE.VECTOR), + Format('application/json', extension='.json', data_type=DATA_TYPE.VECTOR), + Format('application/x-zipped-shp', extension='.zip', data_type=DATA_TYPE.VECTOR), + Format('application/gml+xml', extension='.gml', data_type=DATA_TYPE.VECTOR), + Format('image/tiff; subtype=geotiff', extension='.tiff', data_type=DATA_TYPE.RASTER), + Format('application/xogc-wcs', extension='.xml', data_type=DATA_TYPE.VECTOR), + Format('application/x-ogc-wcs; version=1.0.0', extension='.xml', data_type=DATA_TYPE.VECTOR), + Format('application/x-ogc-wcs; version=1.1.0', extension='.xml', data_type=DATA_TYPE.VECTOR), + Format('application/x-ogc-wcs; version=2.0', extension='.xml', data_type=DATA_TYPE.VECTOR), + Format('application/x-ogc-wfs', extension='.xml', data_type=DATA_TYPE.VECTOR), + Format('application/x-ogc-wfs; version=1.0.0', extension='.xml', data_type=DATA_TYPE.VECTOR), + Format('application/x-ogc-wfs; version=1.1.0', extension='.xml', data_type=DATA_TYPE.VECTOR), + Format('application/x-ogc-wfs; version=2.0', extension='.xml', data_type=DATA_TYPE.VECTOR), + Format('application/x-ogc-wms', 
extension='.xml', data_type=DATA_TYPE.VECTOR), + Format('application/x-ogc-wms; version=1.3.0', extension='.xml', data_type=DATA_TYPE.VECTOR), + Format('application/x-ogc-wms; version=1.1.0', extension='.xml', data_type=DATA_TYPE.VECTOR), + Format('application/x-ogc-wms; version=1.0.0', extension='.xml', data_type=DATA_TYPE.VECTOR), + Format('text/plain', extension='.txt', data_type=DATA_TYPE.OTHER), + Format('text/csv', extension='.csv', data_type=DATA_TYPE.OTHER), + Format('application/x-netcdf', extension='.nc', data_type=DATA_TYPE.VECTOR), + Format('application/octet-stream', extension='.laz', data_type=DATA_TYPE.VECTOR), + Format('application/octet-stream', extension='.las', data_type=DATA_TYPE.VECTOR), ) diff --git a/pywps/inout/outputs.py b/pywps/inout/outputs.py index 092a96633..c909c788f 100644 --- a/pywps/inout/outputs.py +++ b/pywps/inout/outputs.py @@ -9,7 +9,9 @@ import lxml.etree as etree import six from pywps.inout import basic -from pywps.inout.storage import FileStorage +from pywps.inout.storage.file import FileStorage +from pywps.inout.storage.db import DbStorage +from pywps import configuration as config from pywps.validator.mode import MODE @@ -99,9 +101,24 @@ def _json_reference(self, data): data["type"] = "reference" # get_url will create the file and return the url for it - self.storage = FileStorage() data["href"] = self.get_url() + store_type = config.get_config_value('server', 'store_type') + self.storage = None + if store_type == 'db': + db_storage_instance = DbStorage() + self.storage = db_storage_instance.get_db_type() + else: + self.storage = FileStorage() + + + """ + to be implemented: + elif store_type == 's3' and \ + config.get_config_value('s3', 'bucket_name'): + self.storage = S3Storage() + """ + if self.data_format: if self.data_format.mime_type: data['mimetype'] = self.data_format.mime_type diff --git a/pywps/inout/storage/__init__.py b/pywps/inout/storage/__init__.py new file mode 100644 index 000000000..2024410f0 --- /dev/null +++ b/pywps/inout/storage/__init__.py @@ -0,0 +1,49 @@ +################################################################## +# Copyright 2018 Open Source Geospatial Foundation and others # +# licensed under MIT, Please consult LICENSE.txt for details # +################################################################## + +from abc import ABCMeta, abstractmethod + + + +class STORE_TYPE: + PATH = 0 + DB = 1 + + +class StorageAbstract(object): + """Data storage abstract class + """ + + __metaclass__ = ABCMeta + + @abstractmethod + def store(self, output): + """ + :param output: of type IOHandler + :returns: (type, store, url) where + type - is type of STORE_TYPE - number + store - string describing storage - file name, database connection + url - url, where the data can be downloaded + """ + pass + + + +class DummyStorage(StorageAbstract): + """Dummy empty storage implementation, does nothing + + Default instance, for non-reference output request + + >>> store = DummyStorage() + >>> assert store.store + """ + + def __init__(self): + """ + """ + + def store(self, ouput): + pass + diff --git a/pywps/inout/storage/db/__init__.py b/pywps/inout/storage/db/__init__.py new file mode 100644 index 000000000..801bc5963 --- /dev/null +++ b/pywps/inout/storage/db/__init__.py @@ -0,0 +1,154 @@ +################################################################## +# Copyright 2018 Open Source Geospatial Foundation and others # +# licensed under MIT, Please consult LICENSE.txt for details # 
+################################################################## + +import logging +from abc import ABCMeta, abstractmethod +from pywps import configuration as config +from pywps.inout.storage import db +from pywps.inout.formats import DATA_TYPE +from .. import STORE_TYPE +from .. import StorageAbstract +import sqlalchemy + + +LOGGER = logging.getLogger('PYWPS') + + +class DbStorage(StorageAbstract): + + def __init__(self): + # get db_type from configuration + try: + self.db_type = config.get_config_value('db', 'db_type').lower() + except KeyError: + raise exception("Database type has not been specified") + + + + @staticmethod + def get_db_type(): + from . import sqlite + from . import pg + # create an instance of the appropriate class + db_type = config.get_config_value('db', 'db_type').lower() + if db_type == "pg": + storage = pg.PgStorage() + elif db_type == "sqlite": + storage = sqlite.SQLiteStorage() + else: + raise Exception("Unknown database type: '{}'".format(self.db_type)) + + return storage + + def initdb(self): + pass + + + def store(self, output): + """ Creates reference that is returned to the client + """ + + DATA_TYPE.is_valid_datatype(output.output_format.data_type) + + if output.output_format.data_type is DATA_TYPE.VECTOR: + self.store_vector_output(output.file, output.identifier) + elif output.output_format.data_type is DATA_TYPE.RASTER: + self.store_raster_output(output.file, output.identifier) + elif output.output_format.data_type is DATA_TYPE.OTHER: + self.store_other_output(output.file, output.identifier, output.uuid) + else: + # This should never happen + raise Exception("Unknown data type") + + if isinstance(self, sqlite.SQLiteStorage): + url = '{}.{}'.format(self.target, output.identifier) + elif isinstance(self, pg.PgStorage): + url = '{}.{}.{}'.format(self.dbname, self.schema_name, output.identifier) + + # returns value for database storage defined in the STORE_TYPE class, + # name of the output file and a reference + return (STORE_TYPE.DB, output.file, url) + + + def store_vector_output(self, file_name, identifier): + """ Open output file, connect to SQLite database and copiy data there + """ + from osgeo import ogr + + if isinstance(self, sqlite.SQLiteStorage): + drv = ogr.GetDriverByName("SQLite") + dsc_out = drv.CreateDataSource(self.target) + elif isinstance(self, pg.PgStorage): + dsc_out = ogr.Open("PG:" + self.target) + + # connect to a database and copy output there + LOGGER.debug("Database: {}".format(self.target)) + dsc_in = ogr.Open(file_name) + if dsc_in is None: + raise Exception("Reading data failed.") + if dsc_out is None: + raise NoApplicableCode("Could not connect to the database.") + layer = dsc_out.CopyLayer(dsc_in.GetLayer(), identifier, + ['OVERWRITE=YES']) + + if layer is None: + raise Exception("Writing output data to the database failed.") + + dsc_out.Destroy() + dsc_in.Destroy() + + # returns process identifier (defined within the process) + return identifier + + + def store_raster_output(self, file_name, identifier): + pass + + + def store_other_output(self, file_name, identifier, uuid): + + from pywps import configuration as config + from sqlalchemy import Column, Integer, String, LargeBinary, DateTime, func, create_engine + from sqlalchemy.ext.declarative import declarative_base + from sqlalchemy.orm import sessionmaker + + base = declarative_base() + + if isinstance(self, sqlite.SQLiteStorage): + engine = sqlalchemy.create_engine("sqlite:///{}".format(self.target)) + elif isinstance(self, pg.PgStorage): + engine = 
sqlalchemy.create_engine('postgresql://{}:{}@{}:{}/{}'.format( + self.dbname,self.password,self.host,self.port,self.user + ) + ) + + # Create table + class Other_output(base): + __tablename__ = identifier + if isinstance(self, pg.PgStorage): + __table_args__ = {'schema' : self.schema_name} + + + primary_key = Column(Integer, primary_key=True) + uuid = Column(String(64)) + data = Column(LargeBinary) + timestamp = Column(DateTime(timezone=True), server_default=func.now()) + + Session = sessionmaker(engine) + session = Session() + + base.metadata.create_all(engine) + + # Open file as binary + with open(file_name, "rb") as data: + out = data.read() + + # Add data to table + output = Other_output(uuid=uuid, data=out) + session.add(output) + session.commit() + + + return identifier \ No newline at end of file diff --git a/pywps/inout/storage/db/pg.py b/pywps/inout/storage/db/pg.py new file mode 100644 index 000000000..d1d5c4043 --- /dev/null +++ b/pywps/inout/storage/db/pg.py @@ -0,0 +1,74 @@ +################################################################## +# Copyright 2018 Open Source Geospatial Foundation and others # +# licensed under MIT, Please consult LICENSE.txt for details # +################################################################## + +import logging +from pywps import configuration as config +from pywps.exceptions import NoApplicableCode +from .. import STORE_TYPE +from pywps.inout.formats import DATA_TYPE +from . import DbStorage +import sqlalchemy + +LOGGER = logging.getLogger('PYWPS') + +class PgStorage(DbStorage): + + def __init__(self): + # TODO: more databases in config file + # create connection string + dbsettings = "db" + self.dbname = config.get_config_value(dbsettings, "dbname") + self.user = config.get_config_value(dbsettings, "user") + self.password = config.get_config_value(dbsettings, "password") + self.host = config.get_config_value(dbsettings, "host") + self.port = config.get_config_value(dbsettings, "port") + + + self.target = "dbname={} user={} password={} host={} port={}".format( + self.dbname, self.user, self.password, self.host, self.port + ) + + self.schema_name = config.get_config_value(dbsettings, "schema_name") + + self.initdb() + + + def store_raster_output(self, file_name, identifier): + + from subprocess import call, run, Popen, PIPE + + # Convert raster to an SQL query + command1 = ["raster2pgsql", "-a", file_name, self.schema_name + "." 
+ identifier] + p = Popen(command1,stdout=PIPE) + # Apply the SQL query + command2 = ["psql", "-h", "localhost", "-p", "5432", "-d", self.dbname] + run(command2,stdin=p.stdout) + + + return identifier + + + def initdb(self): + + from sqlalchemy.schema import CreateSchema + + dbsettings = "db" + connstr = 'postgresql://{}:{}@{}:{}/{}'.format( + config.get_config_value(dbsettings, "user"), + config.get_config_value(dbsettings, "password"), + config.get_config_value(dbsettings, "host"), + config.get_config_value(dbsettings, "port"), + config.get_config_value(dbsettings, "dbname") + ) + + engine = sqlalchemy.create_engine(connstr) + schema_name = config.get_config_value('db', 'schema_name') + + #Create schema; if it already exists, skip this + try: + engine.execute(CreateSchema(schema_name)) + # programming error - schema already exists) + except sqlalchemy.exc.ProgrammingError: + pass \ No newline at end of file diff --git a/pywps/inout/storage/db/sqlite.py b/pywps/inout/storage/db/sqlite.py new file mode 100644 index 000000000..1b1979870 --- /dev/null +++ b/pywps/inout/storage/db/sqlite.py @@ -0,0 +1,31 @@ +################################################################## +# Copyright 2018 Open Source Geospatial Foundation and others # +# licensed under MIT, Please consult LICENSE.txt for details # +################################################################## + +import logging +from pywps import configuration as config +from .. import STORE_TYPE +from pywps.inout.formats import DATA_TYPE +from pywps.exceptions import NoApplicableCode +from . import DbStorage + + +LOGGER = logging.getLogger('PYWPS') + + +class SQLiteStorage(DbStorage): + + def __init__(self): + + self.target = config.get_config_value("db", "dblocation") + + + def store_raster_output(self, file_name, identifier): + + from subprocess import call + + call(["gdal_translate", "-of", "Rasterlite", file_name, "RASTERLITE:" + self.target + ",table=" + identifier]) + + # returns process identifier (defined within the process) + return identifier diff --git a/pywps/inout/storage.py b/pywps/inout/storage/file.py similarity index 81% rename from pywps/inout/storage.py rename to pywps/inout/storage/file.py index 53e37c6de..d9a0171db 100644 --- a/pywps/inout/storage.py +++ b/pywps/inout/storage/file.py @@ -5,53 +5,14 @@ import logging import os -from abc import ABCMeta, abstractmethod from pywps._compat import urljoin from pywps.exceptions import NotEnoughStorage from pywps import configuration as config +from . 
import StorageAbstract, STORE_TYPE LOGGER = logging.getLogger('PYWPS') -class STORE_TYPE: - PATH = 0 -# TODO: cover with tests - - -class StorageAbstract(object): - """Data storage abstract class - """ - - __metaclass__ = ABCMeta - - @abstractmethod - def store(self, output): - """ - :param output: of type IOHandler - :returns: (type, store, url) where - type - is type of STORE_TYPE - number - store - string describing storage - file name, database connection - url - url, where the data can be downloaded - """ - pass - - -class DummyStorage(StorageAbstract): - """Dummy empty storage implementation, does nothing - - Default instance, for non-reference output request - - >>> store = DummyStorage() - >>> assert store.store - """ - - def __init__(self): - """ - """ - - def store(self, ouput): - pass - class FileStorage(StorageAbstract): """File storage implementation, stores data to file system @@ -88,7 +49,7 @@ def store(self, output): import math import shutil import tempfile - import uuid + import uuid file_name = output.file @@ -151,4 +112,4 @@ def get_free_space(folder): free_space = os.statvfs(folder).f_bfree LOGGER.debug('Free space: %s', free_space) - return free_space + return free_space \ No newline at end of file diff --git a/tests/data/other/corn.csv b/tests/data/other/corn.csv new file mode 100644 index 000000000..58245d350 --- /dev/null +++ b/tests/data/other/corn.csv @@ -0,0 +1,106 @@ +"Program","Year","Period","Week Ending","Geo Level","State","State ANSI","Ag District","Ag District Code","County","County ANSI","Zip Code","Region","watershed_code","Watershed","Commodity","Data Item","Domain","Domain Category","Value","CV (%)" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","CENTRAL","50","BARTON","009","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","23,286","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","CENTRAL","50","DICKINSON","041","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","13,452","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","CENTRAL","50","ELLIS","051","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","1,406","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","CENTRAL","50","ELLSWORTH","053","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","2,284","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","CENTRAL","50","LINCOLN","105","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","2,094","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","CENTRAL","50","MARION","115","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","35,242","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","CENTRAL","50","MCPHERSON","113","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","39,661","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","CENTRAL","50","RICE","159","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","30,347","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","CENTRAL","50","RUSH","165","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","7,735","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","CENTRAL","50","RUSSELL","167","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","3,801","15.4" 
+"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","CENTRAL","50","SALINE","169","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","4,776","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","EAST CENTRAL","80","ANDERSON","003","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","56,288","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","EAST CENTRAL","80","CHASE","017","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","4,661","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","EAST CENTRAL","80","COFFEY","031","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","27,447","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","EAST CENTRAL","80","DOUGLAS","045","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","31,483","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","EAST CENTRAL","80","FRANKLIN","059","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","32,489","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","EAST CENTRAL","80","GEARY","061","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","9,634","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","EAST CENTRAL","80","JOHNSON","091","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","10,818","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","EAST CENTRAL","80","LINN","107","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","21,805","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","EAST CENTRAL","80","LYON","111","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","25,472","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","EAST CENTRAL","80","MIAMI","121","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","30,557","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","EAST CENTRAL","80","MORRIS","127","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","13,255","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","EAST CENTRAL","80","OSAGE","139","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","51,759","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","EAST CENTRAL","80","SHAWNEE","177","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","37,779","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","EAST CENTRAL","80","WABAUNSEE","197","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","14,776","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","NORTH CENTRAL","40","CLAY","027","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","32,427","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","NORTH CENTRAL","40","CLOUD","029","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","22,658","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","NORTH CENTRAL","40","JEWELL","089","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","31,590","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","NORTH CENTRAL","40","MITCHELL","123","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","15,838","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","NORTH 
CENTRAL","40","OSBORNE","141","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","7,811","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","NORTH CENTRAL","40","OTTAWA","143","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","10,603","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","NORTH CENTRAL","40","PHILLIPS","147","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","27,554","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","NORTH CENTRAL","40","REPUBLIC","157","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","64,432","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","NORTH CENTRAL","40","ROOKS","163","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","7,146","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","NORTH CENTRAL","40","SMITH","183","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","33,082","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","NORTH CENTRAL","40","WASHINGTON","201","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","56,372","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","NORTHEAST","70","ATCHISON","005","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","57,143","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","NORTHEAST","70","BROWN","013","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","102,394","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","NORTHEAST","70","DONIPHAN","043","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","67,945","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","NORTHEAST","70","JACKSON","085","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","23,097","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","NORTHEAST","70","JEFFERSON","087","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","33,661","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","NORTHEAST","70","LEAVENWORTH","103","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","15,751","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","NORTHEAST","70","MARSHALL","117","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","83,928","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","NORTHEAST","70","NEMAHA","131","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","89,818","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","NORTHEAST","70","POTTAWATOMIE","149","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","35,558","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","NORTHEAST","70","RILEY","161","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","11,438","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","NORTHEAST","70","WYANDOTTE","209","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","1,741","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","NORTHWEST","10","CHEYENNE","023","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","53,256","15.4" 
+"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","NORTHWEST","10","DECATUR","039","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","79,490","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","NORTHWEST","10","GRAHAM","065","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","28,495","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","NORTHWEST","10","NORTON","137","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","67,620","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","NORTHWEST","10","RAWLINS","153","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","66,074","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","NORTHWEST","10","SHERIDAN","179","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","123,299","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","NORTHWEST","10","SHERMAN","181","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","108,802","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","NORTHWEST","10","THOMAS","193","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","171,616","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTH CENTRAL","60","BARBER","007","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","6,736","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTH CENTRAL","60","COMANCHE","033","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","1,921","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTH CENTRAL","60","EDWARDS","047","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","74,394","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTH CENTRAL","60","HARPER","077","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","4,384","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTH CENTRAL","60","HARVEY","079","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","57,651","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTH CENTRAL","60","KINGMAN","095","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","12,877","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTH CENTRAL","60","KIOWA","097","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","23,458","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTH CENTRAL","60","PAWNEE","145","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","38,920","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTH CENTRAL","60","PRATT","151","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","56,145","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTH CENTRAL","60","RENO","155","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","30,106","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTH CENTRAL","60","SEDGWICK","173","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","37,730","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTH CENTRAL","60","STAFFORD","185","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","56,586","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTH 
CENTRAL","60","SUMNER","191","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","35,362","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTHEAST","90","ALLEN","001","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","23,937","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTHEAST","90","BOURBON","011","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","13,622","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTHEAST","90","BUTLER","015","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","69,751","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTHEAST","90","CHAUTAUQUA","019","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","3,100","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTHEAST","90","CHEROKEE","021","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","54,289","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTHEAST","90","COWLEY","035","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","17,558","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTHEAST","90","CRAWFORD","037","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","51,857","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTHEAST","90","ELK","049","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","4,108","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTHEAST","90","GREENWOOD","073","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","10,716","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTHEAST","90","LABETTE","099","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","42,635","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTHEAST","90","MONTGOMERY","125","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","34,990","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTHEAST","90","NEOSHO","133","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","36,161","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTHEAST","90","WILSON","205","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","36,376","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTHEAST","90","WOODSON","207","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","30,280","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTHWEST","30","CLARK","025","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","1,285","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTHWEST","30","FINNEY","055","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","92,465","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTHWEST","30","FORD","057","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","41,013","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTHWEST","30","GRANT","067","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","47,834","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTHWEST","30","GRAY","069","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT 
SPECIFIED","58,589","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTHWEST","30","HAMILTON","075","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","10,780","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTHWEST","30","HASKELL","081","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","60,344","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTHWEST","30","HODGEMAN","083","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","11,093","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTHWEST","30","KEARNY","093","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","22,959","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTHWEST","30","MEADE","119","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","94,825","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTHWEST","30","MORTON","129","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","28,394","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTHWEST","30","SEWARD","175","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","50,062","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTHWEST","30","STANTON","187","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","57,241","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","SOUTHWEST","30","STEVENS","189","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","115,242","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","WEST CENTRAL","20","GOVE","063","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","76,031","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","WEST CENTRAL","20","GREELEY","071","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","31,939","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","WEST CENTRAL","20","LANE","101","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","11,024","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","WEST CENTRAL","20","LOGAN","109","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","58,078","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","WEST CENTRAL","20","NESS","135","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","3,679","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","WEST CENTRAL","20","SCOTT","171","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","34,315","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","WEST CENTRAL","20","TREGO","195","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","7,589","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","WEST CENTRAL","20","WALLACE","199","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","64,455","15.4" +"CENSUS","2012","YEAR","","COUNTY","KANSAS","20","WEST CENTRAL","20","WICHITA","203","","","00000000","","CORN","CORN, GRAIN - ACRES HARVESTED","TOTAL","NOT SPECIFIED","40,630","15.4" diff --git a/tests/data/other/test.txt b/tests/data/other/test.txt new file mode 100644 index 000000000..be1c84eaa --- /dev/null +++ b/tests/data/other/test.txt @@ -0,0 +1,2 @@ +"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do 
eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum." + diff --git a/tests/test_formats.py b/tests/test_formats.py index 1140e9d88..2171ae424 100644 --- a/tests/test_formats.py +++ b/tests/test_formats.py @@ -83,6 +83,7 @@ def test_json_in(self): injson['extension'] = '.gml' injson['mime_type'] = 'application/gml+xml' injson['encoding'] = 'utf-8' + injson['data_type'] = 'VECTOR' frmt = Format(injson['mime_type']) frmt.json = injson diff --git a/tests/test_storage.py b/tests/test_storage.py new file mode 100644 index 000000000..46726f1d8 --- /dev/null +++ b/tests/test_storage.py @@ -0,0 +1,371 @@ +import unittest +import atexit +import shutil +import tempfile +import sqlalchemy +from sqlalchemy import create_engine, inspect +from pywps import FORMATS +from pywps.inout.storage import DummyStorage, STORE_TYPE +from pywps.inout.storage.file import FileStorage +from pywps.inout.storage.db.pg import PgStorage +from pywps.inout.storage.db.sqlite import SQLiteStorage +from pywps.inout.storage.db import DbStorage +from pywps import ComplexOutput +import os +from pywps import configuration + + + +TEMP_DIRS=[] + +def clear(): + """Delete temporary files + """ + for d in TEMP_DIRS: + shutil.rmtree(d) + +atexit.register(clear) + +def get_vector_file(): + + return os.path.join(os.path.dirname(__file__), "data", "gml", "point.gml") + +def get_raster_file(): + + return os.path.join(os.path.dirname(__file__), "data", "geotiff", "dem.tiff") + + +def get_text_file(): + + return os.path.join(os.path.dirname(__file__), "data", "other", "test.txt") + +def get_csv_file(): + + return os.path.join(os.path.dirname(__file__), "data", "other", "corn.csv") + + + + +def set_test_configuration(): + configuration.CONFIG.set("server", "store_type", "db") + #when add_section('db') -> duplicate error, section db exists ; if not -> no section db ; section created in configuration.py + #configuration.CONFIG.add_section('db') + configuration.CONFIG.set("db", "db_type", "pg") + configuration.CONFIG.set("db", "dbname", "pisl") + configuration.CONFIG.set("db", "user", "pisl") + configuration.CONFIG.set("db", "password", "password") + configuration.CONFIG.set("db", "host", "localhost") + configuration.CONFIG.set("db", "port", "5432") + configuration.CONFIG.set("db", "schema_name", "test_schema") + + +class DummyStorageTestCase(unittest.TestCase): + """Storage test case + """ + + def setUp(self): + global TEMP_DIRS + tmp_dir = tempfile.mkdtemp() + TEMP_DIRS.append(tmp_dir) + + self.storage = DummyStorage() + + def tearDown(self): + pass + + def test_dummy_storage(self): + assert isinstance(self.storage, DummyStorage) + + + def test_store(self): + vector_output = ComplexOutput('vector', 'Vector output', + supported_formats=[FORMATS.GML]) + vector_output.file = get_vector_file() + assert not self.storage.store("some data") + + +class FileStorageTestCase(unittest.TestCase): + """FileStorage tests + """ + + def setUp(self): + global TEMP_DIRS + tmp_dir = tempfile.mkdtemp() + TEMP_DIRS.append(tmp_dir) + + self.storage = FileStorage() + + def tearDown(self): + pass + + def test_file_storage(self): + assert isinstance(self.storage, FileStorage) + + + def test_store(self): + vector_output = ComplexOutput('vector', 
'Vector output', + supported_formats=[FORMATS.GML]) + vector_output.file = get_vector_file() + + store_file = self.storage.store(vector_output) + assert len(store_file) == 3 + assert store_file[0] == STORE_TYPE.PATH + assert isinstance(store_file[1], str) + assert isinstance(store_file[2], str) + + +class PgStorageTestCase(unittest.TestCase): + """PgStorage test + """ + + def setUp(self): + global TEMP_DIRS + tmp_dir = tempfile.mkdtemp() + TEMP_DIRS.append(tmp_dir) + set_test_configuration() + self.storage = PgStorage() + + + + dbsettings = "db" + self.dbname = configuration.get_config_value(dbsettings, "dbname") + self.user = configuration.get_config_value(dbsettings, "user") + self.password = configuration.get_config_value(dbsettings, "password") + self.host = configuration.get_config_value(dbsettings, "host") + self.port = configuration.get_config_value(dbsettings, "port") + + + + self.storage.target = "dbname={} user={} password={} host={} port={}".format( + self.dbname, self.user, self.password, self.host, self.port + ) + + + self.storage.schema_name = configuration.get_config_value("db", "schema_name") + self.storage.dbname = configuration.get_config_value("db", "dbname") + + def tearDown(self): + pass + + def test_pg_storage(self): + assert isinstance(self.storage, PgStorage) + + + def test_store_vector(self): + + vector_output = ComplexOutput('vector', 'Vector output', + supported_formats=[FORMATS.GML]) + vector_output.file = get_vector_file() + vector_output.output_format = FORMATS.GML + store_vector = self.storage.store(vector_output) + + assert len(store_vector) == 3 + assert store_vector[0] == STORE_TYPE.DB + assert isinstance(store_vector[1], str) + assert isinstance(store_vector[2], str) + + # Parse reference into dbname, schema and table + reference = store_vector[2].split(".") + + db_url = "postgresql://{}:{}@{}:{}/{}".format( + reference[0], self.password, self.host, self.port, self.user + ) + engine = create_engine(db_url) + # check if table exists + ins = inspect(engine) + assert (reference[2] in ins.get_table_names(schema=reference[1])) + + + def test_store_raster(self): + raster_output = ComplexOutput('raster', 'Raster output', + supported_formats=[FORMATS.GEOTIFF]) + raster_output.file = get_raster_file() + raster_output.output_format = FORMATS.GEOTIFF + + store_raster = self.storage.store(raster_output) + + assert len(store_raster) == 3 + assert store_raster[0] == STORE_TYPE.DB + assert isinstance(store_raster[1], str) + assert isinstance(store_raster[2], str) + + # Parse reference into dbname, schema and table + reference = store_raster[2].split(".") + + db_url = "postgresql://{}:{}@{}:{}/{}".format( + reference[0], self.password, self.host, self.port, self.user + ) + engine = create_engine(db_url) + # check if table exists + ins = inspect(engine) + assert (reference[2] in ins.get_table_names(schema=reference[1])) + + + def test_store_other(self): + text_output = ComplexOutput('txt', 'Plain text output', + supported_formats=[FORMATS.TEXT]) + text_output.file = get_text_file() + text_output.output_format = FORMATS.TEXT + + store_text = self.storage.store(text_output) + + assert len(store_text) == 3 + assert store_text[0] == STORE_TYPE.DB + assert isinstance(store_text[1], str) + assert isinstance(store_text[2], str) + + # Parse reference into dbname, schema and table + reference = store_text[2].split(".") + + db_url = "postgresql://{}:{}@{}:{}/{}".format( + reference[0], self.password, self.host, self.port, self.user + ) + engine = create_engine(db_url) + # check if 
table exists + ins = inspect(engine) + assert (reference[2] in ins.get_table_names(schema=reference[1])) + + + csv_output = ComplexOutput('csv', 'CSV output', + supported_formats=[FORMATS.CSV]) + csv_output.file = get_csv_file() + csv_output.output_format = FORMATS.CSV + + store_csv = self.storage.store(csv_output) + + assert len(store_csv) == 3 + assert store_csv[0] == STORE_TYPE.DB + assert isinstance(store_csv[1], str) + assert isinstance(store_csv[2], str) + + # Parse reference into dbname, schema and table + reference = store_csv[2].split(".") + + db_url = "postgresql://{}:{}@{}:{}/{}".format( + reference[0], self.password, self.host, self.port, self.user + ) + engine = create_engine(db_url) + # check if table exists + ins = inspect(engine) + assert (reference[2] in ins.get_table_names(schema=reference[1])) + + +class SQLiteStorageTestCase(unittest.TestCase): + """SQLiteStorage test + """ + + def setUp(self): + global TEMP_DIRS + tmp_dir = tempfile.mkdtemp() + TEMP_DIRS.append(tmp_dir) + + self.storage = SQLiteStorage() + self.storage.target = tempfile.mktemp(suffix='.sqlite', prefix='pywpsdb-') + + + def tearDown(self): + # Delete temp file if exists + try: + os.remove(self.storage.target) + except: + pass + + def test_sqlite_storage(self): + assert isinstance(self.storage, SQLiteStorage) + + + def test_store_vector(self): + vector_output = ComplexOutput('vector', 'Vector output', + supported_formats=[FORMATS.GML]) + vector_output.file = get_vector_file() + vector_output.output_format = FORMATS.GML + store_vector = self.storage.store(vector_output) + + assert len(store_vector) == 3 + assert store_vector[0] == STORE_TYPE.DB + assert isinstance(store_vector[1], str) + assert isinstance(store_vector[2], str) + + # Parse reference into path to db and table + reference = store_vector[2].rsplit(".", 1) + + db_url = "sqlite:///{}".format(reference[0]) + engine = create_engine(db_url) + # check if table exists + ins = inspect(engine) + assert (reference[1] in ins.get_table_names()) + + + def test_store_raster(self): + raster_output = ComplexOutput('raster', 'Raster output', + supported_formats=[FORMATS.GEOTIFF]) + raster_output.file = get_raster_file() + raster_output.output_format = FORMATS.GEOTIFF + + store_raster = self.storage.store(raster_output) + + assert len(store_raster) == 3 + assert store_raster[0] == STORE_TYPE.DB + assert isinstance(store_raster[1], str) + assert isinstance(store_raster[2], str) + + # Parse reference into path to db and table + reference = store_raster[2].rsplit(".", 1) + + db_url = "sqlite:///{}".format(reference[0]) + engine = create_engine(db_url) + # check if table exists + ins = inspect(engine) + + assert (reference[1] + "_rasters") in ins.get_table_names() + + + + def test_store_other(self): + + # Test text output + text_output = ComplexOutput('txt', 'Plain text output', + supported_formats=[FORMATS.TEXT]) + text_output.file = get_text_file() + text_output.output_format = FORMATS.TEXT + + store_text = self.storage.store(text_output) + + assert len(store_text) == 3 + assert store_text[0] == STORE_TYPE.DB + assert isinstance(store_text[1], str) + assert isinstance(store_text[2], str) + + # Parse reference into path to db and table + reference = store_text[2].rsplit(".", 1) + + db_url = "sqlite:///{}".format(reference[0]) + engine = create_engine(db_url) + # check if table exists + ins = inspect(engine) + assert (reference[1] in ins.get_table_names()) + + + # Test CSV output + csv_output = ComplexOutput('csv', 'CSV output', + supported_formats=[FORMATS.CSV]) 
+ csv_output.file = get_csv_file() + csv_output.output_format = FORMATS.CSV + + store_csv = self.storage.store(csv_output) + + assert len(store_csv) == 3 + assert store_csv[0] == STORE_TYPE.DB + assert isinstance(store_csv[1], str) + assert isinstance(store_csv[2], str) + + # Parse reference into path to db and table + reference = store_csv[2].rsplit(".", 1) + + db_url = "sqlite:///{}".format(reference[0]) + + engine = create_engine(db_url) + # check if table exists + ins = inspect(engine) + assert (reference[1] in ins.get_table_names()) + From 3208c76ab4ab463ddfcacf1c4d0c8f055cc2c8d7 Mon Sep 17 00:00:00 2001 From: janpisl Date: Mon, 13 Aug 2018 13:13:17 +0200 Subject: [PATCH 02/21] data_type is the last parameter --- pywps/inout/formats/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pywps/inout/formats/__init__.py b/pywps/inout/formats/__init__.py index 07c02446e..8531ffb40 100644 --- a/pywps/inout/formats/__init__.py +++ b/pywps/inout/formats/__init__.py @@ -47,10 +47,10 @@ class Format(object): :param str extension: file extension """ - def __init__(self, mime_type, data_type=None, + def __init__(self, mime_type, schema=None, encoding=None, validate=emptyvalidator, mode=MODE.SIMPLE, - extension=None): + extension=None, data_type=None): """Constructor """ From ef961350b9c17fd6ed0d2c3956bda9c704adfd91 Mon Sep 17 00:00:00 2001 From: janpisl Date: Mon, 13 Aug 2018 13:20:05 +0200 Subject: [PATCH 03/21] fix indent. --- pywps/inout/storage/file.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pywps/inout/storage/file.py b/pywps/inout/storage/file.py index d9a0171db..6a8b1a346 100644 --- a/pywps/inout/storage/file.py +++ b/pywps/inout/storage/file.py @@ -49,7 +49,7 @@ def store(self, output): import math import shutil import tempfile - import uuid + import uuid file_name = output.file @@ -112,4 +112,4 @@ def get_free_space(folder): free_space = os.statvfs(folder).f_bfree LOGGER.debug('Free space: %s', free_space) - return free_space \ No newline at end of file + return free_space From 019cb6eb51c6b5cf157fef0ae6fc3c7bfe9257cd Mon Sep 17 00:00:00 2001 From: janpisl Date: Mon, 13 Aug 2018 13:20:57 +0200 Subject: [PATCH 04/21] use lowercase --- tests/test_formats.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_formats.py b/tests/test_formats.py index 2171ae424..9541568da 100644 --- a/tests/test_formats.py +++ b/tests/test_formats.py @@ -83,7 +83,7 @@ def test_json_in(self): injson['extension'] = '.gml' injson['mime_type'] = 'application/gml+xml' injson['encoding'] = 'utf-8' - injson['data_type'] = 'VECTOR' + injson['data_type'] = 'vector' frmt = Format(injson['mime_type']) frmt.json = injson From 2d1e9f51972d3341cf48e66ccbe8bbbf4a7fdb2a Mon Sep 17 00:00:00 2001 From: janpisl Date: Mon, 13 Aug 2018 13:42:36 +0200 Subject: [PATCH 05/21] remove redundant import --- pywps/inout/storage/db/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/pywps/inout/storage/db/__init__.py b/pywps/inout/storage/db/__init__.py index 801bc5963..28fc92fc3 100644 --- a/pywps/inout/storage/db/__init__.py +++ b/pywps/inout/storage/db/__init__.py @@ -6,7 +6,6 @@ import logging from abc import ABCMeta, abstractmethod from pywps import configuration as config -from pywps.inout.storage import db from pywps.inout.formats import DATA_TYPE from .. import STORE_TYPE from .. 
import StorageAbstract From f642172bf958ddb3029dd2b5782eac9108f8e8ed Mon Sep 17 00:00:00 2001 From: janpisl Date: Mon, 13 Aug 2018 14:13:31 +0200 Subject: [PATCH 06/21] formatting - fix indent, remove whitespaces --- pywps/configuration.py | 3 +-- pywps/inout/formats/__init__.py | 6 ++--- pywps/inout/outputs.py | 1 - pywps/inout/storage/__init__.py | 6 ----- pywps/inout/storage/db/__init__.py | 39 +++++++++++++++--------------- pywps/inout/storage/db/pg.py | 17 ++++++------- pywps/inout/storage/db/sqlite.py | 2 -- pywps/inout/storage/file.py | 2 -- 8 files changed, 29 insertions(+), 47 deletions(-) diff --git a/pywps/configuration.py b/pywps/configuration.py index 5771434ad..bf8fff395 100755 --- a/pywps/configuration.py +++ b/pywps/configuration.py @@ -130,9 +130,8 @@ def load_configuration(cfgfiles=None): CONFIG.add_section('grass') CONFIG.set('grass', 'gisbase', '') - - CONFIG.add_section('db') + CONFIG.add_section('db') if not cfgfiles: cfgfiles = _get_default_config_files_location() diff --git a/pywps/inout/formats/__init__.py b/pywps/inout/formats/__init__.py index 8531ffb40..8ac8674b4 100644 --- a/pywps/inout/formats/__init__.py +++ b/pywps/inout/formats/__init__.py @@ -21,6 +21,7 @@ 'WFS110, WFS20, WMS, WMS130, WMS110,' 'WMS100, TEXT, CSV, NETCDF, LAZ, LAS') + # this should be Enum type (only compatible with Python 3) class DATA_TYPE(object): VECTOR = 0 @@ -75,7 +76,6 @@ def mime_type(self): return self._mime_type - @property def data_type(self): """Get format data type @@ -83,7 +83,6 @@ def data_type(self): return self._data_type - @data_type.setter def data_type(self, data_type): """Set format encoding @@ -91,7 +90,6 @@ def data_type(self, data_type): self._data_type = data_type - @mime_type.setter def mime_type(self, mime_type): """Set format mime type @@ -212,7 +210,7 @@ def json(self, jsonin): Format('text/csv', extension='.csv', data_type=DATA_TYPE.OTHER), Format('application/x-netcdf', extension='.nc', data_type=DATA_TYPE.VECTOR), Format('application/octet-stream', extension='.laz', data_type=DATA_TYPE.VECTOR), - Format('application/octet-stream', extension='.las', data_type=DATA_TYPE.VECTOR), + Format('application/octet-stream', extension='.las', data_type=DATA_TYPE.VECTOR), ) diff --git a/pywps/inout/outputs.py b/pywps/inout/outputs.py index c909c788f..f3894c6a9 100644 --- a/pywps/inout/outputs.py +++ b/pywps/inout/outputs.py @@ -111,7 +111,6 @@ def _json_reference(self, data): else: self.storage = FileStorage() - """ to be implemented: elif store_type == 's3' and \ diff --git a/pywps/inout/storage/__init__.py b/pywps/inout/storage/__init__.py index 2024410f0..13fc22478 100644 --- a/pywps/inout/storage/__init__.py +++ b/pywps/inout/storage/__init__.py @@ -5,13 +5,10 @@ from abc import ABCMeta, abstractmethod - - class STORE_TYPE: PATH = 0 DB = 1 - class StorageAbstract(object): """Data storage abstract class """ @@ -29,8 +26,6 @@ def store(self, output): """ pass - - class DummyStorage(StorageAbstract): """Dummy empty storage implementation, does nothing @@ -46,4 +41,3 @@ def __init__(self): def store(self, ouput): pass - diff --git a/pywps/inout/storage/db/__init__.py b/pywps/inout/storage/db/__init__.py index 28fc92fc3..8861bf18a 100644 --- a/pywps/inout/storage/db/__init__.py +++ b/pywps/inout/storage/db/__init__.py @@ -7,6 +7,7 @@ from abc import ABCMeta, abstractmethod from pywps import configuration as config from pywps.inout.formats import DATA_TYPE +from pywps.exceptions import NoApplicableCode from .. import STORE_TYPE from .. 
import StorageAbstract import sqlalchemy @@ -18,13 +19,11 @@ class DbStorage(StorageAbstract): def __init__(self): - # get db_type from configuration + # get db_type from configuration try: self.db_type = config.get_config_value('db', 'db_type').lower() except KeyError: - raise exception("Database type has not been specified") - - + raise Exception("Database type has not been specified") @staticmethod def get_db_type(): @@ -37,14 +36,13 @@ def get_db_type(): elif db_type == "sqlite": storage = sqlite.SQLiteStorage() else: - raise Exception("Unknown database type: '{}'".format(self.db_type)) + raise Exception("Unknown database type: '{}'".format(config.get_config_value('db', 'db_type').lower())) return storage def initdb(self): pass - def store(self, output): """ Creates reference that is returned to the client """ @@ -66,7 +64,7 @@ def store(self, output): elif isinstance(self, pg.PgStorage): url = '{}.{}.{}'.format(self.dbname, self.schema_name, output.identifier) - # returns value for database storage defined in the STORE_TYPE class, + # returns value for database storage defined in the STORE_TYPE class, # name of the output file and a reference return (STORE_TYPE.DB, output.file, url) @@ -80,7 +78,7 @@ def store_vector_output(self, file_name, identifier): drv = ogr.GetDriverByName("SQLite") dsc_out = drv.CreateDataSource(self.target) elif isinstance(self, pg.PgStorage): - dsc_out = ogr.Open("PG:" + self.target) + dsc_out = ogr.Open("PG:" + self.target) # connect to a database and copy output there LOGGER.debug("Database: {}".format(self.target)) @@ -94,18 +92,16 @@ def store_vector_output(self, file_name, identifier): if layer is None: raise Exception("Writing output data to the database failed.") - + dsc_out.Destroy() dsc_in.Destroy() # returns process identifier (defined within the process) return identifier - def store_raster_output(self, file_name, identifier): pass - def store_other_output(self, file_name, identifier, uuid): from pywps import configuration as config @@ -119,12 +115,16 @@ def store_other_output(self, file_name, identifier, uuid): engine = sqlalchemy.create_engine("sqlite:///{}".format(self.target)) elif isinstance(self, pg.PgStorage): engine = sqlalchemy.create_engine('postgresql://{}:{}@{}:{}/{}'.format( - self.dbname,self.password,self.host,self.port,self.user - ) - ) + self.dbname, + self.password, + self.host, + self.port, + self.user + ) + ) # Create table - class Other_output(base): + class Other_output(base): __tablename__ = identifier if isinstance(self, pg.PgStorage): __table_args__ = {'schema' : self.schema_name} @@ -135,7 +135,7 @@ class Other_output(base): data = Column(LargeBinary) timestamp = Column(DateTime(timezone=True), server_default=func.now()) - Session = sessionmaker(engine) + Session = sessionmaker(engine) session = Session() base.metadata.create_all(engine) @@ -145,9 +145,8 @@ class Other_output(base): out = data.read() # Add data to table - output = Other_output(uuid=uuid, data=out) - session.add(output) + output = Other_output(uuid=uuid, data=out) + session.add(output) session.commit() - - return identifier \ No newline at end of file + return identifier diff --git a/pywps/inout/storage/db/pg.py b/pywps/inout/storage/db/pg.py index d1d5c4043..c64369498 100644 --- a/pywps/inout/storage/db/pg.py +++ b/pywps/inout/storage/db/pg.py @@ -13,6 +13,7 @@ LOGGER = logging.getLogger('PYWPS') + class PgStorage(DbStorage): def __init__(self): @@ -25,31 +26,27 @@ def __init__(self): self.host = config.get_config_value(dbsettings, "host") self.port = 
config.get_config_value(dbsettings, "port") - self.target = "dbname={} user={} password={} host={} port={}".format( self.dbname, self.user, self.password, self.host, self.port ) - self.schema_name = config.get_config_value(dbsettings, "schema_name") + self.schema_name = config.get_config_value(dbsettings, "schema_name") self.initdb() - def store_raster_output(self, file_name, identifier): from subprocess import call, run, Popen, PIPE # Convert raster to an SQL query - command1 = ["raster2pgsql", "-a", file_name, self.schema_name + "." + identifier] - p = Popen(command1,stdout=PIPE) + command1 = ["raster2pgsql", "-a", file_name, self.schema_name + "." + identifier] + p = Popen(command1, stdout=PIPE) # Apply the SQL query command2 = ["psql", "-h", "localhost", "-p", "5432", "-d", self.dbname] - run(command2,stdin=p.stdout) - + run(command2, stdin=p.stdout) return identifier - def initdb(self): from sqlalchemy.schema import CreateSchema @@ -66,9 +63,9 @@ def initdb(self): engine = sqlalchemy.create_engine(connstr) schema_name = config.get_config_value('db', 'schema_name') - #Create schema; if it already exists, skip this + # Create schema; if it already exists, skip this try: engine.execute(CreateSchema(schema_name)) # programming error - schema already exists) except sqlalchemy.exc.ProgrammingError: - pass \ No newline at end of file + pass diff --git a/pywps/inout/storage/db/sqlite.py b/pywps/inout/storage/db/sqlite.py index 1b1979870..9ec926308 100644 --- a/pywps/inout/storage/db/sqlite.py +++ b/pywps/inout/storage/db/sqlite.py @@ -10,7 +10,6 @@ from pywps.exceptions import NoApplicableCode from . import DbStorage - LOGGER = logging.getLogger('PYWPS') @@ -20,7 +19,6 @@ def __init__(self): self.target = config.get_config_value("db", "dblocation") - def store_raster_output(self, file_name, identifier): from subprocess import call diff --git a/pywps/inout/storage/file.py b/pywps/inout/storage/file.py index 6a8b1a346..9ef3a7225 100644 --- a/pywps/inout/storage/file.py +++ b/pywps/inout/storage/file.py @@ -12,8 +12,6 @@ LOGGER = logging.getLogger('PYWPS') - - class FileStorage(StorageAbstract): """File storage implementation, stores data to file system From 9d591ff8d0ebaab65899dc5def91dd5af4c03af8 Mon Sep 17 00:00:00 2001 From: janpisl Date: Mon, 13 Aug 2018 14:32:51 +0200 Subject: [PATCH 07/21] improved formatting --- pywps/inout/storage/__init__.py | 3 +++ pywps/inout/storage/db/__init__.py | 10 ++++------ pywps/inout/storage/file.py | 1 + 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/pywps/inout/storage/__init__.py b/pywps/inout/storage/__init__.py index 13fc22478..73fc626f6 100644 --- a/pywps/inout/storage/__init__.py +++ b/pywps/inout/storage/__init__.py @@ -5,10 +5,12 @@ from abc import ABCMeta, abstractmethod + class STORE_TYPE: PATH = 0 DB = 1 + class StorageAbstract(object): """Data storage abstract class """ @@ -26,6 +28,7 @@ def store(self, output): """ pass + class DummyStorage(StorageAbstract): """Dummy empty storage implementation, does nothing diff --git a/pywps/inout/storage/db/__init__.py b/pywps/inout/storage/db/__init__.py index 8861bf18a..895ee6182 100644 --- a/pywps/inout/storage/db/__init__.py +++ b/pywps/inout/storage/db/__init__.py @@ -68,10 +68,9 @@ def store(self, output): # name of the output file and a reference return (STORE_TYPE.DB, output.file, url) - def store_vector_output(self, file_name, identifier): """ Open output file, connect to SQLite database and copiy data there - """ + """ from osgeo import ogr if isinstance(self, sqlite.SQLiteStorage): 
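
The patches above and below keep reshaping pywps/inout/storage/db/__init__.py, whose core idea is small: DbStorage.get_db_type() reads db_type from the [db] configuration section and hands back the matching backend instance. A minimal standalone sketch of that selection logic follows; the dict and the two stub classes are stand-ins assumed here so the snippet runs without a patched PyWPS install, not the real configuration object or storage backends.

    # Standalone sketch of the DbStorage.get_db_type() selection logic.
    # The dict below stands in for the PyWPS "[db]" configuration section,
    # and the stub classes stand in for pywps.inout.storage.db.pg.PgStorage
    # and pywps.inout.storage.db.sqlite.SQLiteStorage.

    class PgStorage(object):
        """Stub for the PostgreSQL backend."""

    class SQLiteStorage(object):
        """Stub for the SQLite backend."""

    def get_db_type(db_config):
        # pick the backend class based on the configured db_type value
        db_type = db_config.get("db_type", "").lower()
        if db_type == "pg":
            return PgStorage()
        elif db_type == "sqlite":
            return SQLiteStorage()
        raise Exception("Unknown database type: '{}'".format(db_type))

    storage = get_db_type({"db_type": "sqlite", "dblocation": "/tmp/db.sqlite"})
    assert isinstance(storage, SQLiteStorage)

With db_type=pg the same call would return the PgStorage stub, and any other value raises, mirroring the behaviour of the factory in the patch.
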
@@ -104,9 +103,9 @@ def store_raster_output(self, file_name, identifier): def store_other_output(self, file_name, identifier, uuid): - from pywps import configuration as config + from pywps import configuration as config from sqlalchemy import Column, Integer, String, LargeBinary, DateTime, func, create_engine - from sqlalchemy.ext.declarative import declarative_base + from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker base = declarative_base() @@ -127,8 +126,7 @@ def store_other_output(self, file_name, identifier, uuid): class Other_output(base): __tablename__ = identifier if isinstance(self, pg.PgStorage): - __table_args__ = {'schema' : self.schema_name} - + __table_args__ = {'schema': self.schema_name} primary_key = Column(Integer, primary_key=True) uuid = Column(String(64)) diff --git a/pywps/inout/storage/file.py b/pywps/inout/storage/file.py index 9ef3a7225..9d5e1102b 100644 --- a/pywps/inout/storage/file.py +++ b/pywps/inout/storage/file.py @@ -12,6 +12,7 @@ LOGGER = logging.getLogger('PYWPS') + class FileStorage(StorageAbstract): """File storage implementation, stores data to file system From 4b8c537f79dd3cf7b8a5423b6caa646299b89e30 Mon Sep 17 00:00:00 2001 From: janpisl Date: Mon, 13 Aug 2018 14:38:41 +0200 Subject: [PATCH 08/21] fix import --- pywps/inout/storage/db/__init__.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/pywps/inout/storage/db/__init__.py b/pywps/inout/storage/db/__init__.py index 895ee6182..346f8e761 100644 --- a/pywps/inout/storage/db/__init__.py +++ b/pywps/inout/storage/db/__init__.py @@ -11,7 +11,7 @@ from .. import STORE_TYPE from .. import StorageAbstract import sqlalchemy - +from . import sqlite,pg LOGGER = logging.getLogger('PYWPS') @@ -27,8 +27,6 @@ def __init__(self): @staticmethod def get_db_type(): - from . import sqlite - from . import pg # create an instance of the appropriate class db_type = config.get_config_value('db', 'db_type').lower() if db_type == "pg": From e2a9580f3b2f761fefcb803d2cf5a1dc1821d31e Mon Sep 17 00:00:00 2001 From: janpisl Date: Mon, 13 Aug 2018 16:28:39 +0200 Subject: [PATCH 09/21] split code to individual classes --- pywps/inout/storage/db/__init__.py | 102 ++---------------------- pywps/inout/storage/db/pg.py | 120 +++++++++++++++++++++++++---- pywps/inout/storage/db/sqlite.py | 88 +++++++++++++++++++++ 3 files changed, 201 insertions(+), 109 deletions(-) diff --git a/pywps/inout/storage/db/__init__.py b/pywps/inout/storage/db/__init__.py index 346f8e761..96e22196b 100644 --- a/pywps/inout/storage/db/__init__.py +++ b/pywps/inout/storage/db/__init__.py @@ -11,7 +11,7 @@ from .. import STORE_TYPE from .. import StorageAbstract import sqlalchemy -from . import sqlite,pg + LOGGER = logging.getLogger('PYWPS') @@ -27,6 +27,8 @@ def __init__(self): @staticmethod def get_db_type(): + from . import sqlite + from . 
import pg # create an instance of the appropriate class db_type = config.get_config_value('db', 'db_type').lower() if db_type == "pg": @@ -44,105 +46,13 @@ def initdb(self): def store(self, output): """ Creates reference that is returned to the client """ - - DATA_TYPE.is_valid_datatype(output.output_format.data_type) - - if output.output_format.data_type is DATA_TYPE.VECTOR: - self.store_vector_output(output.file, output.identifier) - elif output.output_format.data_type is DATA_TYPE.RASTER: - self.store_raster_output(output.file, output.identifier) - elif output.output_format.data_type is DATA_TYPE.OTHER: - self.store_other_output(output.file, output.identifier, output.uuid) - else: - # This should never happen - raise Exception("Unknown data type") - - if isinstance(self, sqlite.SQLiteStorage): - url = '{}.{}'.format(self.target, output.identifier) - elif isinstance(self, pg.PgStorage): - url = '{}.{}.{}'.format(self.dbname, self.schema_name, output.identifier) - - # returns value for database storage defined in the STORE_TYPE class, - # name of the output file and a reference - return (STORE_TYPE.DB, output.file, url) + pass def store_vector_output(self, file_name, identifier): - """ Open output file, connect to SQLite database and copiy data there - """ - from osgeo import ogr - - if isinstance(self, sqlite.SQLiteStorage): - drv = ogr.GetDriverByName("SQLite") - dsc_out = drv.CreateDataSource(self.target) - elif isinstance(self, pg.PgStorage): - dsc_out = ogr.Open("PG:" + self.target) - - # connect to a database and copy output there - LOGGER.debug("Database: {}".format(self.target)) - dsc_in = ogr.Open(file_name) - if dsc_in is None: - raise Exception("Reading data failed.") - if dsc_out is None: - raise NoApplicableCode("Could not connect to the database.") - layer = dsc_out.CopyLayer(dsc_in.GetLayer(), identifier, - ['OVERWRITE=YES']) - - if layer is None: - raise Exception("Writing output data to the database failed.") - - dsc_out.Destroy() - dsc_in.Destroy() - - # returns process identifier (defined within the process) - return identifier + pass def store_raster_output(self, file_name, identifier): pass def store_other_output(self, file_name, identifier, uuid): - - from pywps import configuration as config - from sqlalchemy import Column, Integer, String, LargeBinary, DateTime, func, create_engine - from sqlalchemy.ext.declarative import declarative_base - from sqlalchemy.orm import sessionmaker - - base = declarative_base() - - if isinstance(self, sqlite.SQLiteStorage): - engine = sqlalchemy.create_engine("sqlite:///{}".format(self.target)) - elif isinstance(self, pg.PgStorage): - engine = sqlalchemy.create_engine('postgresql://{}:{}@{}:{}/{}'.format( - self.dbname, - self.password, - self.host, - self.port, - self.user - ) - ) - - # Create table - class Other_output(base): - __tablename__ = identifier - if isinstance(self, pg.PgStorage): - __table_args__ = {'schema': self.schema_name} - - primary_key = Column(Integer, primary_key=True) - uuid = Column(String(64)) - data = Column(LargeBinary) - timestamp = Column(DateTime(timezone=True), server_default=func.now()) - - Session = sessionmaker(engine) - session = Session() - - base.metadata.create_all(engine) - - # Open file as binary - with open(file_name, "rb") as data: - out = data.read() - - # Add data to table - output = Other_output(uuid=uuid, data=out) - session.add(output) - session.commit() - - return identifier + pass \ No newline at end of file diff --git a/pywps/inout/storage/db/pg.py b/pywps/inout/storage/db/pg.py index 
c64369498..959f5de7d 100644 --- a/pywps/inout/storage/db/pg.py +++ b/pywps/inout/storage/db/pg.py @@ -34,19 +34,6 @@ def __init__(self): self.initdb() - def store_raster_output(self, file_name, identifier): - - from subprocess import call, run, Popen, PIPE - - # Convert raster to an SQL query - command1 = ["raster2pgsql", "-a", file_name, self.schema_name + "." + identifier] - p = Popen(command1, stdout=PIPE) - # Apply the SQL query - command2 = ["psql", "-h", "localhost", "-p", "5432", "-d", self.dbname] - run(command2, stdin=p.stdout) - - return identifier - def initdb(self): from sqlalchemy.schema import CreateSchema @@ -69,3 +56,110 @@ def initdb(self): # programming error - schema already exists) except sqlalchemy.exc.ProgrammingError: pass + + def store_vector_output(self, file_name, identifier): + """ Open output file, connect to SQLite database and copiy data there + """ + from osgeo import ogr + + dsc_out = ogr.Open("PG:" + self.target) + + # connect to a database and copy output there + LOGGER.debug("Database: {}".format(self.target)) + dsc_in = ogr.Open(file_name) + if dsc_in is None: + raise Exception("Reading data failed.") + if dsc_out is None: + raise NoApplicableCode("Could not connect to the database.") + layer = dsc_out.CopyLayer(dsc_in.GetLayer(), identifier, + ['OVERWRITE=YES']) + + if layer is None: + raise Exception("Writing output data to the database failed.") + + dsc_out.Destroy() + dsc_in.Destroy() + + # returns process identifier (defined within the process) + return identifier + + def store_raster_output(self, file_name, identifier): + + from subprocess import call, run, Popen, PIPE + + # Convert raster to an SQL query + command1 = ["raster2pgsql", "-a", file_name, self.schema_name + "." + identifier] + p = Popen(command1, stdout=PIPE) + # Apply the SQL query + command2 = ["psql", "-h", "localhost", "-p", "5432", "-d", self.dbname] + run(command2, stdin=p.stdout) + + return identifier + + def store_other_output(self, file_name, identifier, uuid): + + from pywps import configuration as config + from sqlalchemy import Column, Integer, String, LargeBinary, DateTime, func, create_engine + from sqlalchemy.ext.declarative import declarative_base + from sqlalchemy.orm import sessionmaker + + base = declarative_base() + + engine = sqlalchemy.create_engine('postgresql://{}:{}@{}:{}/{}'.format( + self.dbname, + self.password, + self.host, + self.port, + self.user + ) + ) + + # Create table + class Other_output(base): + __tablename__ = identifier + if isinstance(self, pg.PgStorage): + __table_args__ = {'schema': self.schema_name} + + primary_key = Column(Integer, primary_key=True) + uuid = Column(String(64)) + data = Column(LargeBinary) + timestamp = Column(DateTime(timezone=True), server_default=func.now()) + + Session = sessionmaker(engine) + session = Session() + + base.metadata.create_all(engine) + + # Open file as binary + with open(file_name, "rb") as data: + out = data.read() + + # Add data to table + output = Other_output(uuid=uuid, data=out) + session.add(output) + session.commit() + + return identifier + + def store(self, output): + """ Creates reference that is returned to the client + """ + + DATA_TYPE.is_valid_datatype(output.output_format.data_type) + + if output.output_format.data_type is DATA_TYPE.VECTOR: + self.store_vector_output(output.file, output.identifier) + elif output.output_format.data_type is DATA_TYPE.RASTER: + self.store_raster_output(output.file, output.identifier) + elif output.output_format.data_type is DATA_TYPE.OTHER: + 
self.store_other_output(output.file, output.identifier, output.uuid) + else: + # This should never happen + raise Exception("Unknown data type") + + + url = '{}.{}.{}'.format(self.dbname, self.schema_name, output.identifier) + + # returns value for database storage defined in the STORE_TYPE class, + # name of the output file and a reference + return (STORE_TYPE.DB, output.file, url) diff --git a/pywps/inout/storage/db/sqlite.py b/pywps/inout/storage/db/sqlite.py index 9ec926308..fd36425e0 100644 --- a/pywps/inout/storage/db/sqlite.py +++ b/pywps/inout/storage/db/sqlite.py @@ -19,6 +19,33 @@ def __init__(self): self.target = config.get_config_value("db", "dblocation") + def store_vector_output(self, file_name, identifier): + """ Open output file, connect to SQLite database and copiy data there + """ + from osgeo import ogr + + drv = ogr.GetDriverByName("SQLite") + dsc_out = drv.CreateDataSource(self.target) + + # connect to a database and copy output there + LOGGER.debug("Database: {}".format(self.target)) + dsc_in = ogr.Open(file_name) + if dsc_in is None: + raise Exception("Reading data failed.") + if dsc_out is None: + raise NoApplicableCode("Could not connect to the database.") + layer = dsc_out.CopyLayer(dsc_in.GetLayer(), identifier, + ['OVERWRITE=YES']) + + if layer is None: + raise Exception("Writing output data to the database failed.") + + dsc_out.Destroy() + dsc_in.Destroy() + + # returns process identifier (defined within the process) + return identifier + def store_raster_output(self, file_name, identifier): from subprocess import call @@ -27,3 +54,64 @@ def store_raster_output(self, file_name, identifier): # returns process identifier (defined within the process) return identifier + + def store_other_output(self, file_name, identifier, uuid): + + from pywps import configuration as config + from sqlalchemy import Column, Integer, String, LargeBinary, DateTime, func, create_engine + from sqlalchemy.ext.declarative import declarative_base + from sqlalchemy.orm import sessionmaker + + base = declarative_base() + + engine = sqlalchemy.create_engine("sqlite:///{}".format(self.target)) + + # Create table + class Other_output(base): + __tablename__ = identifier + if isinstance(self, pg.PgStorage): + __table_args__ = {'schema': self.schema_name} + + primary_key = Column(Integer, primary_key=True) + uuid = Column(String(64)) + data = Column(LargeBinary) + timestamp = Column(DateTime(timezone=True), server_default=func.now()) + + Session = sessionmaker(engine) + session = Session() + + base.metadata.create_all(engine) + + # Open file as binary + with open(file_name, "rb") as data: + out = data.read() + + # Add data to table + output = Other_output(uuid=uuid, data=out) + session.add(output) + session.commit() + + return identifier + + def store(self, output): + """ Creates reference that is returned to the client + """ + + DATA_TYPE.is_valid_datatype(output.output_format.data_type) + + if output.output_format.data_type is DATA_TYPE.VECTOR: + self.store_vector_output(output.file, output.identifier) + elif output.output_format.data_type is DATA_TYPE.RASTER: + self.store_raster_output(output.file, output.identifier) + elif output.output_format.data_type is DATA_TYPE.OTHER: + self.store_other_output(output.file, output.identifier, output.uuid) + else: + # This should never happen + raise Exception("Unknown data type") + + url = '{}.{}'.format(self.target, output.identifier) + + + # returns value for database storage defined in the STORE_TYPE class, + # name of the output file and a reference 
+ return (STORE_TYPE.DB, output.file, url) From 43f90421ad06137cc252faee8e711886f70a35f1 Mon Sep 17 00:00:00 2001 From: janpisl Date: Mon, 13 Aug 2018 16:42:14 +0200 Subject: [PATCH 10/21] improve formatting --- pywps/inout/storage/db/__init__.py | 2 +- pywps/inout/storage/db/pg.py | 11 +++++------ pywps/inout/storage/db/sqlite.py | 6 ++---- 3 files changed, 8 insertions(+), 11 deletions(-) diff --git a/pywps/inout/storage/db/__init__.py b/pywps/inout/storage/db/__init__.py index 96e22196b..b7bac9c52 100644 --- a/pywps/inout/storage/db/__init__.py +++ b/pywps/inout/storage/db/__init__.py @@ -55,4 +55,4 @@ def store_raster_output(self, file_name, identifier): pass def store_other_output(self, file_name, identifier, uuid): - pass \ No newline at end of file + pass diff --git a/pywps/inout/storage/db/pg.py b/pywps/inout/storage/db/pg.py index 959f5de7d..f9bbc6bf2 100644 --- a/pywps/inout/storage/db/pg.py +++ b/pywps/inout/storage/db/pg.py @@ -81,7 +81,7 @@ def store_vector_output(self, file_name, identifier): dsc_in.Destroy() # returns process identifier (defined within the process) - return identifier + return identifier def store_raster_output(self, file_name, identifier): @@ -105,20 +105,20 @@ def store_other_output(self, file_name, identifier, uuid): base = declarative_base() - engine = sqlalchemy.create_engine('postgresql://{}:{}@{}:{}/{}'.format( + engine = sqlalchemy.create_engine( + 'postgresql://{}:{}@{}:{}/{}'.format( self.dbname, self.password, self.host, self.port, self.user - ) + ) ) # Create table class Other_output(base): __tablename__ = identifier - if isinstance(self, pg.PgStorage): - __table_args__ = {'schema': self.schema_name} + __table_args__ = {'schema': self.schema_name} primary_key = Column(Integer, primary_key=True) uuid = Column(String(64)) @@ -157,7 +157,6 @@ def store(self, output): # This should never happen raise Exception("Unknown data type") - url = '{}.{}.{}'.format(self.dbname, self.schema_name, output.identifier) # returns value for database storage defined in the STORE_TYPE class, diff --git a/pywps/inout/storage/db/sqlite.py b/pywps/inout/storage/db/sqlite.py index fd36425e0..46d8ea122 100644 --- a/pywps/inout/storage/db/sqlite.py +++ b/pywps/inout/storage/db/sqlite.py @@ -44,7 +44,7 @@ def store_vector_output(self, file_name, identifier): dsc_in.Destroy() # returns process identifier (defined within the process) - return identifier + return identifier def store_raster_output(self, file_name, identifier): @@ -58,6 +58,7 @@ def store_raster_output(self, file_name, identifier): def store_other_output(self, file_name, identifier, uuid): from pywps import configuration as config + import sqlalchemy from sqlalchemy import Column, Integer, String, LargeBinary, DateTime, func, create_engine from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker @@ -69,8 +70,6 @@ def store_other_output(self, file_name, identifier, uuid): # Create table class Other_output(base): __tablename__ = identifier - if isinstance(self, pg.PgStorage): - __table_args__ = {'schema': self.schema_name} primary_key = Column(Integer, primary_key=True) uuid = Column(String(64)) @@ -111,7 +110,6 @@ def store(self, output): url = '{}.{}'.format(self.target, output.identifier) - # returns value for database storage defined in the STORE_TYPE class, # name of the output file and a reference return (STORE_TYPE.DB, output.file, url) From 8ee29bd7797069239ebeca10bc93376e4f162cfe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Pi=C5=A1l?= Date: Mon, 24 Sep 2018 
12:56:40 +0300 Subject: [PATCH 11/21] Use Enum Use Enum for DATA_TYPE (if not used and DATA_TYPE is only an object, it throws a TypeError: type "object" is not iterable --- pywps/inout/formats/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pywps/inout/formats/__init__.py b/pywps/inout/formats/__init__.py index 8ac8674b4..3acffa4c0 100644 --- a/pywps/inout/formats/__init__.py +++ b/pywps/inout/formats/__init__.py @@ -11,6 +11,7 @@ # based on Web Processing Service Best Practices Discussion Paper, OGC 12-029 # http://opengeospatial.org/standards/wps +from enum import Enum from collections import namedtuple import mimetypes from pywps.validator.mode import MODE @@ -23,7 +24,7 @@ # this should be Enum type (only compatible with Python 3) -class DATA_TYPE(object): +class DATA_TYPE(Enum): VECTOR = 0 RASTER = 1 OTHER = 2 From af7eb31c83a43e1d79bfe71a08558144e93be295 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Pi=C5=A1l?= Date: Mon, 24 Sep 2018 12:57:03 +0300 Subject: [PATCH 12/21] Fix a typo --- pywps/inout/storage/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pywps/inout/storage/__init__.py b/pywps/inout/storage/__init__.py index 73fc626f6..3110d224c 100644 --- a/pywps/inout/storage/__init__.py +++ b/pywps/inout/storage/__init__.py @@ -42,5 +42,5 @@ def __init__(self): """ """ - def store(self, ouput): + def store(self, output): pass From be48d1cab51a68a9eed257c8f70ca195afa4c83c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Pi=C5=A1l?= Date: Mon, 24 Sep 2018 13:08:17 +0300 Subject: [PATCH 13/21] Copy vector data to a schema specified in cfg Copy vector data to a schema specified in cfg instead of public schema. --- pywps/inout/storage/db/pg.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pywps/inout/storage/db/pg.py b/pywps/inout/storage/db/pg.py index f9bbc6bf2..33b2c2494 100644 --- a/pywps/inout/storage/db/pg.py +++ b/pywps/inout/storage/db/pg.py @@ -62,6 +62,7 @@ def store_vector_output(self, file_name, identifier): """ from osgeo import ogr + db_location = self.schema_name + "." 
+ identifier dsc_out = ogr.Open("PG:" + self.target) # connect to a database and copy output there @@ -71,7 +72,7 @@ def store_vector_output(self, file_name, identifier): raise Exception("Reading data failed.") if dsc_out is None: raise NoApplicableCode("Could not connect to the database.") - layer = dsc_out.CopyLayer(dsc_in.GetLayer(), identifier, + layer = dsc_out.CopyLayer(dsc_in.GetLayer(), db_location, ['OVERWRITE=YES']) if layer is None: From d7ababcc272d531a0100f33a6048921f820f6a52 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Pi=C5=A1l?= Date: Mon, 24 Sep 2018 13:33:41 +0300 Subject: [PATCH 14/21] FIx a typo --- pywps/inout/storage/db/pg.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pywps/inout/storage/db/pg.py b/pywps/inout/storage/db/pg.py index 33b2c2494..73b6b3736 100644 --- a/pywps/inout/storage/db/pg.py +++ b/pywps/inout/storage/db/pg.py @@ -58,7 +58,7 @@ def initdb(self): pass def store_vector_output(self, file_name, identifier): - """ Open output file, connect to SQLite database and copiy data there + """ Open output file, connect to PG database and copy data there """ from osgeo import ogr From ffc3ed31fb08d1fa87f902b8a2d2d25861aaf9b8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Pi=C5=A1l?= Date: Mon, 24 Sep 2018 13:34:01 +0300 Subject: [PATCH 15/21] Remvoe whitespaces --- tests/test_storage.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/tests/test_storage.py b/tests/test_storage.py index 46726f1d8..6664f65b6 100644 --- a/tests/test_storage.py +++ b/tests/test_storage.py @@ -107,7 +107,7 @@ def test_store(self): supported_formats=[FORMATS.GML]) vector_output.file = get_vector_file() - store_file = self.storage.store(vector_output) + store_file = self.storage.store(vector_output) assert len(store_file) == 3 assert store_file[0] == STORE_TYPE.PATH assert isinstance(store_file[1], str) @@ -125,22 +125,17 @@ def setUp(self): set_test_configuration() self.storage = PgStorage() - - dbsettings = "db" self.dbname = configuration.get_config_value(dbsettings, "dbname") self.user = configuration.get_config_value(dbsettings, "user") self.password = configuration.get_config_value(dbsettings, "password") self.host = configuration.get_config_value(dbsettings, "host") self.port = configuration.get_config_value(dbsettings, "port") - - self.storage.target = "dbname={} user={} password={} host={} port={}".format( self.dbname, self.user, self.password, self.host, self.port ) - self.storage.schema_name = configuration.get_config_value("db", "schema_name") self.storage.dbname = configuration.get_config_value("db", "dbname") From b97f7a55de76fbd5acaa2f1b0441ec8713837534 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Pi=C5=A1l?= Date: Mon, 24 Sep 2018 13:34:29 +0300 Subject: [PATCH 16/21] Edit comment formatting --- pywps/inout/outputs.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/pywps/inout/outputs.py b/pywps/inout/outputs.py index f3894c6a9..63b3cd25a 100644 --- a/pywps/inout/outputs.py +++ b/pywps/inout/outputs.py @@ -111,12 +111,10 @@ def _json_reference(self, data): else: self.storage = FileStorage() - """ - to be implemented: + """to be implemented: elif store_type == 's3' and \ config.get_config_value('s3', 'bucket_name'): - self.storage = S3Storage() - """ + self.storage = S3Storage()""" if self.data_format: if self.data_format.mime_type: From 833a68ff54df45c937670cc42fbc8b5a46582ed9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Pi=C5=A1l?= Date: Mon, 24 Sep 2018 16:02:27 +0300 Subject: [PATCH 17/21] edit 
comment formatting --- pywps/inout/outputs.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pywps/inout/outputs.py b/pywps/inout/outputs.py index 63b3cd25a..f30b58572 100644 --- a/pywps/inout/outputs.py +++ b/pywps/inout/outputs.py @@ -111,10 +111,10 @@ def _json_reference(self, data): else: self.storage = FileStorage() - """to be implemented: - elif store_type == 's3' and \ - config.get_config_value('s3', 'bucket_name'): - self.storage = S3Storage()""" + #to be implemented: + #elif store_type == 's3' and \ + # config.get_config_value('s3', 'bucket_name'): + # self.storage = S3Storage() if self.data_format: if self.data_format.mime_type: From 5da5a26a096f6e598f96d8ba4ff5df0d713d010e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Pi=C5=A1l?= Date: Mon, 24 Sep 2018 16:18:26 +0300 Subject: [PATCH 18/21] Remove redundant imports --- pywps/inout/storage/db/pg.py | 1 - pywps/inout/storage/db/sqlite.py | 1 - tests/test_storage.py | 4 +--- 3 files changed, 1 insertion(+), 5 deletions(-) diff --git a/pywps/inout/storage/db/pg.py b/pywps/inout/storage/db/pg.py index 73b6b3736..f19e12089 100644 --- a/pywps/inout/storage/db/pg.py +++ b/pywps/inout/storage/db/pg.py @@ -99,7 +99,6 @@ def store_raster_output(self, file_name, identifier): def store_other_output(self, file_name, identifier, uuid): - from pywps import configuration as config from sqlalchemy import Column, Integer, String, LargeBinary, DateTime, func, create_engine from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker diff --git a/pywps/inout/storage/db/sqlite.py b/pywps/inout/storage/db/sqlite.py index 46d8ea122..094b4be84 100644 --- a/pywps/inout/storage/db/sqlite.py +++ b/pywps/inout/storage/db/sqlite.py @@ -57,7 +57,6 @@ def store_raster_output(self, file_name, identifier): def store_other_output(self, file_name, identifier, uuid): - from pywps import configuration as config import sqlalchemy from sqlalchemy import Column, Integer, String, LargeBinary, DateTime, func, create_engine from sqlalchemy.ext.declarative import declarative_base diff --git a/tests/test_storage.py b/tests/test_storage.py index 6664f65b6..51b5c90bf 100644 --- a/tests/test_storage.py +++ b/tests/test_storage.py @@ -2,14 +2,12 @@ import atexit import shutil import tempfile -import sqlalchemy from sqlalchemy import create_engine, inspect from pywps import FORMATS from pywps.inout.storage import DummyStorage, STORE_TYPE from pywps.inout.storage.file import FileStorage from pywps.inout.storage.db.pg import PgStorage from pywps.inout.storage.db.sqlite import SQLiteStorage -from pywps.inout.storage.db import DbStorage from pywps import ComplexOutput import os from pywps import configuration @@ -131,7 +129,7 @@ def setUp(self): self.password = configuration.get_config_value(dbsettings, "password") self.host = configuration.get_config_value(dbsettings, "host") self.port = configuration.get_config_value(dbsettings, "port") - + self.storage.target = "dbname={} user={} password={} host={} port={}".format( self.dbname, self.user, self.password, self.host, self.port ) From 4221874c9e310b0591a42791ab3fb51a83867e71 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Pi=C5=A1l?= Date: Mon, 24 Sep 2018 16:40:04 +0300 Subject: [PATCH 19/21] Remove obsolete code --- pywps/inout/storage/db/pg.py | 4 ++-- pywps/inout/storage/db/sqlite.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pywps/inout/storage/db/pg.py b/pywps/inout/storage/db/pg.py index f19e12089..e6413b6f3 100644 --- 
a/pywps/inout/storage/db/pg.py +++ b/pywps/inout/storage/db/pg.py @@ -86,7 +86,7 @@ def store_vector_output(self, file_name, identifier): def store_raster_output(self, file_name, identifier): - from subprocess import call, run, Popen, PIPE + from subprocess import run, Popen, PIPE # Convert raster to an SQL query command1 = ["raster2pgsql", "-a", file_name, self.schema_name + "." + identifier] @@ -105,7 +105,7 @@ def store_other_output(self, file_name, identifier, uuid): base = declarative_base() - engine = sqlalchemy.create_engine( + engine = create_engine( 'postgresql://{}:{}@{}:{}/{}'.format( self.dbname, self.password, diff --git a/pywps/inout/storage/db/sqlite.py b/pywps/inout/storage/db/sqlite.py index 094b4be84..812102c47 100644 --- a/pywps/inout/storage/db/sqlite.py +++ b/pywps/inout/storage/db/sqlite.py @@ -64,7 +64,7 @@ def store_other_output(self, file_name, identifier, uuid): base = declarative_base() - engine = sqlalchemy.create_engine("sqlite:///{}".format(self.target)) + engine = create_engine("sqlite:///{}".format(self.target)) # Create table class Other_output(base): From 6ef813ed0a2d9c4f3d0769d700bfaea39996803a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Pi=C5=A1l?= Date: Mon, 24 Sep 2018 17:05:49 +0300 Subject: [PATCH 20/21] Remove obsolete import and whitespaces --- pywps/inout/storage/db/sqlite.py | 1 - tests/test_storage.py | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/pywps/inout/storage/db/sqlite.py b/pywps/inout/storage/db/sqlite.py index 812102c47..9499e4825 100644 --- a/pywps/inout/storage/db/sqlite.py +++ b/pywps/inout/storage/db/sqlite.py @@ -57,7 +57,6 @@ def store_raster_output(self, file_name, identifier): def store_other_output(self, file_name, identifier, uuid): - import sqlalchemy from sqlalchemy import Column, Integer, String, LargeBinary, DateTime, func, create_engine from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker diff --git a/tests/test_storage.py b/tests/test_storage.py index 51b5c90bf..e08e15289 100644 --- a/tests/test_storage.py +++ b/tests/test_storage.py @@ -156,7 +156,7 @@ def test_store_vector(self): assert store_vector[0] == STORE_TYPE.DB assert isinstance(store_vector[1], str) assert isinstance(store_vector[2], str) - + # Parse reference into dbname, schema and table reference = store_vector[2].split(".") From 6573678431a5996f1c28120d8018919d3df4e930 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Pi=C5=A1l?= Date: Mon, 24 Sep 2018 21:58:19 +0300 Subject: [PATCH 21/21] Remove whitespaces --- tests/test_storage.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/tests/test_storage.py b/tests/test_storage.py index e08e15289..224431d4b 100644 --- a/tests/test_storage.py +++ b/tests/test_storage.py @@ -262,7 +262,7 @@ def tearDown(self): os.remove(self.storage.target) except: pass - + def test_sqlite_storage(self): assert isinstance(self.storage, SQLiteStorage) @@ -313,7 +313,6 @@ def test_store_raster(self): assert (reference[1] + "_rasters") in ins.get_table_names() - def test_store_other(self): # Test text output @@ -338,7 +337,6 @@ def test_store_other(self): ins = inspect(engine) assert (reference[1] in ins.get_table_names()) - # Test CSV output csv_output = ComplexOutput('csv', 'CSV output', supported_formats=[FORMATS.CSV])
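
Taken together, these tests exercise the same round trip that store_other_output() implements: create a table named after the output identifier, write the file contents as a binary row, and confirm through SQLAlchemy's inspector that the table exists. The condensed sketch below reproduces that round trip; the in-memory SQLite URL, the csv_output table name and the sample payload are assumptions made so it runs standalone (the real code targets the configured dblocation or the PostgreSQL connection string instead).

    # Condensed sketch of the store_other_output() table plus the inspector
    # check used by the tests. The in-memory SQLite database, the "csv_output"
    # identifier and the sample payload are assumed for this demo only.
    from sqlalchemy import (Column, Integer, String, LargeBinary, DateTime,
                            func, create_engine, inspect)
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import sessionmaker

    base = declarative_base()

    class OtherOutput(base):
        __tablename__ = "csv_output"          # stands in for output.identifier
        primary_key = Column(Integer, primary_key=True)
        uuid = Column(String(64))
        data = Column(LargeBinary)
        timestamp = Column(DateTime(timezone=True), server_default=func.now())

    engine = create_engine("sqlite:///:memory:")
    base.metadata.create_all(engine)

    # store one row of raw bytes, the way store_other_output() stores the file
    session = sessionmaker(engine)()
    session.add(OtherOutput(uuid="1234", data=b"band,yield\n1,42\n"))
    session.commit()

    # the assertion mirrors tests/test_storage.py: the table must be visible
    assert "csv_output" in inspect(engine).get_table_names()

This is also why the tests can split the returned reference on ".": for the SQLite backend the part before the last dot is the database location and the part after it is the table name, which is exactly what rsplit(".", 1) recovers before the inspector check.
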