This repository has been archived by the owner on Jun 4, 2020. It is now read-only.

Merge pull request #65 from Oslandia/partial_pg_checkout
Partial pg checkout
vmora authored May 3, 2017
2 parents 718ca4a + 8347809 commit 6067153
Showing 4 changed files with 80 additions and 22 deletions.
2 changes: 1 addition & 1 deletion metadata.txt
@@ -6,7 +6,7 @@
name=versioning
qgisMinimumVersion=2.8
description=postgis database versioning
version=0.7
version=0.8
author=Oslandia
[email protected]
about=A tool to manage data history, branches, and to work offline with your PostGIS-stored data and QGIS.
31 changes: 30 additions & 1 deletion plugin.py
@@ -909,6 +909,7 @@ def checkout(self):
user_selected_features.append(layer_selected_features_ids)
else:
user_selected_features.append([])

if not conn_info:
conn_info = uri.connectionInfo()
else:
@@ -952,11 +953,39 @@ def checkout_pg(self):
database layers"""
# for each connection, we need the list of tables
tables_for_conninfo = []
user_selected_features = []
uri = None
conn_info = ''
for layer_id in self.current_layers:
layer = QgsMapLayerRegistry.instance().mapLayer( layer_id )
uri = QgsDataSourceURI(layer.source())

# Get actual PK for the corresponding table
actual_table_pk = versioning.get_actual_pk( uri,self.pg_conn_info() )
#print "Actual table pk = " + actual_table_pk

layer_selected_features_ids = [f[actual_table_pk] for f in layer.selectedFeatures()]

# Check whether the PK from the view [uri.keyColumn()] matches the actual PK.
# If not, warn and fall back to the full table for that layer. We need the
# right PK from the view in order to use the efficient selectedFeaturesIds();
# selectedFeatures() or other ways that build a list of QgsFeature objects do
# not scale well.
if layer_selected_features_ids:
if uri.keyColumn() != actual_table_pk:
QMessageBox.warning(None,"Warning","Layer \""+layer.name()+
" \" does not have the right primary key.\n\nCheckout will "
"proceed without the selected features subset.")
user_selected_features.append([])
else:
QMessageBox.warning(None,"Warning","You will be checking out "
"the subset of "+str(len(layer_selected_features_ids))+" features "
"you selected in layer \""+layer.name()+"\".\n\nIf you want "
"the whole data set for that layer, abort checkout in the pop "
"up asking for a filename, unselect features and start over.")
user_selected_features.append(layer_selected_features_ids)
else:
user_selected_features.append([])

if not conn_info:
conn_info = uri.connectionInfo()
else:
@@ -1003,7 +1032,7 @@ def checkout_pg(self):
return
print "checking out ", tables_for_conninfo, " from ", uri.connectionInfo()
versioning.pg_checkout( self.pg_conn_info(),
tables_for_conninfo, working_copy_schema )
tables_for_conninfo, working_copy_schema, user_selected_features )

# add layers from offline version
grp_idx = self.iface.legendInterface().addGroup( working_copy_schema )
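In isolation, the per-layer decision added to checkout_pg above reduces to the following minimal sketch (the pick_subset helper, the id values and the key names are hypothetical, not part of the plugin): keep the selection only when the view exposes the base table's primary key, otherwise fall back to the whole table.

# Minimal sketch of the per-layer subset decision (hypothetical helper).
def pick_subset(selected_ids, view_key_column, actual_table_pk):
    if not selected_ids:
        return []                 # nothing selected: check out the whole table
    if view_key_column != actual_table_pk:
        return []                 # view has the wrong PK: whole table (after a warning)
    return selected_ids           # check out only these features

user_selected_features = []
user_selected_features.append(pick_subset([1, 2, 3], 'ogc_fid', 'hid'))  # -> [], PK mismatch
user_selected_features.append(pick_subset([1, 2, 3], 'hid', 'hid'))      # -> [1, 2, 3]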
11 changes: 11 additions & 0 deletions test/partial_checkout_test.py
@@ -39,6 +39,8 @@ def test():
pcon.close()

versioning.historize('dbname=epanet_test_db', 'epanet')

# spatialite working copy
versioning.checkout("dbname=epanet_test_db",["epanet_trunk_rev_head.junctions","epanet_trunk_rev_head.pipes"], sqlite_test_filename, [[1, 2, 3], []])
assert( os.path.isfile(sqlite_test_filename) and "sqlite file must exist at this point" )

@@ -47,5 +49,14 @@ def test():
scur.execute("SELECT * from junctions")
assert len(scur.fetchall()) == 3

# postgres working copy
versioning.pg_checkout("dbname=epanet_test_db",["epanet_trunk_rev_head.junctions","epanet_trunk_rev_head.pipes"], 'my_working_copy', [[1, 2, 3], []])

pcon = psycopg2.connect("dbname=epanet_test_db")
pcur = pcon.cursor()
pcur.execute("SELECT * from my_working_copy.junctions_view")
assert len(pcur.fetchall()) == 3


if __name__ == "__main__":
test()
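The new fourth argument to checkout and pg_checkout is a list parallel to the table list: one list of feature ids per table, with [] meaning the whole table. Since the argument defaults to an empty list in versioning.py below, existing callers keep getting full checkouts; a hedged example against the same test database (the schema name full_working_copy is made up):

# Omitting the feature lists checks out both tables in full.
versioning.pg_checkout("dbname=epanet_test_db",
    ["epanet_trunk_rev_head.junctions", "epanet_trunk_rev_head.pipes"],
    'full_working_copy')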
58 changes: 38 additions & 20 deletions versioning.py
@@ -1286,7 +1286,7 @@ def revisions(pg_conn_info, schema):
# we need the initial_revision table all the same
# for each table we need a diff and a view and triggers

def pg_checkout(pg_conn_info, pg_table_names, working_copy_schema):
def pg_checkout(pg_conn_info, pg_table_names, working_copy_schema, selected_feature_lists = []):
"""create postgres working copy from versioned database tables
pg_table_names must be complete schema.table names
the schema name must end with _branch_rev_head
@@ -1310,7 +1310,7 @@ def pg_checkout(pg_conn_info, pg_table_names, working_copy_schema):
pcur.execute("CREATE SCHEMA "+wcs)

first_table = True
for pg_table_name in pg_table_names:
for pg_table_name, feature_list in list(izip_longest(pg_table_names, selected_feature_lists)):
[schema, table] = pg_table_name.split('.')
[schema, sep, branch] = schema[:-9].rpartition('_')
del sep
@@ -1371,26 +1371,44 @@ def pg_checkout(pg_conn_info, pg_table_names, working_copy_schema):
"REFERENCES "+wcs+"."+table+"_diff("+pkey+") "
"ON UPDATE CASCADE ON DELETE CASCADE")

if feature_list:
additional_filter = "AND t.{pkey} IN ({features})".format(
pkey=pkey,
features = ','.join(str(f) for f in feature_list)
)
else:
additional_filter = ""

current_rev_sub = "(SELECT MAX(rev) FROM "+wcs+".initial_revision)"
pcur.execute("CREATE VIEW "+wcs+"."+table+"_view AS "
"SELECT "+pkey+", "+cols+" "
"FROM (SELECT "+cols+", "+hcols+" FROM "+wcs+"."+table+"_diff "
"WHERE ("+branch+"_rev_end IS NULL "
"OR "+branch+"_rev_end >= "+current_rev_sub+"+1 ) "
"AND "+branch+"_rev_begin IS NOT NULL "
"UNION "
"(SELECT DISTINCT ON ("+pkey+") "+cols+", t."+hcols+" "
"FROM "+schema+"."+table+" AS t "
"LEFT JOIN (SELECT "+pkey+" FROM "+wcs+"."+table+"_diff) "
"AS d "
"ON t."+pkey+" = d."+pkey+" "
"WHERE d."+pkey+" IS NULL "
"AND t."+branch+"_rev_begin <= "+current_rev_sub+" "
"AND ((t."+branch+"_rev_end IS NULL "
"OR t."+branch+"_rev_end >= "+current_rev_sub+") "
"AND t."+branch+"_rev_begin IS NOT NULL ))"
") AS src ")
pcur.execute("""
CREATE VIEW {wcs}.{table}_view AS
SELECT {pkey}, {cols}
FROM (
SELECT {cols}, {hcols}
FROM {wcs}.{table}_diff
WHERE ({branch}_rev_end IS NULL OR {branch}_rev_end >= {current_rev_sub}+1 )
AND {branch}_rev_begin IS NOT NULL
UNION
SELECT DISTINCT ON ({pkey}) {cols}, t.{hcols}
FROM {schema}.{table} AS t
LEFT JOIN (SELECT {pkey} FROM {wcs}.{table}_diff) AS d ON t.{pkey} = d.{pkey}
WHERE d.{pkey} IS NULL
AND t.{branch}_rev_begin <= {current_rev_sub}
AND ((t.{branch}_rev_end IS NULL
OR t.{branch}_rev_end >= {current_rev_sub})
AND t.{branch}_rev_begin IS NOT NULL)
{additional_filter}
) AS src """.format(
wcs=wcs,
schema=schema,
table=table,
pkey=pkey,
cols=cols,
hcols=hcols,
branch=branch,
current_rev_sub=current_rev_sub,
additional_filter=additional_filter
))

max_fid_sub = ("( SELECT MAX(max_fid) FROM ( SELECT MAX("+pkey+") "
"AS max_fid FROM "+wcs+"."+table+"_diff "
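For illustration, the filtering machinery added above can be exercised on its own: izip_longest pairs each table with its (possibly missing) feature list, and additional_filter expands to a plain SQL IN clause. The primary key name hid is hypothetical; the table names are the ones used in the test above.

# Standalone sketch of the pairing and the generated filter (Python 2).
from itertools import izip_longest

pg_table_names = ['epanet_trunk_rev_head.junctions', 'epanet_trunk_rev_head.pipes']
selected_feature_lists = [[1, 2, 3]]       # shorter than the table list on purpose

for pg_table_name, feature_list in izip_longest(pg_table_names, selected_feature_lists):
    pkey = 'hid'                           # hypothetical primary key name
    if feature_list:
        additional_filter = "AND t.{pkey} IN ({features})".format(
            pkey=pkey,
            features=','.join(str(f) for f in feature_list))
    else:                                  # None or [] -> no extra filter, full table
        additional_filter = ""
    print pg_table_name, repr(additional_filter)

# epanet_trunk_rev_head.junctions 'AND t.hid IN (1,2,3)'
# epanet_trunk_rev_head.pipes ''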

