diff --git a/.gitignore b/.gitignore index 6c635b67..9f296503 100644 --- a/.gitignore +++ b/.gitignore @@ -23,4 +23,8 @@ nbbuild/ nbdist/ .nb-gradle/ -/log/ \ No newline at end of file +### Sqlite ### +*-journal + +/log/ + diff --git a/analysis/pom.xml b/analysis/pom.xml index 51790a4c..3e989603 100644 --- a/analysis/pom.xml +++ b/analysis/pom.xml @@ -22,7 +22,7 @@ limitations under the License. com.qihoo.qsql qsql - 0.5 + 0.6 qsql-calcite-analysis diff --git a/analysis/src/main/codegen/config.fmpp b/analysis/src/main/codegen/config.fmpp index a285093a..a434a80c 100644 --- a/analysis/src/main/codegen/config.fmpp +++ b/analysis/src/main/codegen/config.fmpp @@ -70,7 +70,8 @@ data: { statementParserMethods: [ "SqlShowTables()", "SqlShowSchemas()", - "SqlUseSchema()" + "SqlUseSchema()", + "SqlInsertOutput()" ] # List of methods for parsing custom literals. diff --git a/analysis/src/main/codegen/includes/parserImpls.ftl b/analysis/src/main/codegen/includes/parserImpls.ftl index 7af59eff..4b816d26 100644 --- a/analysis/src/main/codegen/includes/parserImpls.ftl +++ b/analysis/src/main/codegen/includes/parserImpls.ftl @@ -95,3 +95,42 @@ SqlNode SqlUseSchema(): } } +SqlNode SqlInsertOutput(): +{ + SqlParserPos pos; + SqlNode position; + SqlNodeList extendList = null; + SqlNodeList columnList = null; + SqlIdentifier dataSource; + SqlNode select; +} +{ + <INSERT> { pos = getPos(); } + <INTO> position = CompoundIdentifier() + [ + LOOKAHEAD(5) + [ <EXTEND> ] + extendList = ExtendList() { + position = extend(position, extendList); + } + ] + [ + LOOKAHEAD(2) + { final Pair<SqlNodeList, SqlNodeList> p; } + p = ParenthesizedCompoundIdentifierList() { + if (p.right.size() > 0) { + position = extend(position, p.right); + } + if (p.left.size() > 0) { + columnList = p.left; + } + } + ] + + <IN> dataSource = CompoundIdentifier() + select = OrderedQueryOrExpr(ExprContext.ACCEPT_QUERY) + { + return new SqlInsertOutput(pos, position, dataSource, columnList, select); + } +} + diff --git a/analysis/src/main/codegen/templates/Parser.jj b/analysis/src/main/codegen/templates/Parser.jj index a243fcc3..cd20faa5 100644 --- a/analysis/src/main/codegen/templates/Parser.jj +++ b/analysis/src/main/codegen/templates/Parser.jj @@ -968,8 +968,6 @@ SqlNode SqlStmt() : stmt = SqlExplain() | stmt = SqlDescribe() - | - stmt = SqlInsert() | stmt = SqlDelete() | @@ -984,6 +982,11 @@ SqlNode SqlStmt() : } } +/** + * Updated by qsql-team + * | stmt = SqlInsert() + */ + /** * Parses an SQL statement followed by the end-of-file symbol. */ @@ -1102,6 +1105,11 @@ SqlNode SqlExplain() : } } +/** + * Updated by qsql-team + * | stmt = SqlInsert() + */ + /** Parses a query (SELECT or VALUES) * or DML statement (INSERT, UPDATE, DELETE, MERGE).
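 * Editor's illustration (not part of this patch): the SqlInsertOutput() production added in parserImpls.ftl above is wired in through config.fmpp and takes over from the removed SqlInsert() alternative; it is intended to accept write-out statements of roughly this shape, where the target path, column list and data-source name are hypothetical examples inferred from the grammar: INSERT INTO `/tmp/query_result` (id, name) IN hdfs SELECT id, name FROM db.tbl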
*/ SqlNode SqlQueryOrDml() : @@ -1111,8 +1119,6 @@ SqlNode SqlQueryOrDml() : { ( stmt = OrderedQueryOrExpr(ExprContext.ACCEPT_QUERY) - | - stmt = SqlInsert() | stmt = SqlDelete() | diff --git a/analysis/src/main/java/org/apache/calcite/adapter/mysql/MySQLSchema.java b/analysis/src/main/java/org/apache/calcite/adapter/custom/JdbcSchema.java similarity index 51% rename from analysis/src/main/java/org/apache/calcite/adapter/mysql/MySQLSchema.java rename to analysis/src/main/java/org/apache/calcite/adapter/custom/JdbcSchema.java index 24898506..496e7b97 100644 --- a/analysis/src/main/java/org/apache/calcite/adapter/mysql/MySQLSchema.java +++ b/analysis/src/main/java/org/apache/calcite/adapter/custom/JdbcSchema.java @@ -1,8 +1,8 @@ -package org.apache.calcite.adapter.mysql; - -import org.apache.calcite.schema.impl.AbstractSchema; - -//TODO reduce all of default schemas like this which has no field and param -public class MySQLSchema extends AbstractSchema { - public MySQLSchema() {} -} +package org.apache.calcite.adapter.custom; + +import org.apache.calcite.schema.impl.AbstractSchema; + +//TODO reduce all of default schemas like this which has no field and param +public class JdbcSchema extends AbstractSchema { + public JdbcSchema() {} +} diff --git a/analysis/src/main/java/org/apache/calcite/adapter/mysql/MySQLSchemaFactory.java b/analysis/src/main/java/org/apache/calcite/adapter/custom/JdbcSchemaFactory.java similarity index 50% rename from analysis/src/main/java/org/apache/calcite/adapter/mysql/MySQLSchemaFactory.java rename to analysis/src/main/java/org/apache/calcite/adapter/custom/JdbcSchemaFactory.java index cb371507..442916f6 100644 --- a/analysis/src/main/java/org/apache/calcite/adapter/mysql/MySQLSchemaFactory.java +++ b/analysis/src/main/java/org/apache/calcite/adapter/custom/JdbcSchemaFactory.java @@ -1,19 +1,19 @@ -package org.apache.calcite.adapter.mysql; - -import org.apache.calcite.schema.Schema; -import org.apache.calcite.schema.SchemaFactory; -import org.apache.calcite.schema.SchemaPlus; - -import java.util.Map; - -public class MySQLSchemaFactory implements SchemaFactory { - - public static final MySQLSchemaFactory INSTANCE = new MySQLSchemaFactory(); - - private MySQLSchemaFactory() {} - - @Override - public Schema create(SchemaPlus parentSchema, String name, Map operand) { - return new MySQLSchema(); - } -} +package org.apache.calcite.adapter.custom; + +import org.apache.calcite.schema.Schema; +import org.apache.calcite.schema.SchemaFactory; +import org.apache.calcite.schema.SchemaPlus; + +import java.util.Map; + +public class JdbcSchemaFactory implements SchemaFactory { + + public static final JdbcSchemaFactory INSTANCE = new JdbcSchemaFactory(); + + private JdbcSchemaFactory() {} + + @Override + public Schema create(SchemaPlus parentSchema, String name, Map operand) { + return new JdbcSchema(); + } +} diff --git a/analysis/src/main/java/org/apache/calcite/adapter/mysql/MySQLTable.java b/analysis/src/main/java/org/apache/calcite/adapter/custom/JdbcTable.java similarity index 80% rename from analysis/src/main/java/org/apache/calcite/adapter/mysql/MySQLTable.java rename to analysis/src/main/java/org/apache/calcite/adapter/custom/JdbcTable.java index 4f271edd..1a84513e 100644 --- a/analysis/src/main/java/org/apache/calcite/adapter/mysql/MySQLTable.java +++ b/analysis/src/main/java/org/apache/calcite/adapter/custom/JdbcTable.java @@ -1,63 +1,67 @@ -package org.apache.calcite.adapter.mysql; - -import org.apache.calcite.plan.RelOptCluster; -import 
org.apache.calcite.plan.RelOptTable; -import org.apache.calcite.rel.RelNode; -import org.apache.calcite.rel.type.RelDataType; -import org.apache.calcite.rel.type.RelDataTypeFactory; -import org.apache.calcite.schema.TranslatableTable; -import org.apache.calcite.schema.impl.AbstractTable; - -import java.util.Properties; - -public class MySQLTable extends AbstractTable implements TranslatableTable { - public final String jdbcDriver; - public final String jdbcUrl; - public final String jdbcUser; - public final String jdbcPassword; - public final String tableName; - public final String modelUri; - public final String dbName; - - public Properties properties; - - public Properties getProperties() { - return properties; - } - - MySQLTable(String tableName, String dbName, - String driver, String url, String user, - String password, String modelUri) { - this.modelUri = modelUri; - this.jdbcDriver = driver; - this.jdbcUrl = url; - this.jdbcUser = user; - this.jdbcPassword = password; - this.tableName = tableName; - this.dbName = dbName; - - this.properties = new Properties(); - properties.put("jdbcDriver", driver); - properties.put("jdbcUrl", url); - properties.put("jdbcUser", user); - properties.put("jdbcPassword", password); - properties.put("tableName", tableName); - properties.put("dbName", dbName); - } - - @Override - public RelNode toRel(RelOptTable.ToRelContext context, RelOptTable relOptTable) { - final RelOptCluster cluster = context.getCluster(); - return new MySQLTableScan(cluster, cluster.traitSet(), relOptTable); - } - - @Override - public String getBaseName() { - return dbName; - } - - @Override - public RelDataType getRowType(RelDataTypeFactory typeFactory) { - return super.getRowType(modelUri, dbName, tableName, typeFactory); - } -} +package org.apache.calcite.adapter.custom; + +import org.apache.calcite.plan.RelOptCluster; +import org.apache.calcite.plan.RelOptTable; +import org.apache.calcite.rel.RelNode; +import org.apache.calcite.rel.type.RelDataType; +import org.apache.calcite.rel.type.RelDataTypeFactory; +import org.apache.calcite.schema.TranslatableTable; +import org.apache.calcite.schema.impl.AbstractTable; + +import java.util.Properties; + +public class JdbcTable extends AbstractTable implements TranslatableTable { + public final String jdbcDriver; + public final String jdbcUrl; + public final String jdbcUser; + public final String jdbcPassword; + public final String tableName; + public final String modelUri; + public final String dbName; + public final String dbType; + + + public Properties properties; + + public Properties getProperties() { + return properties; + } + + JdbcTable(String tableName, String dbName, + String driver, String url, String user, + String password, String modelUri, String dbType) { + this.modelUri = modelUri; + this.jdbcDriver = driver; + this.jdbcUrl = url; + this.jdbcUser = user; + this.jdbcPassword = password; + this.tableName = tableName; + this.dbName = dbName; + this.dbType = dbType; + + this.properties = new Properties(); + properties.put("jdbcDriver", driver); + properties.put("jdbcUrl", url); + properties.put("jdbcUser", user); + properties.put("jdbcPassword", password); + properties.put("tableName", tableName); + properties.put("dbName", dbName); + properties.put("dbType", dbType); + } + + @Override + public RelNode toRel(RelOptTable.ToRelContext context, RelOptTable relOptTable) { + final RelOptCluster cluster = context.getCluster(); + return new JdbcTableScan(cluster, cluster.traitSet(), relOptTable); + } + + @Override + public String 
getBaseName() { + return dbName; + } + + @Override + public RelDataType getRowType(RelDataTypeFactory typeFactory) { + return super.getRowType(modelUri, dbName, tableName, typeFactory); + } +} diff --git a/analysis/src/main/java/org/apache/calcite/adapter/mysql/MySQLTableFactory.java b/analysis/src/main/java/org/apache/calcite/adapter/custom/JdbcTableFactory.java similarity index 77% rename from analysis/src/main/java/org/apache/calcite/adapter/mysql/MySQLTableFactory.java rename to analysis/src/main/java/org/apache/calcite/adapter/custom/JdbcTableFactory.java index 6410ee11..301d5562 100644 --- a/analysis/src/main/java/org/apache/calcite/adapter/mysql/MySQLTableFactory.java +++ b/analysis/src/main/java/org/apache/calcite/adapter/custom/JdbcTableFactory.java @@ -1,27 +1,28 @@ -package org.apache.calcite.adapter.mysql; - -import org.apache.calcite.rel.type.RelDataType; -import org.apache.calcite.schema.SchemaPlus; -import org.apache.calcite.schema.Table; -import org.apache.calcite.schema.TableFactory; - -import java.util.Map; - -public class MySQLTableFactory implements TableFactory { - - @Override - public Table create(SchemaPlus schema, String name, Map operand, RelDataType rowType) { - String tableName = operand.get("tableName").toString(); - String dbName = operand.get("dbName").toString(); - String jdbcUrl = operand.get("jdbcUrl").toString(); - String jdbcUser = operand.get("jdbcUser").toString(); - String jdbcPassword = operand.get("jdbcPassword").toString(); - String jdbcDriver = operand.get("jdbcDriver").toString(); - String modelUri = operand.get("modelUri").toString(); - - return new MySQLTable(tableName, dbName, - jdbcDriver, jdbcUrl, - jdbcUser, jdbcPassword, - modelUri); - } +package org.apache.calcite.adapter.custom; + +import org.apache.calcite.rel.type.RelDataType; +import org.apache.calcite.schema.SchemaPlus; +import org.apache.calcite.schema.Table; +import org.apache.calcite.schema.TableFactory; + +import java.util.Map; + +public class JdbcTableFactory implements TableFactory { + + @Override + public Table create(SchemaPlus schema, String name, Map operand, RelDataType rowType) { + String tableName = operand.get("tableName").toString(); + String dbName = operand.get("dbName").toString(); + String jdbcUrl = operand.get("jdbcUrl").toString(); + String jdbcUser = operand.get("jdbcUser").toString(); + String jdbcPassword = operand.get("jdbcPassword").toString(); + String jdbcDriver = operand.get("jdbcDriver").toString(); + String modelUri = operand.get("modelUri").toString(); + String dbType = operand.get("dbType").toString(); + + return new JdbcTable(tableName, dbName, + jdbcDriver, jdbcUrl, + jdbcUser, jdbcPassword, + modelUri, dbType); + } } \ No newline at end of file diff --git a/analysis/src/main/java/org/apache/calcite/adapter/mysql/MySQLTableScan.java b/analysis/src/main/java/org/apache/calcite/adapter/custom/JdbcTableScan.java similarity index 53% rename from analysis/src/main/java/org/apache/calcite/adapter/mysql/MySQLTableScan.java rename to analysis/src/main/java/org/apache/calcite/adapter/custom/JdbcTableScan.java index 5d7942ed..469ba93d 100644 --- a/analysis/src/main/java/org/apache/calcite/adapter/mysql/MySQLTableScan.java +++ b/analysis/src/main/java/org/apache/calcite/adapter/custom/JdbcTableScan.java @@ -1,13 +1,13 @@ -package org.apache.calcite.adapter.mysql; - -import org.apache.calcite.plan.RelOptCluster; -import org.apache.calcite.plan.RelOptTable; -import org.apache.calcite.plan.RelTraitSet; -import org.apache.calcite.rel.core.TableScan; - -public class 
MySQLTableScan extends TableScan { - - protected MySQLTableScan(RelOptCluster cluster, RelTraitSet traitSet, RelOptTable table) { - super(cluster, traitSet, table); - } -} +package org.apache.calcite.adapter.custom; + +import org.apache.calcite.plan.RelOptCluster; +import org.apache.calcite.plan.RelOptTable; +import org.apache.calcite.plan.RelTraitSet; +import org.apache.calcite.rel.core.TableScan; + +public class JdbcTableScan extends TableScan { + + protected JdbcTableScan(RelOptCluster cluster, RelTraitSet traitSet, RelOptTable table) { + super(cluster, traitSet, table); + } +} diff --git a/analysis/src/main/java/org/apache/calcite/adapter/mysql/package-info.java b/analysis/src/main/java/org/apache/calcite/adapter/custom/package-info.java similarity index 95% rename from analysis/src/main/java/org/apache/calcite/adapter/mysql/package-info.java rename to analysis/src/main/java/org/apache/calcite/adapter/custom/package-info.java index ec8cee55..c9f19995 100644 --- a/analysis/src/main/java/org/apache/calcite/adapter/mysql/package-info.java +++ b/analysis/src/main/java/org/apache/calcite/adapter/custom/package-info.java @@ -1,18 +1,18 @@ -/* - * Copyright <2018> . - * - * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated - * documentation files (the "Software"), to deal in the Software without restriction, including without limitation the - * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to - * permit persons to whom the Software is furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the - * Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE - * WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR - * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR - * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - */ - -package org.apache.calcite.adapter.mysql; \ No newline at end of file +/* + * Copyright <2018> . + * + * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated + * documentation files (the "Software"), to deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to + * permit persons to whom the Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the + * Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE + * WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR + * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR + * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ */ + +package org.apache.calcite.adapter.custom; \ No newline at end of file diff --git a/analysis/src/main/java/org/apache/calcite/jdbc/JavaTypeFactoryImpl.java b/analysis/src/main/java/org/apache/calcite/jdbc/JavaTypeFactoryImpl.java index 82ac019b..87ad51cb 100644 --- a/analysis/src/main/java/org/apache/calcite/jdbc/JavaTypeFactoryImpl.java +++ b/analysis/src/main/java/org/apache/calcite/jdbc/JavaTypeFactoryImpl.java @@ -241,13 +241,17 @@ public Type getJavaClass(RelDataType type) { public RelDataType getDataType(RelDataTypeFactory relDataTypeFactory, String javaType) { switch (javaType.toUpperCase()) { case "INT": + case "INTEGER": return relDataTypeFactory.createSqlType(SqlTypeName.INTEGER); case "STRING": + case "VARCHAR": return relDataTypeFactory.createSqlType(SqlTypeName.VARCHAR); case "TINYINT": return relDataTypeFactory.createSqlType(SqlTypeName.TINYINT); case "SMALLINT": return relDataTypeFactory.createSqlType(SqlTypeName.SMALLINT); + case "BIGINT": + return relDataTypeFactory.createSqlType(SqlTypeName.BIGINT); case "FLOAT": return relDataTypeFactory.createSqlType(SqlTypeName.FLOAT); case "DOUBLE": @@ -262,6 +266,8 @@ public RelDataType getDataType(RelDataTypeFactory relDataTypeFactory, String jav return relDataTypeFactory.createSqlType(SqlTypeName.MAP); case "DATE": return relDataTypeFactory.createSqlType(SqlTypeName.DATE); + case "TIMESTAMP": + return relDataTypeFactory.createSqlType(SqlTypeName.TIMESTAMP); default: return relDataTypeFactory.createSqlType(SqlTypeName.VARCHAR); } diff --git a/analysis/src/main/java/org/apache/calcite/rel/rel2sql/RelToSqlConverter.java b/analysis/src/main/java/org/apache/calcite/rel/rel2sql/RelToSqlConverter.java index 9ccb5d7f..8cd5f26e 100644 --- a/analysis/src/main/java/org/apache/calcite/rel/rel2sql/RelToSqlConverter.java +++ b/analysis/src/main/java/org/apache/calcite/rel/rel2sql/RelToSqlConverter.java @@ -172,7 +172,7 @@ public Result visit(Filter e) { } } - //Updated by + //Updated by qsql-team /** @see #dispatch */ public Result visit(Project e) { Result x = visitChild(0, e.getInput()); diff --git a/analysis/src/main/java/org/apache/calcite/sql/dialect/HiveSqlDialect.java b/analysis/src/main/java/org/apache/calcite/sql/dialect/HiveSqlDialect.java index d46526a0..4554a3b1 100644 --- a/analysis/src/main/java/org/apache/calcite/sql/dialect/HiveSqlDialect.java +++ b/analysis/src/main/java/org/apache/calcite/sql/dialect/HiveSqlDialect.java @@ -17,9 +17,13 @@ package org.apache.calcite.sql.dialect; import org.apache.calcite.config.NullCollation; +import org.apache.calcite.sql.SqlCall; import org.apache.calcite.sql.SqlDialect; import org.apache.calcite.sql.SqlNode; +import org.apache.calcite.sql.SqlUtil; import org.apache.calcite.sql.SqlWriter; +import org.apache.calcite.sql.fun.HiveSqlOperatorTable; +import org.apache.calcite.sql.fun.SqlStdOperatorTable; /** * A SqlDialect implementation for the Apache Hive database. 
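 * Editor's sketch (assumed column names a, b and table t): with the unparseCall override added in the next hunk, a call built from Calcite's standard concat operator, which would normally unparse in infix form as SELECT a || b FROM t, is instead emitted for Hive in function syntax as SELECT CONCAT(a, b) FROM t.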
@@ -60,6 +64,16 @@ public HiveSqlDialect(Context context) { return null; } + //Updated by qsql-team + @Override public void unparseCall(SqlWriter writer, SqlCall call, + int leftPrec, int rightPrec) { + if (call.getOperator() == SqlStdOperatorTable.CONCAT) { + SqlUtil.unparseFunctionSyntax(HiveSqlOperatorTable.CONCAT, writer, call); + } else { + super.unparseCall(writer, call, leftPrec, rightPrec); + } + } + @Override public boolean supportsCharSet() { return false; } diff --git a/analysis/src/main/java/org/apache/calcite/sql/dialect/MysqlSqlDialect.java b/analysis/src/main/java/org/apache/calcite/sql/dialect/MysqlSqlDialect.java index 42f3f97e..1e589a6b 100644 --- a/analysis/src/main/java/org/apache/calcite/sql/dialect/MysqlSqlDialect.java +++ b/analysis/src/main/java/org/apache/calcite/sql/dialect/MysqlSqlDialect.java @@ -34,7 +34,9 @@ import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlNodeList; import org.apache.calcite.sql.SqlSelect; +import org.apache.calcite.sql.SqlUtil; import org.apache.calcite.sql.SqlWriter; +import org.apache.calcite.sql.fun.HiveSqlOperatorTable; import org.apache.calcite.sql.fun.SqlCase; import org.apache.calcite.sql.fun.SqlStdOperatorTable; import org.apache.calcite.sql.parser.SqlParserPos; @@ -142,20 +144,26 @@ public MysqlSqlDialect(Context context) { return caseExpr; } + //Updated by qsql-team @Override public void unparseCall(SqlWriter writer, SqlCall call, int leftPrec, int rightPrec) { - switch (call.getKind()) { - case FLOOR: - if (call.operandCount() != 2) { - super.unparseCall(writer, call, leftPrec, rightPrec); - return; - } - unparseFloor(writer, call); - break; + if (call.getOperator() == SqlStdOperatorTable.CONCAT) { + SqlUtil.unparseFunctionSyntax(HiveSqlOperatorTable.CONCAT, writer, call); + } else { + switch (call.getKind()) { + case FLOOR: + if (call.operandCount() != 2) { + super.unparseCall(writer, call, leftPrec, rightPrec); + return; + } - default: - super.unparseCall(writer, call, leftPrec, rightPrec); + unparseFloor(writer, call); + break; + + default: + super.unparseCall(writer, call, leftPrec, rightPrec); + } } } diff --git a/analysis/src/main/java/org/apache/calcite/sql/ext/SqlInsertOutput.java b/analysis/src/main/java/org/apache/calcite/sql/ext/SqlInsertOutput.java new file mode 100644 index 00000000..dd53839d --- /dev/null +++ b/analysis/src/main/java/org/apache/calcite/sql/ext/SqlInsertOutput.java @@ -0,0 +1,66 @@ +package org.apache.calcite.sql.ext; + +import java.util.List; +import org.apache.calcite.sql.SqlCall; +import org.apache.calcite.sql.SqlIdentifier; +import org.apache.calcite.sql.SqlKind; +import org.apache.calcite.sql.SqlLiteral; +import org.apache.calcite.sql.SqlNode; +import org.apache.calcite.sql.SqlNodeList; +import org.apache.calcite.sql.SqlOperator; +import org.apache.calcite.sql.SqlSpecialOperator; +import org.apache.calcite.sql.parser.SqlParserPos; +import org.apache.calcite.util.ImmutableNullableList; + +//Updated by qsql-team +public class SqlInsertOutput extends SqlCall { + public static final SqlSpecialOperator OPERATOR = + new SqlSpecialOperator("WRITE OUTPUT", SqlKind.OTHER){ + @Override + public SqlCall createCall(SqlLiteral functionQualifier, SqlParserPos pos, SqlNode... 
operands) { + return new SqlInsertOutput(pos, operands[0], + (SqlIdentifier) operands[1], (SqlNodeList) operands[2], operands[3]); + } + }; + + private SqlNode path; + private SqlIdentifier dataSource; + private SqlNode select; + private SqlNodeList columnsList; + + public SqlInsertOutput( + SqlParserPos pos, SqlNode path, + SqlIdentifier dataSource, SqlNodeList columnsList, SqlNode select) { + super(pos); + this.path = path; + this.select = select; + this.dataSource = dataSource; + this.columnsList = columnsList; + } + + @Override + public SqlOperator getOperator() { + return OPERATOR; + } + + @Override + public List<SqlNode> getOperandList() { + return ImmutableNullableList.of(path, dataSource, columnsList, select); + } + + public SqlNode getPath() { + return path; + } + + public SqlIdentifier getDataSource() { + return dataSource; + } + + public SqlNode getSelect() { + return select; + } + + public SqlNodeList getColumnsList() { + return columnsList; + } +} diff --git a/analysis/src/main/java/org/apache/calcite/sql/ext/SqlShowTables.java b/analysis/src/main/java/org/apache/calcite/sql/ext/SqlShowTables.java index d193e555..093f6a43 100644 --- a/analysis/src/main/java/org/apache/calcite/sql/ext/SqlShowTables.java +++ b/analysis/src/main/java/org/apache/calcite/sql/ext/SqlShowTables.java @@ -6,6 +6,7 @@ import java.util.List; +//Updated by qsql-team public class SqlShowTables extends SqlCall { private final SqlIdentifier db; diff --git a/analysis/src/main/java/org/apache/calcite/sql/fun/HiveSqlOperatorTable.java b/analysis/src/main/java/org/apache/calcite/sql/fun/HiveSqlOperatorTable.java new file mode 100644 index 00000000..b3adbcad --- /dev/null +++ b/analysis/src/main/java/org/apache/calcite/sql/fun/HiveSqlOperatorTable.java @@ -0,0 +1,16 @@ +package org.apache.calcite.sql.fun; + +import org.apache.calcite.sql.SqlFunction; +import org.apache.calcite.sql.SqlFunctionCategory; +import org.apache.calcite.sql.SqlKind; +import org.apache.calcite.sql.type.OperandTypes; +import org.apache.calcite.sql.type.ReturnTypes; +import org.apache.calcite.sql.util.ReflectiveSqlOperatorTable; + +public class HiveSqlOperatorTable extends ReflectiveSqlOperatorTable { + public static final SqlFunction CONCAT = + new SqlFunction("CONCAT", SqlKind.OTHER_FUNCTION, + ReturnTypes.ARG0_NULLABLE_VARYING, null, + OperandTypes.STRING_SAME_SAME, + SqlFunctionCategory.STRING); +} diff --git a/analysis/src/main/java/org/apache/calcite/sql/fun/SqlDatePartFunction.java b/analysis/src/main/java/org/apache/calcite/sql/fun/SqlDatePartFunction.java index cb8372d6..ce0dee2f 100644 --- a/analysis/src/main/java/org/apache/calcite/sql/fun/SqlDatePartFunction.java +++ b/analysis/src/main/java/org/apache/calcite/sql/fun/SqlDatePartFunction.java @@ -54,13 +54,14 @@ public SqlDatePartFunction(String name, TimeUnit timeUnit) { //~ Methods ---------------------------------------------------------------- - @Override public SqlNode rewriteCall(SqlValidator validator, SqlCall call) { - final List<SqlNode> operands = call.getOperandList(); - final SqlParserPos pos = call.getParserPosition(); - return SqlStdOperatorTable.EXTRACT.createCall(pos, - new SqlIntervalQualifier(timeUnit, null, SqlParserPos.ZERO), - operands.get(0)); - } + //Updated by qsql-team + // @Override public SqlNode rewriteCall(SqlValidator validator, SqlCall call) { + // final List<SqlNode> operands = call.getOperandList(); + // final SqlParserPos pos = call.getParserPosition(); + // return SqlStdOperatorTable.EXTRACT.createCall(pos, + // new SqlIntervalQualifier(timeUnit, null, SqlParserPos.ZERO), + // operands.get(0)); + 
// } public SqlOperandCountRange getOperandCountRange() { return SqlOperandCountRanges.of(1); diff --git a/analysis/src/main/java/org/apache/calcite/sql/fun/SqlRegexpExtractFunction.java b/analysis/src/main/java/org/apache/calcite/sql/fun/SqlRegexpExtractFunction.java index a310eab1..66dc7a49 100644 --- a/analysis/src/main/java/org/apache/calcite/sql/fun/SqlRegexpExtractFunction.java +++ b/analysis/src/main/java/org/apache/calcite/sql/fun/SqlRegexpExtractFunction.java @@ -21,7 +21,7 @@ public SqlRegexpExtractFunction() { SqlKind.OTHER_FUNCTION, ReturnTypes.ARG0_NULLABLE_VARYING, null, - null, + OperandTypes.STRING_STRING_INTEGER, SqlFunctionCategory.STRING); } diff --git a/analysis/src/main/java/org/apache/calcite/sql/fun/SqlRegexpReplaceFunction.java b/analysis/src/main/java/org/apache/calcite/sql/fun/SqlRegexpReplaceFunction.java index 546e899f..c4170c14 100644 --- a/analysis/src/main/java/org/apache/calcite/sql/fun/SqlRegexpReplaceFunction.java +++ b/analysis/src/main/java/org/apache/calcite/sql/fun/SqlRegexpReplaceFunction.java @@ -21,7 +21,7 @@ public SqlRegexpReplaceFunction() { SqlKind.OTHER_FUNCTION, ReturnTypes.ARG0_NULLABLE_VARYING, null, - null, + OperandTypes.STRING_STRING_STRING, SqlFunctionCategory.STRING); } diff --git a/analysis/src/main/java/org/apache/calcite/sql/fun/SqlSubstringFunction.java b/analysis/src/main/java/org/apache/calcite/sql/fun/SqlSubstringFunction.java index abccbfc1..5eadfbdb 100644 --- a/analysis/src/main/java/org/apache/calcite/sql/fun/SqlSubstringFunction.java +++ b/analysis/src/main/java/org/apache/calcite/sql/fun/SqlSubstringFunction.java @@ -171,23 +171,24 @@ public SqlOperandCountRange getOperandCountRange() { return SqlOperandCountRanges.between(2, 3); } - public void unparse( - SqlWriter writer, - SqlCall call, - int leftPrec, - int rightPrec) { - final SqlWriter.Frame frame = writer.startFunCall(getName()); - call.operand(0).unparse(writer, leftPrec, rightPrec); - writer.sep("FROM"); - call.operand(1).unparse(writer, leftPrec, rightPrec); - - if (3 == call.operandCount()) { - writer.sep("FOR"); - call.operand(2).unparse(writer, leftPrec, rightPrec); - } - - writer.endFunCall(frame); - } + //Updated by Qsql-team + // public void unparse( + // SqlWriter writer, + // SqlCall call, + // int leftPrec, + // int rightPrec) { + // final SqlWriter.Frame frame = writer.startFunCall(getName()); + // call.operand(0).unparse(writer, leftPrec, rightPrec); + // writer.sep("FROM"); + // call.operand(1).unparse(writer, leftPrec, rightPrec); + // + // if (3 == call.operandCount()) { + // writer.sep("FOR"); + // call.operand(2).unparse(writer, leftPrec, rightPrec); + // } + // + // writer.endFunCall(frame); + // } @Override public SqlMonotonicity getMonotonicity(SqlOperatorBinding call) { // SUBSTRING(x FROM 0 FOR constant) has same monotonicity as x diff --git a/analysis/src/main/java/org/apache/calcite/sql/fun/SqlTrimFunction.java b/analysis/src/main/java/org/apache/calcite/sql/fun/SqlTrimFunction.java index 87f7990e..a57482fa 100644 --- a/analysis/src/main/java/org/apache/calcite/sql/fun/SqlTrimFunction.java +++ b/analysis/src/main/java/org/apache/calcite/sql/fun/SqlTrimFunction.java @@ -102,16 +102,17 @@ public SqlTrimFunction(String name, SqlKind kind, //~ Methods ---------------------------------------------------------------- + //Updated by qsql-team public void unparse( SqlWriter writer, SqlCall call, int leftPrec, int rightPrec) { final SqlWriter.Frame frame = writer.startFunCall(getName()); - assert call.operand(0) instanceof SqlLiteral : 
call.operand(0); - call.operand(0).unparse(writer, leftPrec, rightPrec); - call.operand(1).unparse(writer, leftPrec, rightPrec); - writer.sep("FROM"); + // assert call.operand(0) instanceof SqlLiteral : call.operand(0); + // call.operand(0).unparse(writer, leftPrec, rightPrec); + // call.operand(1).unparse(writer, leftPrec, rightPrec); + // writer.sep("FROM"); call.operand(2).unparse(writer, leftPrec, rightPrec); writer.endFunCall(frame); } diff --git a/assembly/pom.xml b/assembly/pom.xml index 293c2f9b..45dfc25e 100644 --- a/assembly/pom.xml +++ b/assembly/pom.xml @@ -5,7 +5,7 @@ qsql com.qihoo.qsql - 0.5 + 0.6 ../pom.xml 4.0.0 @@ -30,11 +30,6 @@ qsql-calcite-analysis ${project.version} - - com.qihoo.qsql - qsql-core - ${project.version} - com.qihoo.qsql qsql-example diff --git a/assembly/src/main/assembly/assembly-linux.xml b/assembly/src/main/assembly/assembly-linux.xml index 687b9040..9777df79 100644 --- a/assembly/src/main/assembly/assembly-linux.xml +++ b/assembly/src/main/assembly/assembly-linux.xml @@ -92,6 +92,7 @@ org.elasticsearch:elasticsearch-spark*:jar mysql:mysql-connector-java*:jar org.apache.derby:derby*:jar + *:ojdbc*:jar /${qsql.release}/lib/spark runtime diff --git a/bin/meta-extract b/bin/meta-extract new file mode 100644 index 00000000..cb5096c6 --- /dev/null +++ b/bin/meta-extract @@ -0,0 +1,100 @@ +#!/bin/bash + +export QSQL_HOME="$(cd "`dirname "$0"`"/..; pwd)" +. "${QSQL_HOME}/bin/load-qsql-env" + +ARGS=`getopt -o "p:d:r:h" -n "meta-extract" -- "$@"` + +eval set -- "${ARGS}" +# eval is evil + +while true +do + case "${1}" in + -p) + shift; + PROPERTY=${1} + shift; + ;; + -d) + shift; + DATA_SOURCE=${1} + shift; + ;; + -r) + shift; + MATCHER=${1} + shift; + ;; + -h) + shift; + HELP_ENABLE="true" + ;; + --) + shift + break + ;; + esac +done + +if [ -z "${PROPERTY}" ]; then + echo "ERROR: Connection information is necessary. Please read the docs" + exit 1 +fi + +if [ -z "${DATA_SOURCE}" ]; then + echo "ERROR: Data source type is necessary, e.g. es, mysql" + exit 1 +fi + +if [ -z "${MATCHER}" ]; then + MATCHER="%%" +fi + +if [ ! -z "${HELP_ENABLE}" ]; then + echo "Options: + -p Server connection properties in JSON format. (e.g.: {\"jdbcDriver\": \"driver\", \"jdbcUrl\": \"localhost\"} ) + -d Data source type. (e.g. ES, MySQL) + -r Table name fuzzy matching rule; supports the operators (%, _, ?) + " + exit 1 +fi + +if [[ -n "$JAVA_HOME" ]] && [[ -x "$JAVA_HOME/bin/java" ]]; then + JAVA_RUNNER="${JAVA_HOME}/bin/java" +else + if [ `command -v java` ]; then + JAVA_RUNNER="java" + else + echo "ERROR: JAVA_HOME is not set" >&2 + exit 1 + fi +fi + +if [[ "$JAVA_RUNNER" ]]; then + version=$("$JAVA_RUNNER" -version 2>&1 | awk -F '"' '/version/ {print $2}') + OLD_IFS=$IFS + IFS=. + read major minor extra <<<"$version"; + IFS=$OLD_IFS + if (( major == 1 && minor < 8 )); + then + echo "ERROR: Required java version >= 1.8" + exit 1 + fi +fi + +for jar in `find "${QSQL_HOME}/lib" -maxdepth 1 -name "*.jar"` +do + if [ ! 
-n "${QSQL_JARS}" ] + then + export QSQL_JARS="${jar}" + else + export QSQL_JARS="${QSQL_JARS}:${jar}" + fi +done + +"${JAVA_RUNNER}" -cp "${QSQL_JARS}" com.qihoo.qsql.metadata.collect.MetadataCollector \ +"${PROPERTY}" "${DATA_SOURCE}" "${MATCHER}" + +exit 0 diff --git a/bin/metadata b/bin/metadata index cd1005b0..b5fac258 100644 --- a/bin/metadata +++ b/bin/metadata @@ -95,7 +95,7 @@ else fi fi -QSQL_METADATA_CLASSPATH="${QSQL_HOME}/lib/qsql-core-0.5.jar" +QSQL_METADATA_CLASSPATH="${QSQL_HOME}/lib/qsql-core-0.6.jar" QSQL_METADATA_CLASSPATH="${QSQL_HOME}/lib/mysql-connector-java-5.1.20.jar:${QSQL_METADATA_CLASSPATH}" QSQL_METADATA_CLASSPATH="${QSQL_HOME}/lib/ibatis-core-3.0.jar:${QSQL_METADATA_CLASSPATH}" diff --git a/bin/qsql b/bin/qsql index fd228afa..bc2eb3ca 100644 --- a/bin/qsql +++ b/bin/qsql @@ -26,7 +26,7 @@ do shift case "${1}" in "") - echo "no --runner selected, decide to default runner"; + echo "ERROR: No --runner selected, decide to default runner"; QSQL_RUNNER="${QSQL_DEFAULT_RUNNER}" shift ; ;; @@ -47,7 +47,7 @@ do shift ; ;; *) - echo "--runner error! please select property runner!" + echo "ERROR: `--runner` error! please select property runner!" exit 1 ;; esac @@ -56,7 +56,7 @@ do shift ; case "${1}" in "") - echo "no --master selected, decide to default master"; + echo "ERROR: No --master selected, decide to default master"; QSQL_MASTER="${QSQL_DEFAULT_MASTER}" shift ;; @@ -76,8 +76,24 @@ do QSQL_MASTER="yarn-client" shift; ;; + local*) + QSQL_MASTER="local[*]" + shift; + ;; + mesos*) + QSQL_MASTER=${1} + shift; + ;; + spark*) + QSQL_MASTER=${1} + shift; + ;; + k8s*) + QSQL_MASTER=${1} + shift; + ;; *) - echo "--master error! please select property master!" + echo "ERROR: `--master` error! please select property master!" exit 1 ;; esac @@ -86,7 +102,7 @@ do shift ; case "${1}" in "") - echo "no --worker_memory selected, decide to default worker_memory"; + echo "ERROR: No --worker_memory selected, decide to default worker_memory"; QSQL_WORKER_MEMORY="${QSQL_DEFAULT_WORKER_MEMORY}" shift ;; @@ -100,7 +116,7 @@ do shift ; case "${1}" in "") - echo "no --driver_memory selected, decide to default driver_memory"; + echo "ERROR: No --driver_memory selected, decide to default driver_memory"; QSQL_DRIVER_MEMORY="${QSQL_DEFAULT_DRIVER_MEMORY}" shift ;; @@ -114,7 +130,7 @@ do shift; case "${1}" in "") - echo "no --worker_num selected, decide to default worker_num"; + echo "ERROR: No --worker_num selected, decide to default worker_num"; QSQL_WORKER_NUM="${QSQL_DEFAULT_WORKER_NUM}" shift ;; @@ -180,13 +196,13 @@ fi . "${QSQL_HOME}/bin/qsql-env" -CONF=${CONF}" --jar_name=${QSQL_HOME}/lib/qsql-core-0.5.jar " +CONF=${CONF}" --jar_name=${QSQL_HOME}/lib/qsql-core-0.6.jar " CONF=${CONF}" --class_name=com.qihoo.qsql.cli.QSqlSubmit " CONF=${CONF}" --jar=${JARS} " if [ ! -z "${SQL}" ] ; then - SQL=`echo "${SQL}" | base64 -w 0` + SQL=$(echo "${SQL}" | base64 -w 0) CONF=${CONF}"--sql=${SQL}" eval ${QSQL_HOME}/bin/qsql-class com.qihoo.qsql.cli.QSqlSubmit "$CONF" exit $? 
diff --git a/bin/qsql-class b/bin/qsql-class index 8c863f62..bc5f71be 100644 --- a/bin/qsql-class +++ b/bin/qsql-class @@ -2,30 +2,56 @@ export QSQL_HOME="$(cd "`dirname "$0"`"/..; pwd)" -if [ -n "${JAVA_HOME}" ]; then +if [[ -n "$JAVA_HOME" ]] && [[ -x "$JAVA_HOME/bin/java" ]]; then JAVA_RUNNER="${JAVA_HOME}/bin/java" else if [ `command -v java` ]; then JAVA_RUNNER="java" else - echo "JAVA_HOME is not set" >&2 + echo "ERROR: JAVA_HOME is not set" >&2 exit 1 fi fi -if [ -n "${SPARK_HOME}" ]; then +if [[ "$JAVA_RUNNER" ]]; then + version=$("$JAVA_RUNNER" -version 2>&1 | awk -F '"' '/version/ {print $2}') + OLD_IFS=$IFS; IFS=. + read major minor extra <<<"$version"; IFS=$OLD_IFS + if (( major == 1 && minor < 8 )); + then + echo "ERROR: Required java version >= 1.8" + exit 1 + fi +fi + +if [[ -n "$SPARK_HOME" ]] && [[ -x "$SPARK_HOME/bin/spark-submit" ]]; then SPARK_RUNNER="${SPARK_HOME}/bin/spark-submit" else if [ `command -v spark-submit` ]; then SPARK_RUNNER="spark-submit" else - echo "SPARK_HOME is not set" >&2 + echo "ERROR: SPARK_HOME is not set" >&2 exit 1 fi fi +if [[ "$SPARK_RUNNER" ]]; then + version=$("$SPARK_RUNNER" --version 2>&1 | awk -F 'version' '/version/ {print $2}') + IFS=. read major minor extra <<< "$version"; + if (( major >= 2)); + then + if (( minor < 2)); + then + echo "ERROR: Required spark version >= 2.2" + exit 1 + fi + else + echo "ERROR: Required spark version >= 2.2" + exit 1 + fi +fi -QSQL_LAUNCH_CLASSPATH="${QSQL_HOME}/lib/qsql-core-0.5.jar" +QSQL_LAUNCH_CLASSPATH="${QSQL_HOME}/lib/qsql-core-0.6.jar" QSQL_LAUNCH_CLASSPATH="${QSQL_LAUNCH_CLASSPATH}:${QSQL_JARS}" "${JAVA_RUNNER}" -cp "${QSQL_LAUNCH_CLASSPATH}" com.qihoo.qsql.launcher.ExecutionDispatcher "$@" diff --git a/conf/base-env.sh.template b/conf/base-env.sh similarity index 100% rename from conf/base-env.sh.template rename to conf/base-env.sh diff --git a/conf/qsql-runner.properties b/conf/qsql-runner.properties index a3bcbdb2..1b17f0b6 100644 --- a/conf/qsql-runner.properties +++ b/conf/qsql-runner.properties @@ -10,7 +10,7 @@ # spark.sql.hive.metastore.jars= # spark.sql.hive.metastore.version= # spark.local.dir=/tmp -# spark.driver.userClassPathFirst=true +spark.driver.userClassPathFirst=true # spark.sql.broadcastTimeout=300 # spark.sql.crossJoin.enabled=true # spark.speculation=true diff --git a/core/pom.xml b/core/pom.xml index 5413003f..0251fbca 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -22,12 +22,12 @@ limitations under the License. com.qihoo.qsql qsql - 0.5 + 0.6 qsql-core jar - 0.5 + 0.6 qsql-core @@ -224,6 +224,12 @@ limitations under the License. mysql mysql-connector-java + + + com.oracle + ojdbc7 + 12.1.0.3 + org.apache.derby @@ -272,6 +278,7 @@ limitations under the License. 
httpcore ${httpcore.version} + diff --git a/core/src/main/java/com/qihoo/qsql/api/AutomaticConnection.java b/core/src/main/java/com/qihoo/qsql/api/AutomaticConnection.java index 2a75fb54..5c22f02b 100644 --- a/core/src/main/java/com/qihoo/qsql/api/AutomaticConnection.java +++ b/core/src/main/java/com/qihoo/qsql/api/AutomaticConnection.java @@ -3,6 +3,7 @@ import com.qihoo.qsql.exception.UnsupportedApiException; import com.qihoo.qsql.metadata.MetadataPostman; import com.qihoo.qsql.exec.JdbcPipeline; +import com.qihoo.qsql.plan.QueryTables; import com.qihoo.qsql.utils.SqlUtil; import java.sql.Array; import java.sql.Blob; @@ -69,8 +70,12 @@ public AutomaticConnection() throws SQLException { @Override public PreparedStatement prepareStatement(String sql) throws SQLException { - List names = SqlUtil.parseTableName(sql); + QueryTables tables = SqlUtil.parseTableName(sql); + if (tables.isDml()) { + throw new RuntimeException("Unsupported DML in JDBC model"); + } + List names = tables.tableNames; if (names.isEmpty()) { return simpleConnection.prepareStatement(sql); } diff --git a/core/src/main/java/com/qihoo/qsql/api/DynamicSqlRunner.java b/core/src/main/java/com/qihoo/qsql/api/DynamicSqlRunner.java index edf739a5..dbf3d6af 100644 --- a/core/src/main/java/com/qihoo/qsql/api/DynamicSqlRunner.java +++ b/core/src/main/java/com/qihoo/qsql/api/DynamicSqlRunner.java @@ -1,8 +1,10 @@ package com.qihoo.qsql.api; +import com.qihoo.qsql.api.SqlRunner.Builder.RunnerType; import com.qihoo.qsql.exception.QsqlException; import com.qihoo.qsql.metadata.MetadataPostman; import com.qihoo.qsql.plan.QueryProcedureProducer; +import com.qihoo.qsql.plan.QueryTables; import com.qihoo.qsql.plan.proc.DirectQueryProcedure; import com.qihoo.qsql.plan.proc.ExtractProcedure; import com.qihoo.qsql.plan.proc.PreparedExtractProcedure; @@ -66,7 +68,12 @@ private QueryProcedure createQueryPlan(String sql) { @Override public AbstractPipeline sql(String sql) { LOGGER.info("The SQL that is ready to execute is: \n" + sql); - tableNames = SqlUtil.parseTableName(sql); + QueryTables tables = SqlUtil.parseTableName(sql); + tableNames = tables.tableNames; + + if (tables.isDml()) { + environment.setTransformRunner(RunnerType.SPARK); + } LOGGER.debug("Parsed table names for upper SQL are: {}", tableNames); QueryProcedure procedure = createQueryPlan(sql); @@ -136,4 +143,7 @@ private AbstractPipeline getOrCreateClusterPipeline(QueryProcedure procedure) { return this.pipeline; } + //TODO extract all of SqlParser for parsing by one config + //TODO test set identifier escape in dialect + //TODO adjust code architecture, make jdbc and runner perform in the same way.(always translate to a new lang) } diff --git a/core/src/main/java/com/qihoo/qsql/codegen/ClassBodyComposer.java b/core/src/main/java/com/qihoo/qsql/codegen/ClassBodyComposer.java index b8f02ac8..5970bb2e 100644 --- a/core/src/main/java/com/qihoo/qsql/codegen/ClassBodyComposer.java +++ b/core/src/main/java/com/qihoo/qsql/codegen/ClassBodyComposer.java @@ -117,6 +117,7 @@ class ClassesLink extends BlockLink { super(link); } + //TODO change to generate construction dynamically @Override protected void decorateTrait(Class clazz, String... 
code) { if (isMyResponsibility(clazz)) { diff --git a/core/src/main/java/com/qihoo/qsql/codegen/IntegratedQueryWrapper.java b/core/src/main/java/com/qihoo/qsql/codegen/IntegratedQueryWrapper.java index b1603987..c5f7e61b 100644 --- a/core/src/main/java/com/qihoo/qsql/codegen/IntegratedQueryWrapper.java +++ b/core/src/main/java/com/qihoo/qsql/codegen/IntegratedQueryWrapper.java @@ -2,8 +2,6 @@ import com.qihoo.qsql.plan.proc.QueryProcedure; -import java.util.concurrent.atomic.AtomicInteger; - /** * Provide several method, which can generate execution code in intermediate engine layer. *

@@ -15,9 +13,6 @@ public abstract class IntegratedQueryWrapper extends ClassBodyWrapper { - protected static final String VARIABLE_PREFIX = "$"; - protected AtomicInteger varId = new AtomicInteger(0); - public abstract IntegratedQueryWrapper run(QueryProcedure plan); public abstract void interpretProcedure(QueryProcedure plan); @@ -32,7 +27,4 @@ public abstract class IntegratedQueryWrapper extends ClassBodyWrapper { public abstract void createTempTable(String tableName); - protected String latestDeclaredVariable() { - return VARIABLE_PREFIX + varId.get(); - } } diff --git a/core/src/main/java/com/qihoo/qsql/codegen/QueryGenerator.java b/core/src/main/java/com/qihoo/qsql/codegen/QueryGenerator.java index 1bc73281..02e2d6d2 100644 --- a/core/src/main/java/com/qihoo/qsql/codegen/QueryGenerator.java +++ b/core/src/main/java/com/qihoo/qsql/codegen/QueryGenerator.java @@ -8,7 +8,7 @@ import com.qihoo.qsql.codegen.spark.SparkCsvGenerator; import com.qihoo.qsql.codegen.spark.SparkElasticsearchGenerator; import com.qihoo.qsql.codegen.spark.SparkHiveGenerator; -import com.qihoo.qsql.codegen.spark.SparkMySqlGenerator; +import com.qihoo.qsql.codegen.spark.SparkJdbcGenerator; import com.qihoo.qsql.codegen.spark.SparkVirtualGenerator; import com.qihoo.qsql.plan.proc.ExtractProcedure; import com.qihoo.qsql.plan.proc.PreparedExtractProcedure; @@ -26,7 +26,7 @@ public abstract class QueryGenerator { private static QueryGenerator elasticSearch = null; private static QueryGenerator hive = null; - private static QueryGenerator mysql = null; + private static QueryGenerator jdbc = null; private static QueryGenerator virtual = null; private static QueryGenerator csv = null; @@ -53,8 +53,9 @@ public static QueryGenerator getQueryGenerator(ExtractProcedure procedure, return createHiveQueryGenerator(procedure, composer, isSpark); } else if (procedure instanceof PreparedExtractProcedure.ElasticsearchExtractor) { return createElasticsearchQueryGenerator(procedure, composer, isSpark); - } else if (procedure instanceof PreparedExtractProcedure.MySqlExtractor) { - return createMySqlQueryGenerator(procedure, composer, isSpark); + } else if (procedure instanceof PreparedExtractProcedure.MySqlExtractor + || procedure instanceof PreparedExtractProcedure.OracleExtractor) { + return createJdbcQueryGenerator(procedure, composer, isSpark); } else if (procedure instanceof PreparedExtractProcedure.VirtualExtractor) { return createVirtualQueryGenerator(procedure, composer, isSpark); } else if (procedure instanceof PreparedExtractProcedure.CsvExtractor) { @@ -100,21 +101,21 @@ private static QueryGenerator createElasticsearchQueryGenerator(ExtractProcedure return elasticSearch; } - private static QueryGenerator createMySqlQueryGenerator(ExtractProcedure procedure, + private static QueryGenerator createJdbcQueryGenerator(ExtractProcedure procedure, ClassBodyComposer composer, boolean isSpark) { - if (mysql == null) { + if (jdbc == null) { if (isSpark) { - mysql = new SparkMySqlGenerator(); + jdbc = new SparkJdbcGenerator(); } else { - mysql = new FlinkMySqlGenerator(); + jdbc = new FlinkMySqlGenerator(); } - setSpecificState(mysql, procedure, composer); - mysql.prepare(); + setSpecificState(jdbc, procedure, composer); + jdbc.prepare(); } else { - setSpecificState(mysql, procedure, composer); + setSpecificState(jdbc, procedure, composer); } - return mysql; + return jdbc; } private static QueryGenerator createVirtualQueryGenerator(ExtractProcedure procedure, @@ -166,7 +167,7 @@ private static void setSpecificState(QueryGenerator 
generator, public static void close() { elasticSearch = null; hive = null; - mysql = null; + jdbc = null; virtual = null; csv = null; } diff --git a/core/src/main/java/com/qihoo/qsql/codegen/flink/FlinkBodyWrapper.java b/core/src/main/java/com/qihoo/qsql/codegen/flink/FlinkBodyWrapper.java index ed854218..60367f90 100644 --- a/core/src/main/java/com/qihoo/qsql/codegen/flink/FlinkBodyWrapper.java +++ b/core/src/main/java/com/qihoo/qsql/codegen/flink/FlinkBodyWrapper.java @@ -4,7 +4,6 @@ import com.qihoo.qsql.codegen.IntegratedQueryWrapper; import com.qihoo.qsql.plan.proc.LoadProcedure; import com.qihoo.qsql.plan.proc.QueryProcedure; -import java.util.concurrent.atomic.AtomicInteger; /** * As a child of {@link IntegratedQueryWrapper}, {@link FlinkBodyWrapper} implement mixed operations code generation for @@ -19,7 +18,7 @@ public IntegratedQueryWrapper run(QueryProcedure plan) { @Override public void interpretProcedure(QueryProcedure plan) { - plan.accept(new SimpleFlinkProcVisitor(varId, composer)); + plan.accept(new SimpleFlinkProcVisitor(composer)); } @Override @@ -40,8 +39,7 @@ public void importSpecificDependency() { @Override public IntegratedQueryWrapper show() { - composer.handleComposition(ClassBodyComposer.CodeCategory.SENTENCE, - latestDeclaredVariable() + ".print();\n"); + composer.handleComposition(ClassBodyComposer.CodeCategory.SENTENCE, "tmp.print();\n"); return this; } @@ -62,9 +60,8 @@ public void createTempTable(String tableName) { private class SimpleFlinkProcVisitor extends FlinkProcedureVisitor { - SimpleFlinkProcVisitor(AtomicInteger varId, - ClassBodyComposer composer) { - super(varId, composer); + SimpleFlinkProcVisitor(ClassBodyComposer composer) { + super(composer); } @Override diff --git a/core/src/main/java/com/qihoo/qsql/codegen/flink/FlinkProcedureVisitor.java b/core/src/main/java/com/qihoo/qsql/codegen/flink/FlinkProcedureVisitor.java index 5c683f5d..ebcd0943 100644 --- a/core/src/main/java/com/qihoo/qsql/codegen/flink/FlinkProcedureVisitor.java +++ b/core/src/main/java/com/qihoo/qsql/codegen/flink/FlinkProcedureVisitor.java @@ -1,6 +1,7 @@ package com.qihoo.qsql.codegen.flink; import com.qihoo.qsql.codegen.ClassBodyComposer; +import com.qihoo.qsql.codegen.ClassBodyComposer.CodeCategory; import com.qihoo.qsql.codegen.QueryGenerator; import com.qihoo.qsql.plan.ProcedureVisitor; import com.qihoo.qsql.plan.proc.DirectQueryProcedure; @@ -8,7 +9,6 @@ import com.qihoo.qsql.plan.proc.LoadProcedure; import com.qihoo.qsql.plan.proc.QueryProcedure; import com.qihoo.qsql.plan.proc.TransformProcedure; -import java.util.concurrent.atomic.AtomicInteger; /** * For traversing procedures to generate. @@ -16,37 +16,35 @@ public class FlinkProcedureVisitor extends ProcedureVisitor { private ClassBodyComposer composer; - private AtomicInteger varId; - private String variable; - public FlinkProcedureVisitor(AtomicInteger varId, ClassBodyComposer composer) { + public FlinkProcedureVisitor(ClassBodyComposer composer) { this.composer = composer; - this.varId = varId; } @Override public void visit(ExtractProcedure extractProcedure) { - createVariableName(); + composer.handleComposition(CodeCategory.SENTENCE, "{"); QueryGenerator builder = QueryGenerator.getQueryGenerator( extractProcedure, composer, false); builder.execute(); builder.saveToTempTable(); + composer.handleComposition(CodeCategory.SENTENCE, "}"); visitNext(extractProcedure); } + //TODO Care for `tmp` is not declared. 
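+ //Editor's sketch (assumption, not part of this patch): these SENTENCE strings rely on the generated class body declaring the shared cursor once, e.g. DataSet<Row> tmp = null; each ExtractProcedure block above then assigns tmp inside its { ... } scope, visit(TransformProcedure) below reassigns it via tEnv.toDataSet(table, Row.class), and visit(LoadProcedure) finally calls tmp.print().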
@Override public void visit(TransformProcedure transformProcedure) { composer.handleComposition(ClassBodyComposer.CodeCategory.SENTENCE, "Table table = tEnv.sqlQuery(\"" + transformProcedure.sql() + "\");"); composer.handleComposition(ClassBodyComposer.CodeCategory.SENTENCE, - "DataSet " + variable + " = tEnv.toDataSet(table, Row.class);"); + "tmp = tEnv.toDataSet(table, Row.class);"); visitNext(transformProcedure); } @Override public void visit(LoadProcedure loadProcedure) { - composer.handleComposition(ClassBodyComposer.CodeCategory.SENTENCE, - variable + ".print();\n"); + composer.handleComposition(ClassBodyComposer.CodeCategory.SENTENCE, "tmp.print();\n"); visitNext(loadProcedure); } @@ -59,8 +57,4 @@ public void visit(QueryProcedure queryProcedure) { public void visit(DirectQueryProcedure queryProcedure) { visitNext(queryProcedure); } - - protected void createVariableName() { - this.variable = "$" + (varId.incrementAndGet()); - } } diff --git a/core/src/main/java/com/qihoo/qsql/codegen/spark/SparkBodyWrapper.java b/core/src/main/java/com/qihoo/qsql/codegen/spark/SparkBodyWrapper.java index eeed647e..a81543df 100644 --- a/core/src/main/java/com/qihoo/qsql/codegen/spark/SparkBodyWrapper.java +++ b/core/src/main/java/com/qihoo/qsql/codegen/spark/SparkBodyWrapper.java @@ -2,9 +2,7 @@ import com.qihoo.qsql.codegen.ClassBodyComposer; import com.qihoo.qsql.codegen.IntegratedQueryWrapper; -import com.qihoo.qsql.plan.proc.LoadProcedure; import com.qihoo.qsql.plan.proc.QueryProcedure; -import java.util.concurrent.atomic.AtomicInteger; /** @@ -15,13 +13,13 @@ public class SparkBodyWrapper extends IntegratedQueryWrapper { @Override public IntegratedQueryWrapper run(QueryProcedure plan) { - plan.accept(new SparkProcedureVisitor(varId, composer)); + plan.accept(new SparkProcedureVisitor(composer)); return this; } @Override public void interpretProcedure(QueryProcedure plan) { - plan.accept(new SimpleSparkProcVisitor(varId, composer)); + plan.accept(new SimpleSparkProcVisitor(composer)); } @Override @@ -63,13 +61,8 @@ public void createTempTable(String tableName) { private class SimpleSparkProcVisitor extends SparkProcedureVisitor { - SimpleSparkProcVisitor(AtomicInteger varId, - ClassBodyComposer composer) { - super(varId, composer); - } - - @Override - public void visit(LoadProcedure procedure) { + SimpleSparkProcVisitor(ClassBodyComposer composer) { + super(composer); } } } diff --git a/core/src/main/java/com/qihoo/qsql/codegen/spark/SparkMySqlGenerator.java b/core/src/main/java/com/qihoo/qsql/codegen/spark/SparkJdbcGenerator.java similarity index 79% rename from core/src/main/java/com/qihoo/qsql/codegen/spark/SparkMySqlGenerator.java rename to core/src/main/java/com/qihoo/qsql/codegen/spark/SparkJdbcGenerator.java index bc8b97ab..03276355 100644 --- a/core/src/main/java/com/qihoo/qsql/codegen/spark/SparkMySqlGenerator.java +++ b/core/src/main/java/com/qihoo/qsql/codegen/spark/SparkJdbcGenerator.java @@ -1,53 +1,55 @@ -package com.qihoo.qsql.codegen.spark; - -import com.qihoo.qsql.codegen.ClassBodyComposer; -import com.qihoo.qsql.codegen.QueryGenerator; -import java.util.Properties; - -/** - * Code generator, used when {@link com.qihoo.qsql.exec.spark.SparkPipeline} is chosen and source - * data of query is in MySql at the same time. 
- */ -public class SparkMySqlGenerator extends QueryGenerator { - - @Override - public void importDependency() { - String[] imports = { - "import org.apache.spark.sql.Dataset", - "import org.apache.spark.sql.Row", - "import java.util.Properties", - "import com.qihoo.qsql.codegen.spark.SparkMySqlGenerator" - }; - composer.handleComposition(ClassBodyComposer.CodeCategory.IMPORT, imports); - } - - @Override - public void prepareQuery() {} - - //remember to remove temporary files after computing in hdfs or local machine - @Override - public void executeQuery() { - //TODO change to generate invoking from reflection - Invoker config = Invoker.registerMethod("SparkMySqlGenerator.config"); - String invokeWrap = config.invoke(convertProperties("jdbcUser", "jdbcPassword")); - String invoked = String.format("tmp = spark.read().jdbc(\"%s\", \"%s\", %s);", - properties.getOrDefault("jdbcUrl", ""), tableName, invokeWrap); - composer.handleComposition(ClassBodyComposer.CodeCategory.SENTENCE, invoked); - } - - @Override - public void saveToTempTable() { - String created = "tmp.createOrReplaceTempView(\"" + tableName + "\");"; - composer.handleComposition(ClassBodyComposer.CodeCategory.SENTENCE, created); - } - - /** - * . - */ - public static Properties config(String user, String password) { - Properties properties = new Properties(); - properties.put("user", user); - properties.put("password", password); - return properties; - } -} +package com.qihoo.qsql.codegen.spark; + +import com.qihoo.qsql.codegen.ClassBodyComposer; +import com.qihoo.qsql.codegen.QueryGenerator; +import java.util.Properties; + +/** + * Code generator, used when {@link com.qihoo.qsql.exec.spark.SparkPipeline} is chosen and the source + * data of the query is in a JDBC database (e.g. MySQL or Oracle) at the same time. + */ +public class SparkJdbcGenerator extends QueryGenerator { + + @Override + public void importDependency() { + String[] imports = { + "import org.apache.spark.sql.Dataset", + "import org.apache.spark.sql.Row", + "import java.util.Properties", + "import com.qihoo.qsql.codegen.spark.SparkJdbcGenerator" + }; + composer.handleComposition(ClassBodyComposer.CodeCategory.IMPORT, imports); + } + + @Override + public void prepareQuery() {} + + //remember to remove temporary files after computing in hdfs or local machine + @Override + public void executeQuery() { + //TODO change to generate invoking from reflection + Invoker config = Invoker.registerMethod("SparkJdbcGenerator.config"); + String invokeWrap = config.invoke(convertProperties("jdbcUser", "jdbcPassword", "jdbcDriver")); + String invoked = String.format("tmp = spark.read().jdbc(\"%s\", \"%s\", %s);", + properties.getOrDefault("jdbcUrl", ""), + "(" + query + ") " + tableName, invokeWrap); + composer.handleComposition(ClassBodyComposer.CodeCategory.SENTENCE, invoked); + } + + @Override + public void saveToTempTable() { + String created = "tmp.createOrReplaceTempView(\"" + tableName + "\");"; + composer.handleComposition(ClassBodyComposer.CodeCategory.SENTENCE, created); + } + + /** + * . 
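+ * Editor's illustration (hypothetical URL, credentials and table): combined with executeQuery() above, the generated Spark code is expected to look roughly like: Properties prop = SparkJdbcGenerator.config("user", "secret", "com.mysql.jdbc.Driver"); tmp = spark.read().jdbc("jdbc:mysql://host:3306/demo", "(SELECT id, name FROM tbl) tbl", prop); tmp.createOrReplaceTempView("tbl");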
+ */ + public static Properties config(String user, String password, String driver) { + Properties properties = new Properties(); + properties.put("user", user); + properties.put("password", password); + properties.put("driver", driver); + return properties; + } +} diff --git a/core/src/main/java/com/qihoo/qsql/codegen/spark/SparkProcedureVisitor.java b/core/src/main/java/com/qihoo/qsql/codegen/spark/SparkProcedureVisitor.java index 1650c865..9d4bf0d0 100644 --- a/core/src/main/java/com/qihoo/qsql/codegen/spark/SparkProcedureVisitor.java +++ b/core/src/main/java/com/qihoo/qsql/codegen/spark/SparkProcedureVisitor.java @@ -3,15 +3,14 @@ import com.qihoo.qsql.codegen.ClassBodyComposer; import com.qihoo.qsql.codegen.ClassBodyComposer.CodeCategory; import com.qihoo.qsql.codegen.QueryGenerator; +import com.qihoo.qsql.plan.ProcedureVisitor; import com.qihoo.qsql.plan.proc.DirectQueryProcedure; +import com.qihoo.qsql.plan.proc.DiskLoadProcedure; import com.qihoo.qsql.plan.proc.ExtractProcedure; import com.qihoo.qsql.plan.proc.LoadProcedure; import com.qihoo.qsql.plan.proc.MemoryLoadProcedure; import com.qihoo.qsql.plan.proc.QueryProcedure; import com.qihoo.qsql.plan.proc.TransformProcedure; -import com.qihoo.qsql.plan.ProcedureVisitor; - -import java.util.concurrent.atomic.AtomicInteger; /** * Provide several visit methods to traversing the whole {@link QueryProcedure} which will be execute on Spark. @@ -20,7 +19,7 @@ public class SparkProcedureVisitor extends ProcedureVisitor { private ClassBodyComposer composer; - public SparkProcedureVisitor(AtomicInteger varId, ClassBodyComposer composer) { + public SparkProcedureVisitor(ClassBodyComposer composer) { this.composer = composer; } @@ -46,6 +45,12 @@ public void visit(TransformProcedure transformProcedure) { public void visit(LoadProcedure loadProcedure) { if (loadProcedure instanceof MemoryLoadProcedure) { composer.handleComposition(ClassBodyComposer.CodeCategory.SENTENCE, "tmp.show();\n"); + } else if (loadProcedure instanceof DiskLoadProcedure) { + composer.handleComposition(ClassBodyComposer.CodeCategory.SENTENCE, + String.format("tmp.write().format(\"com.databricks.spark.csv\")" + + ".save(\"%s\");\n", ((DiskLoadProcedure) loadProcedure).path)); + // composer.handleComposition(ClassBodyComposer.CodeCategory.SENTENCE, + // String.format("tmp.write().text(\"%s\");\n", ((DiskLoadProcedure) loadProcedure).path)); } visitNext(loadProcedure); } diff --git a/core/src/main/java/com/qihoo/qsql/exec/JdbcPipeline.java b/core/src/main/java/com/qihoo/qsql/exec/JdbcPipeline.java index 6fb45988..a9830a45 100644 --- a/core/src/main/java/com/qihoo/qsql/exec/JdbcPipeline.java +++ b/core/src/main/java/com/qihoo/qsql/exec/JdbcPipeline.java @@ -88,9 +88,9 @@ public static Connection createSpecificConnection(String json, List pars try { Map properties = parseJsonSchema(parsedTables, json); switch (properties.get("type")) { - case "mysql": - LOGGER.debug("Connecting to MySQL server...."); - return createMySqlConnection(properties); + case "jdbc": + LOGGER.debug("Connecting to JDBC server...."); + return createJdbcConnection(properties); case "elasticsearch": LOGGER.debug("Connection to Elasticsearch server...."); return createElasticsearchConnection(json); @@ -133,8 +133,8 @@ public static Connection createSpecificConnection(List assemble return createElasticsearchConnection( "inline: " + MetadataPostman.assembleSchema(assemblers)); case JDBC: - LOGGER.debug("Connecting to MySQL server...."); - return createMySqlConnection(conn); + LOGGER.debug("Connecting to JDBC 
server...."); + return createJdbcConnection(conn); default: throw new RuntimeException("Unsupported jdbc type"); } @@ -154,13 +154,19 @@ private static Connection createElasticsearchConnection(String json) throws SQLE return connection; } - private static Connection createMySqlConnection(Map conn) + private static Connection createJdbcConnection(Map conn) throws ClassNotFoundException, SQLException { - Class.forName("com.mysql.jdbc.Driver"); - String ip = conn.getOrDefault("jdbcNode", ""); - String port = conn.getOrDefault("jdbcPort", ""); - String db = conn.getOrDefault("dbName", ""); - String url = conn.getOrDefault("jdbcUrl", "jdbc:mysql://" + ip + ":" + port + "/" + db); + if (! conn.containsKey("jdbcDriver")) { + throw new RuntimeException("The `jdbcDriver` property needed to be set."); + } + Class.forName(conn.get("jdbcDriver")); + // String ip = conn.getOrDefault("jdbcNode", ""); + // String port = conn.getOrDefault("jdbcPort", ""); + // String db = conn.getOrDefault("dbName", ""); + if (! conn.containsKey("jdbcUrl")) { + throw new RuntimeException("The `jdbcUrl` property needed to be set."); + } + String url = conn.get("jdbcUrl"); String user = conn.getOrDefault("jdbcUser", ""); String password = conn.getOrDefault("jdbcPassword", ""); Connection connection = DriverManager.getConnection(url, user, password); @@ -203,9 +209,14 @@ private static Map parseJsonSchema(List names, String ur * @throws SQLException sql exception */ public static Connection createCsvConnection(String json) throws SQLException { - Properties info = new Properties(); - info.put("model", json); - Connection connection = DriverManager.getConnection("jdbc:calcite:", info); + ConnectionFactory connectionFactory = new MapConnectionFactory( + ImmutableMap.of("unquotedCasing", "unchanged", "caseSensitive", "true"), + ImmutableList.of() + ).with("model", json); + + // Properties info = new Properties(); + // info.put("model", json); + Connection connection = connectionFactory.createConnection(); LOGGER.debug("Connect with embedded calcite server successfully!"); return connection; } @@ -219,7 +230,8 @@ public void run() { QueryProcedure next = procedure.next(); ResultSet resultSet = establishStatement(); - if (next instanceof DiskLoadProcedure) { + //TODO add jdbc sql translate + if (next.hasNext() && next.next() instanceof DiskLoadProcedure) { String path = ((DiskLoadProcedure) next).path; String deliminator; if (((DiskLoadProcedure) next).getDataFormat() == LoadProcedure.DataFormat.DEFAULT) { @@ -358,7 +370,7 @@ private Connection getConnection() { } enum JdbcType { - ELASTICSEARCH, MYSQL, CSV + ELASTICSEARCH, JDBC, CSV } public interface ConnectionPostProcessor { @@ -428,7 +440,7 @@ public static class JsonVisitor { private JdbcType type = null; JsonVisitor(List names) { - this.names = names.stream().collect(Collectors.toList()); + this.names = new ArrayList<>(names); } Map getConnectionInfo() { @@ -440,7 +452,7 @@ Map getConnectionInfo() { case CSV: connectionInfo.put("type", "csv"); break; - case MYSQL: + case JDBC: connectionInfo = jdbcProps.stream().reduce((left, right) -> { String leftUrl = left.getOrDefault("jdbcUrl", ""); @@ -456,7 +468,7 @@ Map getConnectionInfo() { "Not find any schema info for given table names in " + "sql")); - connectionInfo.put("type", "mysql"); + connectionInfo.put("type", "jdbc"); break; default: throw new RuntimeException("Do not support this engine type: " + type); @@ -473,10 +485,7 @@ void visit(JsonRoot jsonRoot) { } boolean visit(JsonSchema schema) { - if (schema instanceof 
JsonCustomSchema) { - return visit((JsonCustomSchema) schema); - } - return false; + return schema instanceof JsonCustomSchema && visit((JsonCustomSchema) schema); } boolean visit(JsonCustomSchema schema) { @@ -499,8 +508,8 @@ boolean visit(JsonCustomSchema schema) { } if (jdbcProps.size() == names.size()) { - if (schema.factory.toLowerCase().contains("mysql")) { - type = JdbcType.MYSQL; + if (schema.factory.toLowerCase().contains("jdbc")) { + type = JdbcType.JDBC; } else if (schema.factory.toLowerCase().contains("elasticsearch")) { type = JdbcType.ELASTICSEARCH; } else if (schema.factory.toLowerCase().contains("csv")) { diff --git a/core/src/main/java/com/qihoo/qsql/exec/result/JdbcPipelineResult.java b/core/src/main/java/com/qihoo/qsql/exec/result/JdbcPipelineResult.java index 64ad69d1..b25194dd 100644 --- a/core/src/main/java/com/qihoo/qsql/exec/result/JdbcPipelineResult.java +++ b/core/src/main/java/com/qihoo/qsql/exec/result/JdbcPipelineResult.java @@ -1,9 +1,16 @@ package com.qihoo.qsql.exec.result; import java.io.IOException; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Types; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.List; +import java.util.function.Function; +import java.util.function.Supplier; /** * Iterator of reading data from {@link JdbcPipelineResult}, which is the result of {@link @@ -41,12 +48,126 @@ public ShowPipelineResult(CloseableIterator iterator) { @Override public void run() { - if (! iterator.hasNext()) { - System.out.println("Empty set"); - } - iterator.forEachRemaining(result -> System.out.println(JdbcResultSetIterator.CONCAT_FUNC.apply(result))); + print(); close(); } + + /** + * print result set. + */ + public void print() { + try { + ResultSet resultSet = ((JdbcResultSetIterator) iterator).getResultSet(); + ResultSetMetaData meta = resultSet.getMetaData(); + int length = meta.getColumnCount(); + String[] colLabels = new String[length]; + int[] colCounts = new int[length]; + int[] types = new int[length]; + int[] changes = new int[length]; + Arrays.fill(changes, 0); + + for (int i = 0; i < meta.getColumnCount(); i++) { + colLabels[i] = meta.getColumnLabel(i + 1).toUpperCase(); + types[i] = meta.getColumnType(i + 1); + } + + fillWithDisplaySize(types, colCounts); + printResults(meta, resultSet, colLabels, colCounts, changes); + } catch (SQLException ex) { + throw new RuntimeException(ex); + } + } + + private void printResults(ResultSetMetaData meta, ResultSet resultSet, + String[] colLabels, int[] colCounts, int[] changes) throws SQLException { + StringBuilder builder = new StringBuilder(); + + for (int i = 0; i < meta.getColumnCount(); i++) { + if (colLabels[i].length() > colCounts[i]) { + changes[i] = colLabels[i].length() - colCounts[i]; + colCounts[i] = colLabels[i].length(); + } + int sep = (colCounts[i] - colLabels[i].length()); + builder.append(String.format("|%s%" + (sep == 0 ? 
"" : sep) + "s", colLabels[i], "")); + } + builder.append("|"); + int[] colWeights = Arrays.copyOf(colCounts, colCounts.length); + + Function component = (labels) -> { + int[] weights = new int[colWeights.length]; + for (int i = 0; i < weights.length; i++) { + weights[i] = colWeights[i] + changes[i]; + } + return weights; + }; + + Supplier framer = () -> + "+" + Arrays.stream(component.apply(colLabels)) + .mapToObj(col -> { + char[] fr = new char[col]; + Arrays.fill(fr, '-'); + return new String(fr); + }).reduce((x, y) -> x + "+" + y).orElse("") + "+"; + + if (! resultSet.next()) { + System.out.println("[Empty set]"); + return; + } + + System.out.println(framer.get()); + System.out.println(builder.toString()); + System.out.println(framer.get()); + + do { + StringBuilder line = new StringBuilder(); + for (int i = 0; i < meta.getColumnCount(); i++) { + String value = resultSet.getString(i + 1); + //bug here + if (value.length() > colCounts[i]) { + changes[i] = value.length() - colCounts[i]; + colCounts[i] = value.length(); + } + int sep = (colCounts[i] - value.length()); + line.append( + String.format("|%s%" + (sep == 0 ? "" : sep) + "s", value, "")); + } + line.append("|"); + System.out.println(line.toString()); + } + while (resultSet.next()); + + System.out.println(framer.get()); + } + + private void fillWithDisplaySize(int[] type, int[] colCounts) { + for (int i = 0; i < type.length; i++) { + switch (type[i]) { + case Types.BOOLEAN: + case Types.TINYINT: + case Types.SMALLINT: + colCounts[i] = 4; + break; + case Types.INTEGER: + case Types.BIGINT: + case Types.REAL: + case Types.FLOAT: + case Types.DOUBLE: + colCounts[i] = 8; + break; + case Types.CHAR: + case Types.VARCHAR: + colCounts[i] = 20; + break; + case Types.DATE: + case Types.TIME: + case Types.TIMESTAMP: + colCounts[i] = 20; + break; + default: + colCounts[i] = 20; + } + } + } } /** diff --git a/core/src/main/java/com/qihoo/qsql/exec/result/JdbcResultSetIterator.java b/core/src/main/java/com/qihoo/qsql/exec/result/JdbcResultSetIterator.java index 7ca4e98d..159609d6 100644 --- a/core/src/main/java/com/qihoo/qsql/exec/result/JdbcResultSetIterator.java +++ b/core/src/main/java/com/qihoo/qsql/exec/result/JdbcResultSetIterator.java @@ -76,4 +76,8 @@ public void close() { } } } + + public ResultSet getResultSet() { + return resultSet; + } } diff --git a/core/src/main/java/com/qihoo/qsql/exec/spark/SparkPipeline.java b/core/src/main/java/com/qihoo/qsql/exec/spark/SparkPipeline.java index 8c79a8c0..cdd52c07 100644 --- a/core/src/main/java/com/qihoo/qsql/exec/spark/SparkPipeline.java +++ b/core/src/main/java/com/qihoo/qsql/exec/spark/SparkPipeline.java @@ -95,6 +95,6 @@ public void shutdown() { @Override public String source() { - return wrapper.show().toString(); + return wrapper.toString(); } } diff --git a/core/src/main/java/com/qihoo/qsql/launcher/ArgumentsSupplier.java b/core/src/main/java/com/qihoo/qsql/launcher/ArgumentsSupplier.java index 522fbfc4..a995b057 100644 --- a/core/src/main/java/com/qihoo/qsql/launcher/ArgumentsSupplier.java +++ b/core/src/main/java/com/qihoo/qsql/launcher/ArgumentsSupplier.java @@ -46,6 +46,8 @@ public List assemblySparkOptions() { arguments.add(parser.getOptionValue(OptionsParser.SubmitOption.JAR_NAME)); arguments.add(longSparkOpt("jar")); arguments.add(parser.getOptionValue(SubmitOption.JAR)); + arguments.add(longSparkOpt("master")); + arguments.add(parser.getOptionValue(SubmitOption.MASTER_MODE)); arguments.add(longSparkOpt("runner")); 
arguments.add(parser.getOptionValue(OptionsParser.SubmitOption.RUNNER)); return arguments; diff --git a/core/src/main/java/com/qihoo/qsql/launcher/ExecutionDispatcher.java b/core/src/main/java/com/qihoo/qsql/launcher/ExecutionDispatcher.java index 5c662223..bd39b5ce 100644 --- a/core/src/main/java/com/qihoo/qsql/launcher/ExecutionDispatcher.java +++ b/core/src/main/java/com/qihoo/qsql/launcher/ExecutionDispatcher.java @@ -9,15 +9,17 @@ import com.qihoo.qsql.exec.AbstractPipeline; import com.qihoo.qsql.exec.JdbcPipeline; import com.qihoo.qsql.exec.result.CloseableIterator; +import com.qihoo.qsql.exec.result.JdbcPipelineResult; import com.qihoo.qsql.exec.result.JdbcResultSetIterator; import com.qihoo.qsql.launcher.OptionsParser.SubmitOption; import com.qihoo.qsql.metadata.MetadataMapping; import com.qihoo.qsql.metadata.MetadataPostman; import com.qihoo.qsql.metadata.SchemaAssembler; import com.qihoo.qsql.plan.QueryProcedureProducer; +import com.qihoo.qsql.plan.QueryTables; import com.qihoo.qsql.plan.proc.QueryProcedure; +import com.qihoo.qsql.utils.PropertiesReader; import com.qihoo.qsql.utils.SqlUtil; -import java.io.File; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.sql.Connection; @@ -27,7 +29,6 @@ import java.util.Base64; import java.util.List; import org.apache.commons.cli.ParseException; -import org.apache.log4j.PropertyConfigurator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -39,7 +40,7 @@ public class ExecutionDispatcher { private static final Logger LOGGER = LoggerFactory.getLogger(ExecutionDispatcher.class); static { - config(); + PropertiesReader.configLogger(); } /** @@ -57,11 +58,17 @@ public static void main(String[] args) throws } OptionsParser parser = new OptionsParser(args); - String sqlArg = parser.getOptionValue(SubmitOption.SQL); + final String sqlArg = parser.getOptionValue(SubmitOption.SQL); String runner = parser.getOptionValue(SubmitOption.RUNNER); String sql = new String(Base64.getDecoder().decode(sqlArg), StandardCharsets.UTF_8); - List tableNames = SqlUtil.parseTableName(sql); + LOGGER.info("Your SQL is '{}'", sql); + QueryTables tables = SqlUtil.parseTableName(sql); + List tableNames = tables.tableNames; + + if (tables.isDml()) { + runner = "SPARK"; + } welcome(); long latestTime = System.currentTimeMillis(); @@ -96,7 +103,7 @@ public static void main(String[] args) throws ProcessExecClient execClient = ProcessExecClient.createProcessClient(pipeline, parser); execClient.exec(); - System.out.printf("(%.2f sec)\n", ((double) (System.currentTimeMillis() - latestTime) / 1000)); + System.out.printf("(%.2f sec)\r\n", ((double) (System.currentTimeMillis() - latestTime) / 1000)); } private static boolean tryToExecuteQueryDirectly(String sql, List tableNames, String runner) @@ -133,11 +140,7 @@ private static void executeJdbcQuery(Connection connection, String sql) { try (PreparedStatement statement = connection.prepareStatement(sql)) { ResultSet resultSet = statement.executeQuery(); try (CloseableIterator iterator = new JdbcResultSetIterator<>(resultSet)) { - if (! 
iterator.hasNext()) { - System.out.println("[Empty Set]"); - } - iterator.forEachRemaining(result -> - System.out.println(JdbcResultSetIterator.CONCAT_FUNC.apply(result))); + new JdbcPipelineResult.ShowPipelineResult(iterator).print(); } catch (IOException ex) { throw new RuntimeException(ex); } @@ -182,17 +185,9 @@ private static void welcome() { + " / __ \\__ __(_)____/ /__/ ___// __ \\ / / \n" + " / / / / / / / / ___/ //_/\\__ \\/ / / / / / \n" + " / /_/ / /_/ / / /__/ ,< ___/ / /_/ / / /___\n" - + "Welcome to \\___\\_\\__,_/_/\\___/_/|_|/____/\\___\\_\\/_____/ version 0.5."; + + "Welcome to \\___\\_\\__,_/_/\\___/_/|_|/____/\\___\\_\\/_____/ version 0.6."; String slogan = " \\ Process data placed anywhere with the most flexible SQL /"; System.out.println(welcome); System.out.println(slogan); } - - private static void config() { - String logProp; - if (((logProp = System.getenv("QSQL_HOME")) != null) && ! logProp.isEmpty()) { - PropertyConfigurator.configure(logProp - + File.separator + "conf" + File.separator + "log4j.properties"); - } - } } diff --git a/core/src/main/java/com/qihoo/qsql/launcher/OptionsParser.java b/core/src/main/java/com/qihoo/qsql/launcher/OptionsParser.java index d753c04a..ce5cee92 100644 --- a/core/src/main/java/com/qihoo/qsql/launcher/OptionsParser.java +++ b/core/src/main/java/com/qihoo/qsql/launcher/OptionsParser.java @@ -52,7 +52,6 @@ public String getOptionValue(SubmitOption option) { } } - public enum SubmitOption { CLASS_NAME("class_name", "", null, null), JAR_NAME("jar_name", "", "'non-opt'", null), @@ -61,7 +60,7 @@ public enum SubmitOption { WORKER_MEMORY("worker_memory", "1G", "executor-memory", null), DRIVER_MEMORY("driver_memory", "3G", "driver-memory", null), WORKER_NUM("worker_num", "20", "num-executors", null), - RUNNER("runner", "jdbc", null, null), + RUNNER("runner", "dynamic", null, null), SQL("sql", "", null, null), APP_NAME("app_name", "", null, null), FILE("file", "", null, null); diff --git a/core/src/main/java/com/qihoo/qsql/launcher/ProcessExecutor.java b/core/src/main/java/com/qihoo/qsql/launcher/ProcessExecutor.java index 6e62db84..280fb558 100644 --- a/core/src/main/java/com/qihoo/qsql/launcher/ProcessExecutor.java +++ b/core/src/main/java/com/qihoo/qsql/launcher/ProcessExecutor.java @@ -37,10 +37,12 @@ public static void main(String[] args) { Option optionJars = Option.builder().longOpt("jar").hasArg().desc("jars").build(); Option optionAdapter = Option.builder().longOpt("runner").hasArg().desc("compute runner type").build(); Option optionAppName = Option.builder().longOpt("app_name").hasArg().desc("app name").build(); + Option optionMaster = Option.builder().longOpt("master").hasArg().desc("master").build(); Options options = new Options(); options.addOption(optionSourceCode).addOption(optionClassName) - .addOption(optionAdapter).addOption(optionAppName).addOption(optionJars); + .addOption(optionAdapter).addOption(optionAppName) + .addOption(optionJars).addOption(optionMaster); CommandLineParser parser = new DefaultParser(); String className; @@ -48,6 +50,7 @@ public static void main(String[] args) { String runner; String appName = "QSQL-" + UUID.randomUUID(); String extraJars; + String master; try { CommandLine commandLine = parser.parse(options, args); @@ -57,11 +60,13 @@ public static void main(String[] args) { if (commandLine.hasOption("source") && commandLine.hasOption("class_name") && commandLine.hasOption("runner") - && commandLine.hasOption("jar")) { + && commandLine.hasOption("jar") + && commandLine.hasOption("master")) { 
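+ // All five options are required here; a typical launch (hypothetical values): + // --source <base64 class body> --class_name Requirement_xyz --runner SPARK + // --jar /path/extra.jar --master yarn-client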
source = new String(Base64.getDecoder().decode(commandLine.getOptionValue("source"))); className = commandLine.getOptionValue("class_name"); runner = commandLine.getOptionValue("runner"); extraJars = commandLine.getOptionValue("jar"); + master = commandLine.getOptionValue("master"); } else { throw new RuntimeException("Options --source, --class_name, --runner, --jar or --master not found"); } @@ -70,11 +75,12 @@ } ProcessExecutor executor = new ProcessExecutor(); - executor.execute(source, className, runner, appName, extraJars); + executor.execute(source, className, runner, appName, extraJars, master); } @SuppressWarnings("unchecked") - private void execute(String source, String className, String runner, String appName, String extraJars) { + private void execute(String source, String className, + String runner, String appName, String extraJars, String master) { Class requirementClass; try { requirementClass = ClassBodyWrapper.compileSourceAndLoadClass( @@ -84,12 +90,24 @@ } switch (runner.toUpperCase()) { - case "DYNAMIC": - case "SPARK": + case "FLINK": + try { + final Constructor constructor = + ((Class) requirementClass).getConstructor(ExecutionEnvironment.class); + + ExecutionEnvironment executionEnvironment = ExecutionEnvironment.getExecutionEnvironment(); + constructor.newInstance(executionEnvironment).execute(); + } catch (NoSuchMethodException | IllegalAccessException + | InvocationTargetException | InstantiationException ex) { + throw new RuntimeException(ex); + } + break; + default: try { final Constructor constructor = ((Class) requirementClass).getConstructor(SparkSession.class); SparkSession sc = SparkSession.builder() + .master(master) .appName(appName) .enableHiveSupport() .getOrCreate(); @@ -101,19 +119,6 @@ throw new RuntimeException(ex); } break; - case "FLINK": - try { - final Constructor constructor = - ((Class) requirementClass).getConstructor(ExecutionEnvironment.class); - - ExecutionEnvironment executionEnvironment = ExecutionEnvironment.getExecutionEnvironment(); - constructor.newInstance(executionEnvironment).execute(); - } catch (NoSuchMethodException | IllegalAccessException - | InvocationTargetException | InstantiationException ex) { - throw new RuntimeException(ex); - } - break; - default: } } } diff --git a/core/src/main/java/com/qihoo/qsql/metadata/MetadataClient.java b/core/src/main/java/com/qihoo/qsql/metadata/MetadataClient.java index db6c39a7..c9b56196 100644 --- a/core/src/main/java/com/qihoo/qsql/metadata/MetadataClient.java +++ b/core/src/main/java/com/qihoo/qsql/metadata/MetadataClient.java @@ -15,12 +15,18 @@ import java.util.ArrayList; import java.util.List; import java.util.Properties; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Provide methods to fetch data from metastore. */ +//TODO replace with spring+mybatis public class MetadataClient implements AutoCloseable { + private static final Logger LOGGER = LoggerFactory.getLogger(MetadataClient.class); + + //TODO think about more than one metastore private static Properties properties; static { @@ -40,12 +46,14 @@ public MetadataClient() throws SQLException { /** * select by dbId. 
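+ * Runs {@code select DB_ID, NAME, DB_TYPE from DBS where DB_ID ='%d'} against the metastore.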
+ * * @param dbId db identifier * @return database value */ public DatabaseValue getBasicDatabaseInfoById(Long dbId) { DatabaseValue databaseValue = null; - String sql = String.format("select DB_ID,NAME,DB_TYPE from DBS where DB_ID ='%d'", dbId); + String sql = String.format("select DB_ID, NAME, DB_TYPE from DBS where DB_ID ='%d'", dbId); + LOGGER.debug("getBasicDatabaseInfoById sql is {}", sql); try (PreparedStatement preparedStatement = connection.prepareStatement(sql)) { try (ResultSet resultSet = preparedStatement.executeQuery()) { if (resultSet != null && resultSet.next()) { @@ -63,12 +71,14 @@ public DatabaseValue getBasicDatabaseInfoById(Long dbId) { /** * select by dbName. + * * @param databaseName database name * @return database value */ public DatabaseValue getBasicDatabaseInfo(String databaseName) { DatabaseValue databaseValue = null; - String sql = String.format("select DB_ID, `DESC`, NAME,DB_TYPE from DBS where name ='%s'", databaseName); + String sql = String.format("select DB_ID, `DESC`, NAME, DB_TYPE from DBS where name ='%s'", databaseName); + LOGGER.debug("getBasicDatabaseInfo sql is {}", sql); try (PreparedStatement preparedStatement = connection.prepareStatement(sql)) { try (ResultSet resultSet = preparedStatement.executeQuery()) { if (resultSet != null && resultSet.next()) { @@ -87,20 +97,24 @@ public DatabaseValue getBasicDatabaseInfo(String databaseName) { /** * insert data value into table. + * * @param value data value */ - public void insertBasicDatabaseInfo(DatabaseValue value) { + public Long insertBasicDatabaseInfo(DatabaseValue value) { String sql = String.format("INSERT INTO DBS(NAME, DB_TYPE, `DESC`) VALUES('%s', '%s', '%s')", value.getName(), value.getDbType(), value.getDesc()); + LOGGER.debug("insertBasicDatabaseInfo sql is {}", sql); try (PreparedStatement preparedStatement = connection.prepareStatement(sql)) { preparedStatement.execute(); } catch (SQLException ex) { throw new RuntimeException(ex); } + return getLastInsertPrimaryKey(); } /** * insert schema params into table. + * * @param values database params */ public void insertDatabaseSchema(List values) { @@ -110,7 +124,8 @@ public void insertDatabaseSchema(List values) { "(" + value.getDbId() + ", '" + value.getParamKey() + "', '" + value.getParamValue() + "')") .reduce((left, right) -> left + ", " + right).orElse("()"); String sql = String.format("INSERT INTO DATABASE_PARAMS(DB_ID, PARAM_KEY, PARAM_VALUE) VALUES %s", - waitedForInsert); + waitedForInsert); + LOGGER.debug("insertDatabaseSchema sql is {}", sql); try (PreparedStatement preparedStatement = connection.prepareStatement(sql)) { preparedStatement.execute(); } catch (SQLException ex) { @@ -120,12 +135,14 @@ public void insertDatabaseSchema(List values) { /** * d. + * * @param databaseId wait * @return wait */ public List getDatabaseSchema(Long databaseId) { List databaseParams = new ArrayList<>(); String sql = String.format("select PARAM_KEY,PARAM_VALUE from DATABASE_PARAMS where DB_ID='%d'", databaseId); + LOGGER.debug("getDatabaseSchema sql is {}", sql); try (PreparedStatement preparedStatement = connection.prepareStatement(sql)) { try (ResultSet resultSet = preparedStatement.executeQuery()) { if (resultSet != null) { @@ -146,26 +163,31 @@ public List getDatabaseSchema(Long databaseId) { /** * insert table schema into table. 
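+ * Besides inserting, it now returns the generated primary key, read back via {@code getLastInsertPrimaryKey()}.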
+ * * @param value data value */ - public void insertTableSchema(TableValue value) { + public Long insertTableSchema(TableValue value) { String sql = String.format("INSERT INTO TBLS(DB_ID, TBL_NAME) VALUES(%s, '%s')", value.getDbId(), value.getTblName()); + LOGGER.debug("insertTableSchema sql is {}", sql); try (PreparedStatement preparedStatement = connection.prepareStatement(sql)) { preparedStatement.execute(); } catch (SQLException ex) { throw new RuntimeException(ex); } + return getLastInsertPrimaryKey(); } /** * d. + * * @param tableName wait * @return wait */ public List getTableSchema(String tableName) { List tbls = new ArrayList<>(); String sql = String.format("select DB_ID,TBL_ID,TBL_NAME from TBLS where TBL_NAME='%s'", tableName); + LOGGER.debug("getTableSchema sql is {}", sql); try (PreparedStatement preparedStatement = connection.prepareStatement(sql)) { try (ResultSet resultSet = preparedStatement.executeQuery()) { if (resultSet != null) { @@ -186,6 +208,7 @@ public List getTableSchema(String tableName) { /** * insert table columns into table. + * * @param columns data value */ public void insertFieldsSchema(List columns) { @@ -197,6 +220,20 @@ public void insertFieldsSchema(List columns) { .reduce((left, right) -> left + ", " + right).orElse("()"); String sql = String.format( "INSERT INTO COLUMNS(CD_ID, COLUMN_NAME,TYPE_NAME,INTEGER_IDX) VALUES %s", waitedForInsert); + LOGGER.debug("insertFieldsSchema sql is {}", sql); + try (PreparedStatement preparedStatement = connection.prepareStatement(sql)) { + preparedStatement.execute(); + } catch (SQLException ex) { + throw new RuntimeException(ex); + } + } + + /** + * . + */ + public void deleteFieldsSchema(Long tbId) { + String sql = String.format("DELETE FROM COLUMNS WHERE CD_ID = %s", tbId.toString()); + LOGGER.debug("deleteFieldsSchema sql is {}", sql); try (PreparedStatement preparedStatement = connection.prepareStatement(sql)) { preparedStatement.execute(); } catch (SQLException ex) { @@ -206,6 +243,7 @@ public void insertFieldsSchema(List columns) { /** * d. + * * @param tableId wait * @return wait */ @@ -214,6 +252,7 @@ public List getFieldsSchema(Long tableId) { String sql = String.format("" + "SELECT COLUMN_NAME,TYPE_NAME,INTEGER_IDX FROM COLUMNS WHERE CD_ID='%s' ORDER BY " + "INTEGER_IDX", tableId); + LOGGER.debug("getFieldsSchema sql is {}", sql); try (PreparedStatement preparedStatement = connection.prepareStatement(sql)) { try (ResultSet resultSet = preparedStatement.executeQuery()) { if (resultSet != null) { @@ -232,6 +271,20 @@ public List getFieldsSchema(Long tableId) { } } + //NOT THREAD-SAFE + private Long getLastInsertPrimaryKey() { + try (PreparedStatement preparedStatement = + connection.prepareStatement(getLastInsertSql())) { + ResultSet resultSet = preparedStatement.executeQuery(); + if (! resultSet.next()) { + throw new RuntimeException("Execute `SELECT LAST_INSERT_ID()` failed!!"); + } + return resultSet.getLong(1); + } catch (SQLException ex) { + throw new RuntimeException(ex); + } + } + private Connection createConnection() throws SQLException { if (! MetaConnectionUtil.isEmbeddedDatabase(properties)) { return MetaConnectionUtil.getExternalConnection(properties); @@ -271,4 +324,19 @@ public void close() { throw new RuntimeException(ex); } } + + /** + * . 
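+ * Picks the dialect-specific statement that fetches the key generated by the latest INSERT on this connection: {@code SELECT LAST_INSERT_ID()} on MySQL, {@code SELECT LAST_INSERT_ROWID()} on SQLite; other metastore drivers are rejected.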
+ */ + public String getLastInsertSql() throws SQLException { + String driver = connection.getMetaData().getDriverName().toLowerCase(); + if (driver.contains("mysql")) { + return "SELECT LAST_INSERT_ID()"; + } else if (driver.contains("sqlite")) { + return "SELECT LAST_INSERT_ROWID()"; + } else { + throw new RuntimeException( + "Metadata collection for this type of data source is not supported!!"); + } + } } diff --git a/core/src/main/java/com/qihoo/qsql/metadata/MetadataMapping.java b/core/src/main/java/com/qihoo/qsql/metadata/MetadataMapping.java index db688a15..9233ba7d 100644 --- a/core/src/main/java/com/qihoo/qsql/metadata/MetadataMapping.java +++ b/core/src/main/java/com/qihoo/qsql/metadata/MetadataMapping.java @@ -20,10 +20,10 @@ public enum MetadataMapping { /** * use '%' and literal 'value' to complete mapping. */ - JDBC("org.apache.calcite.adapter.mysql.MySQLSchemaFactory", - "org.apache.calcite.adapter.mysql.MySQLTableFactory", + JDBC("org.apache.calcite.adapter.custom.JdbcSchemaFactory", + "org.apache.calcite.adapter.custom.JdbcTableFactory", Arrays.asList( - "dbName", "tableName", "jdbcDriver", + "dbName", "tableName", "dbType", "jdbcDriver", "jdbcUrl", "jdbcUser", "jdbcPassword"), Collections.emptyList() ), @@ -34,6 +34,11 @@ public enum MetadataMapping { "dbName", "tableName", "cluster"), Collections.emptyList()); + public static final String ELASTICSEARCH = "es"; + public static final String MYSQL = "mysql"; + public static final String ORACLE = "oracle"; + public static final String HIVE = "hive"; + private static final String JOINT_FLAG = "%"; String schemaClass; String tableClass; @@ -50,11 +55,12 @@ public enum MetadataMapping { static MetadataMapping convertToAdapter(String name) { switch (name.toLowerCase()) { - case "es": + case ELASTICSEARCH: return MetadataMapping.Elasticsearch; - case "mysql": + case MYSQL: + case ORACLE: return MetadataMapping.JDBC; - case "hive": + case HIVE: return MetadataMapping.Hive; default: throw new RuntimeException("Not support given adapter name!!"); diff --git a/core/src/main/java/com/qihoo/qsql/metadata/MetadataPostman.java b/core/src/main/java/com/qihoo/qsql/metadata/MetadataPostman.java index cd92b27b..aced0b7d 100644 --- a/core/src/main/java/com/qihoo/qsql/metadata/MetadataPostman.java +++ b/core/src/main/java/com/qihoo/qsql/metadata/MetadataPostman.java @@ -128,12 +128,13 @@ private SchemaAssembler transformSchemaFormat() { LOGGER.debug("Received fields about table {} are {}", tableName, columnValues); + String dbType = databaseValue.getDbType().toLowerCase(); String dbName = databaseValue.getName(); String tbName = theUniqueTable.getTblName(); calciteProperties.put("dbName", dbName); calciteProperties.put("tableName", tbName); - - MetadataMapping calciteMeta = MetadataMapping.convertToAdapter(databaseValue.getDbType().toLowerCase()); + calciteProperties.put("dbType", dbType); + MetadataMapping calciteMeta = MetadataMapping.convertToAdapter(dbType); calciteMeta.completeComponentProperties(calciteProperties); return new SchemaAssembler(dbName, tbName, calciteMeta, calciteProperties, columnValues); diff --git a/core/src/main/java/com/qihoo/qsql/metadata/SchemaAssembler.java b/core/src/main/java/com/qihoo/qsql/metadata/SchemaAssembler.java index 0b3e046c..7dc2fc6b 100644 --- a/core/src/main/java/com/qihoo/qsql/metadata/SchemaAssembler.java +++ b/core/src/main/java/com/qihoo/qsql/metadata/SchemaAssembler.java @@ -92,19 +92,11 @@ private String reduceJsonTable(SchemaAssembler schemaAssembler) { //maybe exist same db name problem 
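+ // For illustration, with hypothetical connection values the operand built below renders as + // "coordinates": "{'es-host': 9200}" and + // "userConfig": "{'bulk.flush.max.actions': 10, 'bulk.flush.max.size.mb': 1,'esUser':'elastic','esPass':'secret'}"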
private String reduceJsonSchemaOperand() { - String coordinates = new StringBuilder("{'") - .append(connProperties.getOrDefault("esNodes", "")) - .append("': ") - .append(connProperties.getOrDefault("esPort", "")) - .append("}") - .toString(); - String userConfig = new StringBuilder("{'bulk.flush.max.actions': 10, ") - .append("'bulk.flush.max.size.mb': 1,") - .append("'esUser':'").append(connProperties.getOrDefault("esUser", "")) - .append("',") - .append("'esPass':'").append(connProperties.getOrDefault("esPass", "")) - .append("'}") - .toString(); + String coordinates = "{'" + connProperties.getOrDefault("esNodes", "") + + "': " + connProperties.getOrDefault("esPort", "") + "}"; + String userConfig = "{'bulk.flush.max.actions': 10, " + "'bulk.flush.max.size.mb': 1," + + "'esUser':'" + connProperties.getOrDefault("esUser", "") + + "'," + "'esPass':'" + connProperties.getOrDefault("esPass", "") + "'}"; return Stream.of( formatPlainProperty("coordinates", coordinates), formatPlainProperty("userConfig", userConfig), @@ -130,59 +122,51 @@ private String reduceJsonTableOperand(Map properties, MetadataMa private String reduceJsonFields(List fields) { return fields.stream() .filter(field -> ! (field.getColumnName().isEmpty() || field.getTypeName().isEmpty())) - .map(field -> { - String type = field.getTypeName(); - switch (type.trim()) { - case "date": - case "string": - case "int": - return field.toString(); - default: - ColumnValue value = new ColumnValue(); - value.setColumnName(field.getColumnName()); - value.setTypeName("string"); - return value.toString(); - } - }) + .map(this::convertFieldType) .reduce((left, right) -> left + ",\n" + right) .orElse(""); } private String formatPlainProperty(String key, String value) { - return new StringBuilder("\"") - .append(key) - .append("\": ") - .append("\"") - .append(value) - .append("\"") - .toString(); + return "\"" + key + "\": " + "\"" + value + "\""; } private String formatObjectProperty(String key, String value) { - return new StringBuilder("\"") - .append(key) - .append("\": ") - .append("{\n") - .append(value) - .append("\n}") - .toString(); + return "\"" + key + "\": " + "{\n" + value + "\n}"; } private String formatElementProperty(String value) { - return new StringBuilder("{\n") - .append(value) - .append("\n}") - .toString(); + return "{\n" + value + "\n}"; } private String formatArrayProperty(String key, String value) { - return new StringBuilder("\"") - .append(key) - .append("\": ") - .append("[\n") - .append(value) - .append("\n]") - .toString(); + return "\"" + key + "\": " + "[\n" + value + "\n]"; + } + + private String convertFieldType(ColumnValue columnValue) { + switch (columnValue.getTypeName().trim().toUpperCase()) { + case "INT": + case "INTEGER": + case "STRING": + case "VARCHAR": + case "TINYINT": + case "SMALLINT": + case "BIGINT": + case "FLOAT": + case "DOUBLE": + case "LONG": + case "BOOLEAN": + case "ARRAY": + case "MAP": + case "DATE": + case "TIMESTAMP": + return columnValue.toString(); + default: + ColumnValue value = new ColumnValue(); + value.setColumnName(columnValue.getColumnName()); + value.setTypeName("STRING"); + return value.toString(); + } } } diff --git a/core/src/main/java/com/qihoo/qsql/metadata/collect/ElasticsearchCollector.java b/core/src/main/java/com/qihoo/qsql/metadata/collect/ElasticsearchCollector.java new file mode 100644 index 00000000..356131ce --- /dev/null +++ b/core/src/main/java/com/qihoo/qsql/metadata/collect/ElasticsearchCollector.java @@ -0,0 +1,232 @@ +package 
com.qihoo.qsql.metadata.collect; + + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.base.Preconditions; +import com.google.common.collect.Sets; +import com.qihoo.qsql.metadata.collect.dto.ElasticsearchProp; +import com.qihoo.qsql.metadata.entity.ColumnValue; +import com.qihoo.qsql.metadata.entity.DatabaseParamValue; +import com.qihoo.qsql.metadata.entity.DatabaseValue; +import com.qihoo.qsql.metadata.entity.TableValue; +import java.io.IOException; +import java.io.InputStream; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Date; +import java.util.HashMap; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.regex.Pattern; +import java.util.stream.Collectors; +import org.apache.http.HttpHost; +import org.apache.http.auth.AuthScope; +import org.apache.http.auth.UsernamePasswordCredentials; +import org.apache.http.client.CredentialsProvider; +import org.apache.http.impl.client.BasicCredentialsProvider; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; + +public class ElasticsearchCollector extends MetadataCollector { + + private ElasticsearchProp prop; + private RestClient restClient; + private ObjectMapper mapper = new ObjectMapper(); + + /** + * . + */ + public ElasticsearchCollector(ElasticsearchProp prop, String filter) throws SQLException { + super(filter); + this.prop = prop; + Map coordinates = new HashMap<>(); + coordinates.put(prop.getEsNodes(), prop.getEsPort()); + Map userConfig = new HashMap<>(); + userConfig.put("esUser", prop.getEsUser()); + userConfig.put("esPass", prop.getEsPass()); + this.restClient = connect(coordinates, userConfig); + } + + @Override + protected DatabaseValue convertDatabaseValue() { + DatabaseValue value = new DatabaseValue(); + value.setDbType("es"); + value.setDesc("Who am I"); + String indexWithType = prop.getEsIndex(); + if (indexWithType.lastIndexOf("/") == - 1) { + value.setName(indexWithType); + } else { + value.setName(indexWithType.substring(0, indexWithType.lastIndexOf("/"))); + } + return value; + } + + @Override + protected List convertDatabaseParamValue(Long dbId) { + DatabaseParamValue[] paramValues = new DatabaseParamValue[6]; + for (int i = 0; i < paramValues.length; i++) { + paramValues[i] = new DatabaseParamValue(dbId); + } + paramValues[0].setParamKey("esNodes").setParamValue(prop.getEsNodes()); + paramValues[1].setParamKey("esPort").setParamValue(Integer.toString(prop.getEsPort())); + paramValues[2].setParamKey("esUser").setParamValue(prop.getEsUser()); + paramValues[3].setParamKey("esPass").setParamValue(prop.getEsPass()); + paramValues[4].setParamKey("esIndex").setParamValue(prop.getEsIndex()); + paramValues[5].setParamKey("esScrollNum").setParamValue("1"); + return Arrays.stream(paramValues).collect(Collectors.toList()); + } + + @Override + protected TableValue convertTableValue(Long dbId, String tableName) { + TableValue value = new TableValue(); + value.setTblName(tableName); + value.setDbId(dbId); + value.setCreateTime(new Date().toString()); + return value; + } + + @Override + protected List convertColumnValue(Long tbId, String tableName, String dbName) { + try { + List columns = listFieldTypesFroElastic(dbName, tableName); + for (int i = 0; i < columns.size(); i++) { + columns.get(i).setIntegerIdx(i + 1); + columns.get(i).setComment("Who 
am I? 24601!!"); + columns.get(i).setCdId(tbId); + } + return columns; + } catch (IOException ioe) { + throw new RuntimeException(ioe); + } + } + + @Override + protected List getTableNameList() { + try { + String regexp = filterRegexp.replaceAll("\\.", "\\.") + .replaceAll("\\?", ".") + .replaceAll("%", ".*") + .replaceAll("_", ".?"); + return listTypesFromElastic(prop.getEsIndex()).stream().filter(line -> { + Pattern pattern = Pattern.compile(regexp); + return pattern.matcher(line).matches(); + }).collect(Collectors.toList()); + } catch (IOException ioe) { + throw new RuntimeException(ioe); + } + } + + private static RestClient connect(Map coordinates, + Map userConfig) { + Objects.requireNonNull(coordinates, "coordinates"); + Preconditions.checkArgument(! coordinates.isEmpty(), "no ES coordinates specified"); + final Set set = new LinkedHashSet<>(); + for (Map.Entry entry : coordinates.entrySet()) { + set.add(new HttpHost(entry.getKey(), entry.getValue())); + } + + final CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + credentialsProvider.setCredentials(AuthScope.ANY, + new UsernamePasswordCredentials(userConfig.getOrDefault("esUser", "none"), + userConfig.getOrDefault("esPass", "none"))); + + return RestClient.builder(set.toArray(new HttpHost[0])) + .setHttpClientConfigCallback(httpClientBuilder -> + httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider)) + .setMaxRetryTimeoutMillis(300000).build(); + } + + private Set listTypesFromElastic(String index) throws IOException { + final String endpoint = "/" + index + "/_mapping"; + final Response response = restClient.performRequest("GET", endpoint); + try (InputStream is = response.getEntity().getContent()) { + JsonNode root = mapper.readTree(is); + if (! root.isObject() || root.size() != 1) { + final String message = String.format(Locale.ROOT, "Invalid response for %s/%s " + + "Expected object of size 1 got %s (of size %d)", response.getHost(), + response.getRequestLine(), root.getNodeType(), root.size()); + throw new IllegalStateException(message); + } + + JsonNode mappings = root.iterator().next().get("mappings"); + if (mappings == null || mappings.size() == 0) { + final String message = String.format(Locale.ROOT, "Index %s does not have any types", + index); + throw new IllegalStateException(message); + } + + Set types = Sets.newHashSet(mappings.fieldNames()); + types.remove("_default_"); + return types; + } + } + + private List listFieldTypesFroElastic(String index, String type) throws IOException { + final String endpoint = "/" + index + "/_mapping"; + final Response response = restClient.performRequest("GET", endpoint); + try (InputStream is = response.getEntity().getContent()) { + JsonNode root = mapper.readTree(is); + if (! root.isObject() || root.size() != 1) { + final String message = String.format(Locale.ROOT, "Invalid response for %s/%s " + + "Expected object of size 1 got %s (of size %d)", response.getHost(), + response.getRequestLine(), root.getNodeType(), root.size()); + throw new IllegalStateException(message); + } + + JsonNode mappings = root.iterator().next().get("mappings"); + if (! 
mappings.has(type)) { + throw new IllegalStateException("Type " + type + " not found."); + } + + JsonNode typeObject = mappings.get(type); + JsonNode properties = typeObject.get("properties"); + List columnValues = new ArrayList<>(); + + properties.fieldNames().forEachRemaining(name -> { + ColumnValue value = new ColumnValue(); + value.setComment("Who am I?"); + value.setColumnName(name); + value.setTypeName(convertDataType(properties.get(name).get("type").asText())); + columnValues.add(value); + }); + return columnValues; + } + } + + private String convertDataType(String esType) { + String type = esType.toLowerCase(); + switch (type) { + case "integer": + case "double": + case "date": + case "boolean": + case "float": + return type; + case "text": + case "keyword": + case "ip": + return "string"; + case "long": + return "bigint"; + case "short": + return "smallint"; + case "byte": + return "tinyint"; + case "half_float": + case "scaled_float": + return "float"; + case "binary": + case "object": + case "nested": + throw new RuntimeException("The current version does not support complex types"); + default: + throw new IllegalStateException("Unknown type"); + } + } +} diff --git a/core/src/main/java/com/qihoo/qsql/metadata/collect/HiveCollector.java b/core/src/main/java/com/qihoo/qsql/metadata/collect/HiveCollector.java new file mode 100644 index 00000000..06803389 --- /dev/null +++ b/core/src/main/java/com/qihoo/qsql/metadata/collect/HiveCollector.java @@ -0,0 +1,149 @@ +package com.qihoo.qsql.metadata.collect; + +import com.qihoo.qsql.metadata.collect.dto.HiveProp; +import com.qihoo.qsql.metadata.entity.ColumnValue; +import com.qihoo.qsql.metadata.entity.DatabaseParamValue; +import com.qihoo.qsql.metadata.entity.DatabaseValue; +import com.qihoo.qsql.metadata.entity.TableValue; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; +import java.util.Objects; +import org.apache.commons.lang3.StringUtils; + +public class HiveCollector extends MetadataCollector { + + //read from mysql database. 
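+ // The collector reads Hive metadata straight from the metastore's backing RDBMS + // (DBS, TBLS, COLUMNS_V2, PARTITION_KEYS below), so the HiveProp JSON carries plain + // JDBC settings plus a dbName; a sketch with hypothetical values: + // {"jdbcDriver": "com.mysql.jdbc.Driver", "jdbcUrl": "jdbc:mysql://metastore:3306/hive", + // "jdbcUser": "hive", "jdbcPassword": "***", "dbName": "default"}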
+ private HiveProp prop; + private Connection connection; + + HiveCollector(HiveProp prop, String filterRegexp) throws SQLException, ClassNotFoundException { + super(filterRegexp); + this.prop = prop; + Class.forName(prop.getJdbcDriver()); + connection = DriverManager.getConnection(prop.getJdbcUrl(), + prop.getJdbcUser(), prop.getJdbcPassword()); + } + + @Override + protected DatabaseValue convertDatabaseValue() { + DatabaseValue value = new DatabaseValue(); + value.setDbType("hive"); + value.setDesc("Who am I"); + value.setName(getDatabasePosition()); + return value; + } + + @Override + protected List convertDatabaseParamValue(Long dbId) { + DatabaseParamValue value = new DatabaseParamValue(dbId); + value.setParamKey("cluster").setParamValue("default"); + List values = new ArrayList<>(); + values.add(value); + return values; + } + + @Override + protected TableValue convertTableValue(Long dbId, String tableName) { + TableValue value = new TableValue(); + value.setTblName(tableName); + value.setDbId(dbId); + value.setCreateTime(new Date().toString()); + return value; + } + + @Override + protected List convertColumnValue(Long tbId, String tableName, String dbName) { + + List columns = new ArrayList<>(); + // read Columns + String sql = String.format("" + + "SELECT COLUMNS_V2.* " + + "FROM COLUMNS_V2, SDS, TBLS, DBS " + + "WHERE COLUMNS_V2.CD_ID = SDS.CD_ID " + + "AND SDS.SD_ID = TBLS.SD_ID " + + "AND TBLS.DB_ID = DBS.DB_ID " + + "AND TBLS.TBL_NAME='%s' " + + "AND DBS.NAME = '%s' ", + tableName, dbName); + + columns.addAll(readColumnAndPartitions(tbId, sql)); + // read Partitions + String sql2 = String.format("" + + "SELECT PARTITION_KEYS.* " + + "FROM PARTITION_KEYS, TBLS, DBS " + + "WHERE PARTITION_KEYS.TBL_ID = TBLS.TBL_ID " + + "AND TBLS.DB_ID = DBS.DB_ID " + + "AND TBLS.TBL_NAME='%s' " + + "AND DBS.NAME = '%s' ", + tableName, dbName); + columns.addAll(readColumnAndPartitions(tbId, sql2)); + + return columns; + } + + @Override + protected List getTableNameList() { + if (StringUtils.isEmpty(filterRegexp)) { + throw new RuntimeException("`Filter regular expression` needs to be set"); + } + + try (PreparedStatement preparedStatement = connection.prepareStatement( + String.format("SELECT TBL_NAME FROM TBLS WHERE TBL_NAME LIKE '%s'", filterRegexp))) { + ResultSet resultSet = preparedStatement.executeQuery(); + List tableNames = new ArrayList<>(); + while (resultSet.next()) { + tableNames.add(resultSet.getString(1)); + } + return tableNames; + } catch (SQLException ex) { + throw new RuntimeException(ex); + } + } + + private String getDatabasePosition() { + if (prop.getDbName() == null) { + throw new RuntimeException("Error when extracting dbName from property, " + + "please check properties"); + } + try (PreparedStatement preparedStatement = + connection.prepareStatement("SELECT NAME FROM DBS WHERE NAME = \"" + prop.getDbName() + "\"")) { + ResultSet resultSet = preparedStatement.executeQuery(); + if (! 
resultSet.next()) { + throw new RuntimeException("Execute `SELECT NAME FROM DBS WHERE NAME = " + + prop.getDbName() + " ` failed!!"); + } + String database = resultSet.getString(1); + if (Objects.isNull(database)) { + throw new RuntimeException("Please add db_name in `jdbcUrl`"); + } + return database; + } catch (SQLException ex) { + throw new RuntimeException(ex); + } + } + + private List readColumnAndPartitions(Long tbId, String sql) { + List columns = new ArrayList<>(); + try (PreparedStatement preparedStatement = connection.prepareStatement(sql)) { + ResultSet resultSet = preparedStatement.executeQuery(); + while (resultSet.next()) { + ColumnValue value = new ColumnValue(); + value.setColumnName(resultSet.getString(3)); + value.setTypeName(resultSet.getString(4)); + value.setCdId(tbId); + value.setIntegerIdx(resultSet.getInt(5)); + value.setComment("Who am I"); + columns.add(value); + } + } catch (SQLException ex) { + throw new RuntimeException(ex); + } + return columns; + } +} diff --git a/core/src/main/java/com/qihoo/qsql/metadata/collect/MetadataCollector.java b/core/src/main/java/com/qihoo/qsql/metadata/collect/MetadataCollector.java new file mode 100644 index 00000000..a9c25428 --- /dev/null +++ b/core/src/main/java/com/qihoo/qsql/metadata/collect/MetadataCollector.java @@ -0,0 +1,137 @@ +package com.qihoo.qsql.metadata.collect; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.qihoo.qsql.metadata.MetadataClient; +import com.qihoo.qsql.metadata.collect.dto.ElasticsearchProp; +import com.qihoo.qsql.metadata.collect.dto.HiveProp; +import com.qihoo.qsql.metadata.collect.dto.JdbcProp; +import com.qihoo.qsql.metadata.entity.ColumnValue; +import com.qihoo.qsql.metadata.entity.DatabaseParamValue; +import com.qihoo.qsql.metadata.entity.DatabaseValue; +import com.qihoo.qsql.metadata.entity.TableValue; +import com.qihoo.qsql.utils.PropertiesReader; +import java.sql.SQLException; +import java.util.List; +import java.util.Objects; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public abstract class MetadataCollector { + + private static final Logger LOGGER = LoggerFactory.getLogger(MetadataCollector.class); + private static ObjectMapper mapper = new ObjectMapper(); + + static { + PropertiesReader.configLogger(); + } + + String filterRegexp; + private MetadataClient client = new MetadataClient(); + + MetadataCollector(String filterRegexp) throws SQLException { + this.filterRegexp = filterRegexp; + } + + /** + * . + */ + public static MetadataCollector create(String json, String dataSource, String regexp) { + try { + LOGGER.info("Connecting server....."); + switch (dataSource.toLowerCase()) { + case "oracle": + return new OracleCollector( + mapper.readValue(json, JdbcProp.class), regexp); + case "hive": + return new HiveCollector( + mapper.readValue(json, HiveProp.class), regexp); + case "mysql": + return new MysqlCollector( + mapper.readValue(json, JdbcProp.class), regexp); + case "es": + case "elasticsearch": + return new ElasticsearchCollector( + mapper.readValue(json, ElasticsearchProp.class), regexp); + default: + throw new RuntimeException("Unsupported datasource."); + } + } catch (Exception ex) { + throw new RuntimeException(ex); + } + } + + /** + * entrance. 
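+ * Expects three arguments: a JSON string with the connection properties, the data + * source type, and a table-name filter; a hypothetical MySQL run: + * java com.qihoo.qsql.metadata.collect.MetadataCollector + * '{"jdbcDriver":"com.mysql.jdbc.Driver","jdbcUrl":"jdbc:mysql://host:3306/db","jdbcUser":"user","jdbcPassword":"pass"}' mysql 'tbl%'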
+ */ + public static void main(String[] args) throws SQLException { + if (args.length < 3) { + throw new RuntimeException("Required connection info, data source type and table filter"); + } + + LOGGER.info("Input params: properties({}), type({}), filter regex({})", + args[0], args[1], args[2]); + MetadataCollector.create(args[0], args[1], args[2]).execute(); + System.exit(0); + } + + /** + * . + */ + public void execute() throws SQLException { + try { + LOGGER.info("Connected successfully!!"); + client.setAutoCommit(false); + DatabaseValue dbValue = convertDatabaseValue(); + Long dbId; + DatabaseValue origin = client.getBasicDatabaseInfo(dbValue.getName()); + if (Objects.isNull(origin)) { + dbId = client.insertBasicDatabaseInfo(dbValue); + List dbParams = convertDatabaseParamValue(dbId); + client.insertDatabaseSchema(dbParams); + LOGGER.info("Insert database {} successfully!!", dbValue.getName()); + } else { + dbId = origin.getDbId(); + LOGGER.info("Reuse database {}!!", dbValue); + } + List tableNames = getTableNameList(); + for (String table : tableNames) { + Long tbId; + List originTable = client.getTableSchema(table); + + if (originTable.stream().noneMatch(val -> val.getDbId().equals(dbId))) { + TableValue tableValue = convertTableValue(dbId, table); + tbId = client.insertTableSchema(tableValue); + LOGGER.info("Insert table {} successfully!!", tableValue.getTblName()); + List cols = convertColumnValue(tbId, table, dbValue.getName()); + client.insertFieldsSchema(cols); + } else { + TableValue shoot = originTable.stream() + .filter(val -> val.getDbId().equals(dbId)).findFirst() + .orElseThrow(() -> new RuntimeException("Query table error.")); + tbId = shoot.getTblId(); + LOGGER.info("Reuse table {}!!", shoot.getTblName()); + client.deleteFieldsSchema(tbId); + LOGGER.info("Delete fields of table {}!!", shoot.getTblName()); + List cols = convertColumnValue(tbId, table, dbValue.getName()); + client.insertFieldsSchema(cols); + } + } + client.commit(); + LOGGER.info("Successfully collected metadata for {} tables!!", tableNames.size()); + LOGGER.info(tableNames.stream().reduce((x, y) -> x + "\n" + y).orElse("")); + } catch (SQLException ex) { + client.rollback(); + LOGGER.error("Collect metadata failed!!", ex); + } + } + + protected abstract DatabaseValue convertDatabaseValue(); + + protected abstract List convertDatabaseParamValue(Long dbId); + + protected abstract TableValue convertTableValue(Long dbId, String tableName); + + protected abstract List convertColumnValue(Long tbId, String tableName, String dbName); + + protected abstract List getTableNameList(); +} diff --git a/core/src/main/java/com/qihoo/qsql/metadata/collect/MysqlCollector.java b/core/src/main/java/com/qihoo/qsql/metadata/collect/MysqlCollector.java new file mode 100644 index 00000000..fd179b64 --- /dev/null +++ b/core/src/main/java/com/qihoo/qsql/metadata/collect/MysqlCollector.java @@ -0,0 +1,126 @@ +package com.qihoo.qsql.metadata.collect; + +import com.qihoo.qsql.metadata.collect.dto.JdbcProp; +import com.qihoo.qsql.metadata.entity.ColumnValue; +import com.qihoo.qsql.metadata.entity.DatabaseParamValue; +import com.qihoo.qsql.metadata.entity.DatabaseValue; +import com.qihoo.qsql.metadata.entity.TableValue; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Date; +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; +import 
org.apache.commons.lang3.StringUtils; + +public class MysqlCollector extends MetadataCollector { + + private JdbcProp prop; + private Connection connection; + + /** + * . + */ + public MysqlCollector(JdbcProp prop, String filter) throws SQLException, ClassNotFoundException { + super(filter); + this.prop = prop; + Class.forName(prop.getJdbcDriver()); + connection = DriverManager.getConnection(prop.getJdbcUrl(), + prop.getJdbcUser(), prop.getJdbcPassword()); + } + + @Override + protected DatabaseValue convertDatabaseValue() { + DatabaseValue value = new DatabaseValue(); + value.setDbType("mysql"); + value.setDesc("Who am I"); + value.setName(getDatabasePosition()); + return value; + } + + @Override + protected List convertDatabaseParamValue(Long dbId) { + DatabaseParamValue[] paramValues = new DatabaseParamValue[4]; + for (int i = 0; i < paramValues.length; i++) { + paramValues[i] = new DatabaseParamValue(dbId); + } + paramValues[0].setParamKey("jdbcUrl").setParamValue(prop.getJdbcUrl()); + paramValues[1].setParamKey("jdbcDriver").setParamValue(prop.getJdbcDriver()); + paramValues[2].setParamKey("jdbcUser").setParamValue(prop.getJdbcUser()); + paramValues[3].setParamKey("jdbcPassword").setParamValue(prop.getJdbcPassword()); + return Arrays.stream(paramValues).collect(Collectors.toList()); + } + + @Override + protected TableValue convertTableValue(Long dbId, String tableName) { + TableValue value = new TableValue(); + value.setTblName(tableName); + value.setDbId(dbId); + value.setCreateTime(new Date().toString()); + return value; + } + + @Override + protected List convertColumnValue(Long tbId, String tableName, String dbName) { + String sql = String.format("SELECT COLUMN_NAME, DATA_TYPE, ORDINAL_POSITION " + + "FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = '%s' AND TABLE_SCHEMA = '%s'", + tableName, dbName); + List columns = new ArrayList<>(); + try (PreparedStatement preparedStatement = connection.prepareStatement(sql)) { + ResultSet resultSet = preparedStatement.executeQuery(); + while (resultSet.next()) { + ColumnValue value = new ColumnValue(); + value.setColumnName(resultSet.getString(1)); + value.setTypeName(resultSet.getString(2)); + value.setCdId(tbId); + value.setIntegerIdx(resultSet.getInt(3)); + value.setComment("Who am I"); + columns.add(value); + } + } catch (SQLException ex) { + throw new RuntimeException(ex); + } + return columns; + } + + @Override + protected List getTableNameList() { + if (StringUtils.isEmpty(filterRegexp)) { + throw new RuntimeException("`Filter regular expression` needs to be set"); + } + + try (PreparedStatement preparedStatement = connection.prepareStatement( + String.format("SHOW TABLES LIKE '%s'", filterRegexp))) { + ResultSet resultSet = preparedStatement.executeQuery(); + List tableNames = new ArrayList<>(); + while (resultSet.next()) { + tableNames.add(resultSet.getString(1)); + } + return tableNames; + } catch (SQLException ex) { + throw new RuntimeException(ex); + } + } + + private String getDatabasePosition() { + try (PreparedStatement preparedStatement = + connection.prepareStatement("SELECT DATABASE()")) { + ResultSet resultSet = preparedStatement.executeQuery(); + if (! 
resultSet.next()) { + throw new RuntimeException("Execute `SELECT DATABASE()` failed!!"); + } + String database = resultSet.getString(1); + if (Objects.isNull(database)) { + throw new RuntimeException("Please add db_name in `jdbcUrl`"); + } + return database; + } catch (SQLException ex) { + throw new RuntimeException(ex); + } + } +} diff --git a/core/src/main/java/com/qihoo/qsql/metadata/collect/OracleCollector.java b/core/src/main/java/com/qihoo/qsql/metadata/collect/OracleCollector.java new file mode 100644 index 00000000..e70cc3ad --- /dev/null +++ b/core/src/main/java/com/qihoo/qsql/metadata/collect/OracleCollector.java @@ -0,0 +1,124 @@ +package com.qihoo.qsql.metadata.collect; + +import com.qihoo.qsql.metadata.collect.dto.JdbcProp; +import com.qihoo.qsql.metadata.entity.ColumnValue; +import com.qihoo.qsql.metadata.entity.DatabaseParamValue; +import com.qihoo.qsql.metadata.entity.DatabaseValue; +import com.qihoo.qsql.metadata.entity.TableValue; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Date; +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; +import org.apache.commons.lang3.StringUtils; + +public class OracleCollector extends MetadataCollector { + + private JdbcProp prop; + private Connection connection; + + OracleCollector(JdbcProp prop, String filterRegexp) throws SQLException, ClassNotFoundException { + super(filterRegexp); + this.prop = prop; + Class.forName(prop.getJdbcDriver()); + connection = DriverManager.getConnection(prop.getJdbcUrl(), + prop.getJdbcUser(), prop.getJdbcPassword()); + } + + @Override + protected DatabaseValue convertDatabaseValue() { + DatabaseValue value = new DatabaseValue(); + value.setDbType("oracle"); + value.setDesc("Who am I"); + value.setName(getDatabasePosition()); + return value; + } + + @Override + protected List convertDatabaseParamValue(Long dbId) { + DatabaseParamValue[] paramValues = new DatabaseParamValue[4]; + for (int i = 0; i < paramValues.length; i++) { + paramValues[i] = new DatabaseParamValue(dbId); + } + paramValues[0].setParamKey("jdbcUrl").setParamValue(prop.getJdbcUrl()); + paramValues[1].setParamKey("jdbcDriver").setParamValue(prop.getJdbcDriver()); + paramValues[2].setParamKey("jdbcUser").setParamValue(prop.getJdbcUser()); + paramValues[3].setParamKey("jdbcPassword").setParamValue(prop.getJdbcPassword()); + return Arrays.stream(paramValues).collect(Collectors.toList()); + } + + @Override + protected TableValue convertTableValue(Long dbId, String tableName) { + TableValue value = new TableValue(); + value.setTblName(tableName); + value.setDbId(dbId); + value.setCreateTime(new Date().toString()); + return value; + } + + @Override + protected List convertColumnValue(Long tbId, String tableName, String dbName) { + String sql = String.format("SELECT COLUMN_NAME, DATA_TYPE, COLUMN_ID " + + "FROM USER_TAB_COLUMNS WHERE TABLE_NAME = '%s'", tableName); + List columns = new ArrayList<>(); + try (PreparedStatement preparedStatement = connection.prepareStatement(sql)) { + ResultSet resultSet = preparedStatement.executeQuery(); + while (resultSet.next()) { + ColumnValue value = new ColumnValue(); + value.setColumnName(resultSet.getString(1)); + value.setTypeName(resultSet.getString(2)); + value.setCdId(tbId); + value.setIntegerIdx(resultSet.getInt(3)); + value.setComment("Who am I"); + columns.add(value); + } + } catch 
+    @Override
+    protected List<ColumnValue> convertColumnValue(Long tbId, String tableName, String dbName) {
+        String sql = String.format("SELECT COLUMN_NAME, DATA_TYPE, COLUMN_ID "
+            + "FROM USER_TAB_COLUMNS WHERE TABLE_NAME = '%s'", tableName);
+        List<ColumnValue> columns = new ArrayList<>();
+        try (PreparedStatement preparedStatement = connection.prepareStatement(sql)) {
+            ResultSet resultSet = preparedStatement.executeQuery();
+            while (resultSet.next()) {
+                ColumnValue value = new ColumnValue();
+                value.setColumnName(resultSet.getString(1));
+                value.setTypeName(resultSet.getString(2));
+                value.setCdId(tbId);
+                value.setIntegerIdx(resultSet.getInt(3));
+                value.setComment("Who am I");
+                columns.add(value);
+            }
+        } catch (SQLException ex) {
+            throw new RuntimeException(ex);
+        }
+        return columns;
+    }
+
+    @Override
+    protected List<String> getTableNameList() {
+        if (StringUtils.isEmpty(filterRegexp)) {
+            throw new RuntimeException("`Filter regular expression` must be set");
+        }
+
+        try (PreparedStatement preparedStatement = connection.prepareStatement(
+            String.format("SELECT table_name FROM all_tables "
+                + "WHERE table_name LIKE '%s'", filterRegexp))) {
+            ResultSet resultSet = preparedStatement.executeQuery();
+            List<String> tableNames = new ArrayList<>();
+            while (resultSet.next()) {
+                tableNames.add(resultSet.getString(1));
+            }
+            return tableNames;
+        } catch (SQLException ex) {
+            throw new RuntimeException(ex);
+        }
+    }
+
+    private String getDatabasePosition() {
+        try (PreparedStatement preparedStatement =
+            connection.prepareStatement(
+                "SELECT sys_context( 'userenv', 'current_schema' ) FROM dual")) {
+            ResultSet resultSet = preparedStatement.executeQuery();
+            if (! resultSet.next()) {
+                throw new RuntimeException(
+                    "Executing `SELECT sys_context('userenv', 'current_schema') FROM dual` failed!");
+            }
+            String database = resultSet.getString(1);
+            if (Objects.isNull(database)) {
+                throw new RuntimeException("Please add a db_name to `jdbcUrl`");
+            }
+            return database;
+        } catch (SQLException ex) {
+            throw new RuntimeException(ex);
+        }
+    }
+}
diff --git a/core/src/main/java/com/qihoo/qsql/metadata/collect/dto/ElasticsearchProp.java b/core/src/main/java/com/qihoo/qsql/metadata/collect/dto/ElasticsearchProp.java
new file mode 100644
index 00000000..61b397db
--- /dev/null
+++ b/core/src/main/java/com/qihoo/qsql/metadata/collect/dto/ElasticsearchProp.java
@@ -0,0 +1,56 @@
+package com.qihoo.qsql.metadata.collect.dto;
+
+import javax.validation.constraints.NotNull;
+
+public class ElasticsearchProp {
+    @NotNull
+    private String esNodes;
+    @NotNull
+    private int esPort;
+    @NotNull
+    private String esUser;
+    @NotNull
+    private String esPass;
+    @NotNull
+    private String esIndex;
+
+    public String getEsNodes() {
+        return esNodes;
+    }
+
+    public void setEsNodes(String esNodes) {
+        this.esNodes = esNodes;
+    }
+
+    public int getEsPort() {
+        return esPort;
+    }
+
+    public void setEsPort(int esPort) {
+        this.esPort = esPort;
+    }
+
+    public String getEsUser() {
+        return esUser;
+    }
+
+    public void setEsUser(String esUser) {
+        this.esUser = esUser;
+    }
+
+    public String getEsPass() {
+        return esPass;
+    }
+
+    public void setEsPass(String esPass) {
+        this.esPass = esPass;
+    }
+
+    public String getEsIndex() {
+        return esIndex;
+    }
+
+    public void setEsIndex(String esIndex) {
+        this.esIndex = esIndex;
+    }
+}
diff --git a/core/src/main/java/com/qihoo/qsql/metadata/collect/dto/HiveProp.java b/core/src/main/java/com/qihoo/qsql/metadata/collect/dto/HiveProp.java
new file mode 100644
index 00000000..ee6741c4
--- /dev/null
+++ b/core/src/main/java/com/qihoo/qsql/metadata/collect/dto/HiveProp.java
@@ -0,0 +1,17 @@
+package com.qihoo.qsql.metadata.collect.dto;
+
+import javax.validation.constraints.NotNull;
+
+public class HiveProp extends JdbcProp {
+
+    @NotNull
+    private String dbName;
+
+    public String getDbName() {
+        return dbName;
+    }
+
+    public void setDbName(String dbName) {
+        this.dbName = dbName;
+    }
+}
diff --git a/core/src/main/java/com/qihoo/qsql/metadata/collect/dto/JdbcProp.java b/core/src/main/java/com/qihoo/qsql/metadata/collect/dto/JdbcProp.java
new file mode 100644
index 00000000..9da0e5be
--- /dev/null
+++ b/core/src/main/java/com/qihoo/qsql/metadata/collect/dto/JdbcProp.java
@@ -0,0 +1,46 @@
+package com.qihoo.qsql.metadata.collect.dto;
+
+import 
javax.validation.constraints.NotNull; + +public class JdbcProp { + @NotNull + protected String jdbcDriver; + @NotNull + protected String jdbcUrl; + @NotNull + protected String jdbcUser; + @NotNull + protected String jdbcPassword; + + public String getJdbcDriver() { + return jdbcDriver; + } + + public void setJdbcDriver(String jdbcDriver) { + this.jdbcDriver = jdbcDriver; + } + + public String getJdbcUrl() { + return jdbcUrl; + } + + public void setJdbcUrl(String jdbcUrl) { + this.jdbcUrl = jdbcUrl; + } + + public String getJdbcUser() { + return jdbcUser; + } + + public void setJdbcUser(String jdbcUser) { + this.jdbcUser = jdbcUser; + } + + public String getJdbcPassword() { + return jdbcPassword; + } + + public void setJdbcPassword(String jdbcPassword) { + this.jdbcPassword = jdbcPassword; + } +} diff --git a/core/src/main/java/com/qihoo/qsql/metadata/collect/dto/package-info.java b/core/src/main/java/com/qihoo/qsql/metadata/collect/dto/package-info.java new file mode 100644 index 00000000..78b5e857 --- /dev/null +++ b/core/src/main/java/com/qihoo/qsql/metadata/collect/dto/package-info.java @@ -0,0 +1 @@ +package com.qihoo.qsql.metadata.collect.dto; \ No newline at end of file diff --git a/core/src/main/java/com/qihoo/qsql/metadata/collect/package-info.java b/core/src/main/java/com/qihoo/qsql/metadata/collect/package-info.java new file mode 100644 index 00000000..30c88817 --- /dev/null +++ b/core/src/main/java/com/qihoo/qsql/metadata/collect/package-info.java @@ -0,0 +1 @@ +package com.qihoo.qsql.metadata.collect; \ No newline at end of file diff --git a/core/src/main/java/com/qihoo/qsql/metadata/entity/ColumnValue.java b/core/src/main/java/com/qihoo/qsql/metadata/entity/ColumnValue.java index 55d214c2..9c7b2e32 100644 --- a/core/src/main/java/com/qihoo/qsql/metadata/entity/ColumnValue.java +++ b/core/src/main/java/com/qihoo/qsql/metadata/entity/ColumnValue.java @@ -89,10 +89,8 @@ public boolean equals(Object obj) { } ColumnValue that = (ColumnValue) obj; return Objects.equals(cdId, that.cdId) - && Objects.equals(comment, that.comment) && Objects.equals(columnName, that.columnName) - && Objects.equals(typeName, that.typeName) - && Objects.equals(integerIdx, that.integerIdx); + && Objects.equals(typeName, that.typeName); } @Override diff --git a/core/src/main/java/com/qihoo/qsql/metadata/entity/DatabaseParamValue.java b/core/src/main/java/com/qihoo/qsql/metadata/entity/DatabaseParamValue.java index e4a195fb..2173fbf2 100644 --- a/core/src/main/java/com/qihoo/qsql/metadata/entity/DatabaseParamValue.java +++ b/core/src/main/java/com/qihoo/qsql/metadata/entity/DatabaseParamValue.java @@ -3,12 +3,8 @@ import java.util.Objects; /** - * Database params related information. - *

- * dbTd, database id
- * paramKey, param key for database, such as esIndex.
- * paramValue, param value for database, such as 127.0.0.1.
- * </p>
+ * Database params related information.
+ * <p>dbId, database id; paramKey, param key for database, such as esIndex;
+ * paramValue, param value for database, such as 127.0.0.1.</p>
*/ public class DatabaseParamValue { @@ -16,10 +12,16 @@ public class DatabaseParamValue { private String paramKey; private String paramValue; - public DatabaseParamValue() {} + public DatabaseParamValue() { + } + + public DatabaseParamValue(Long dbId) { + this.dbId = dbId; + } /** * constructor. + * * @param dbId dbId * @param paramKey paramKey. * @param paramValue paramValue. @@ -42,16 +44,18 @@ public String getParamKey() { return paramKey; } - public void setParamKey(String paramKey) { + public DatabaseParamValue setParamKey(String paramKey) { this.paramKey = paramKey; + return this; } public String getParamValue() { return paramValue; } - public void setParamValue(String paramValue) { + public DatabaseParamValue setParamValue(String paramValue) { this.paramValue = paramValue; + return this; } @Override diff --git a/core/src/main/java/com/qihoo/qsql/metadata/entity/DatabaseValue.java b/core/src/main/java/com/qihoo/qsql/metadata/entity/DatabaseValue.java index cd5544d0..dd0f56d9 100644 --- a/core/src/main/java/com/qihoo/qsql/metadata/entity/DatabaseValue.java +++ b/core/src/main/java/com/qihoo/qsql/metadata/entity/DatabaseValue.java @@ -67,7 +67,6 @@ public boolean equals(Object obj) { @Override public int hashCode() { - return Objects.hash(dbId, desc, name, dbType); } } diff --git a/core/src/main/java/com/qihoo/qsql/metadata/utils/MetadataTransformer.java b/core/src/main/java/com/qihoo/qsql/metadata/utils/MetadataTransformer.java deleted file mode 100644 index 7c4a2802..00000000 --- a/core/src/main/java/com/qihoo/qsql/metadata/utils/MetadataTransformer.java +++ /dev/null @@ -1,126 +0,0 @@ -package com.qihoo.qsql.metadata.utils; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.qihoo.qsql.metadata.MetadataClient; -import com.qihoo.qsql.metadata.entity.ColumnValue; -import com.qihoo.qsql.metadata.entity.DatabaseParamValue; -import com.qihoo.qsql.metadata.entity.DatabaseValue; -import com.qihoo.qsql.metadata.entity.TableValue; -import java.io.BufferedReader; -import java.io.File; -import java.io.FileReader; -import java.io.IOException; -import java.sql.SQLException; -import java.util.List; -import java.util.stream.Collectors; -import org.apache.calcite.model.JsonCustomSchema; -import org.apache.calcite.model.JsonCustomTable; -import org.apache.calcite.model.JsonRoot; -import org.apache.calcite.model.JsonTable; -import org.apache.commons.cli.ParseException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class MetadataTransformer { - - private static final Logger LOGGER = LoggerFactory.getLogger(MetadataTransformer.class); - - /** - * Import metadata from json file to sqlite. 
- * - * @param args D - */ - public static void main(String[] args) throws SQLException, ParseException { - String path = MetadataTransformer.class.getClassLoader().getResource("QSql.json").getPath(); - // Option optionMetaJson = Option.builder().longOpt("json").hasArg().desc("metadata json").build(); - // - // Options options = new Options(); - // options.addOption(optionMetaJson); - // - // CommandLineParser parser = new DefaultParser(); - - // CommandLine commandLine = parser.parse(options, args); - // if (commandLine.hasOption("json")) { - // String path = commandLine.getOptionValue("json"); - File file = new File(path); - try (BufferedReader reader = new BufferedReader(new FileReader(file))) { - String json = reader.lines().reduce((x, y) -> x + y).orElse(""); - importMetadata(json); - } catch (IOException ex) { - throw new RuntimeException(ex); - } - // } else { - // LOGGER.error("No transformable metadata json!! Please input with `json` or `file` option " - // + "then try again"); - // } - } - - private static void importMetadata(String json) throws SQLException, IOException { - MetadataClient client = null; - try { - client = new MetadataClient(); - client.setAutoCommit(false); - JsonRoot root = new ObjectMapper().readValue(json, JsonRoot.class); - MetadataClient finalClient = client; - root.schemas.forEach(schema -> { - JsonCustomSchema customSchema = ((JsonCustomSchema) schema); - DatabaseValue dbValue = new DatabaseValue(); - dbValue.setName(customSchema.name); - dbValue.setDesc("For test"); - dbValue.setDbType(getType(customSchema.factory)); - finalClient.insertBasicDatabaseInfo(dbValue); - DatabaseValue withIdDbValue = finalClient.getBasicDatabaseInfo(customSchema.name); - List tables = customSchema.tables; - tables.forEach(table -> { - JsonCustomTable customTable = ((JsonCustomTable) table); - List params = customTable.operand.entrySet().stream() - .map(entry -> new DatabaseParamValue( - withIdDbValue.getDbId(), - entry.getKey(), - entry.getValue().toString())).collect(Collectors.toList()); - if (finalClient.getDatabaseSchema(withIdDbValue.getDbId()).isEmpty()) { - finalClient.insertDatabaseSchema(params); - } - finalClient.insertTableSchema(new TableValue(withIdDbValue.getDbId(), table.name)); - - List tablesWithId = finalClient.getTableSchema(customTable.name); - if (tablesWithId.isEmpty()) { - throw new RuntimeException("ERROR about table parsed"); - } - List values = customTable.columns.stream().map(column -> { - String[] names = column.name.split(":"); - if (names.length != 2) { - throw new RuntimeException("ERROR about column parsed"); - } - return new ColumnValue(tablesWithId.get(0).getTblId(), names[0], names[1]); - }).collect(Collectors.toList()); - finalClient.insertFieldsSchema(values); - }); - }); - client.commit(); - } catch (SQLException ex) { - ex.printStackTrace(); - if (client != null) { - client.rollback(); - } - } finally { - if (client != null) { - client.close(); - } - } - } - - private static String getType(String factory) { - if (factory.contains("Elasticsearch")) { - return "es"; - } else if (factory.contains("Hive")) { - return "hive"; - } else if (factory.contains("MySQL")) { - return "mysql"; - } else { - throw new RuntimeException("No given type!!"); - } - } - - -} diff --git a/core/src/main/java/com/qihoo/qsql/plan/ProcedurePortFire.java b/core/src/main/java/com/qihoo/qsql/plan/ProcedurePortFire.java index 211920f0..c2589dfa 100644 --- a/core/src/main/java/com/qihoo/qsql/plan/ProcedurePortFire.java +++ 
b/core/src/main/java/com/qihoo/qsql/plan/ProcedurePortFire.java
@@ -1,6 +1,7 @@
 package com.qihoo.qsql.plan;
 
 import com.qihoo.qsql.plan.proc.DirectQueryProcedure;
+import com.qihoo.qsql.plan.proc.DiskLoadProcedure;
 import com.qihoo.qsql.plan.proc.ExtractProcedure;
 import com.qihoo.qsql.plan.proc.LoadProcedure;
 import com.qihoo.qsql.plan.proc.PreparedExtractProcedure;
@@ -44,6 +45,11 @@ public QueryProcedure optimize() {
                 }
             }
         }
+
+        if (hasDiskLoad(currHead)) {
+            return currHead;
+        }
+
         return new DirectQueryProcedure(currHead);
     } else {
         return currHead;
@@ -63,6 +69,14 @@ private List flattenProcedures() {
         return procedures;
     }
 
+    //sqoop-like export: true if the plan tail writes its results to disk
+    private boolean hasDiskLoad(QueryProcedure currHead) {
+        if (! currHead.hasNext()) {
+            return currHead instanceof DiskLoadProcedure;
+        }
+        return hasDiskLoad(currHead.next());
+    }
+
     private class FlattenProcedureVisitor extends ProcedureVisitor {
         Set flatProcedures = new HashSet<>();
diff --git a/core/src/main/java/com/qihoo/qsql/plan/QueryProcedureProducer.java b/core/src/main/java/com/qihoo/qsql/plan/QueryProcedureProducer.java
index dbcc8307..bf96360c 100644
--- a/core/src/main/java/com/qihoo/qsql/plan/QueryProcedureProducer.java
+++ b/core/src/main/java/com/qihoo/qsql/plan/QueryProcedureProducer.java
@@ -5,6 +5,7 @@
 import com.qihoo.qsql.exception.ParseException;
 import com.qihoo.qsql.plan.func.SqlRunnerFuncTable;
 import com.qihoo.qsql.plan.proc.DataSetTransformProcedure;
+import com.qihoo.qsql.plan.proc.DiskLoadProcedure;
 import com.qihoo.qsql.plan.proc.ExtractProcedure;
 import com.qihoo.qsql.plan.proc.LoadProcedure;
 import com.qihoo.qsql.plan.proc.MemoryLoadProcedure;
@@ -28,7 +29,9 @@
 import org.apache.calcite.rel.RelNode;
 import org.apache.calcite.rel.rules.SubQueryRemoveRule;
 import org.apache.calcite.schema.SchemaPlus;
+import org.apache.calcite.sql.SqlIdentifier;
 import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.sql.ext.SqlInsertOutput;
 import org.apache.calcite.sql.parser.SqlParseException;
 import org.apache.calcite.sql.parser.SqlParser;
 import org.apache.calcite.sql.validate.SqlConformanceEnum;
@@ -50,6 +53,7 @@ public class QueryProcedureProducer {
     private FrameworkConfig config = null;
     private SqlRunnerFuncTable funcTable = SqlRunnerFuncTable.getInstance(RunnerType.DEFAULT);
     private Builder builder;
+    private SqlNode output = null;
 
     /**
      * Constructs a QueryProcedureProducer with init planner config.
@@ -90,7 +94,8 @@ public QueryProcedure createQueryProcedure(String sql) {
         Map> resultRelNode = subtreeSyncopator.rootNodeSchemas;
 
-        LoadProcedure procedure = new MemoryLoadProcedure();
+        LoadProcedure procedure = createLoadProcedure();
+
         TransformProcedure transformProcedure =
             new DataSetTransformProcedure(procedure, subtreeSyncopator.getRoot());
 
@@ -100,11 +105,30 @@ public QueryProcedure createQueryProcedure(String sql) {
             extractProcedures.add(PreparedExtractProcedure.createSpecificProcedure(
                 transformProcedure, ((RelOptTableImpl) entry.getValue().getValue()),
                 config, entry.getKey(),
-                entry.getValue().getKey(), sql));
+                entry.getValue().getKey(),
+                (output instanceof SqlInsertOutput)
+                    ? ((SqlInsertOutput) output).getSelect() : output));
         }
 
         return new ProcedurePortFire(extractProcedures).optimize();
     }
 
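+    // A plain query keeps its results in memory (MemoryLoadProcedure); the extended
+    // `INSERT INTO ... IN HDFS SELECT ...` statement, parsed as SqlInsertOutput,
+    // switches to a sqoop-style DiskLoadProcedure that writes the result set to disk.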
+    private LoadProcedure createLoadProcedure() {
+        if (! (output instanceof SqlInsertOutput)) {
+            return new MemoryLoadProcedure();
+        }
+
+        switch (((SqlInsertOutput) output).getDataSource().getSimple().toUpperCase()) {
+            case "HDFS":
+                SqlIdentifier path = (SqlIdentifier) ((SqlInsertOutput) output).getPath();
+                if (path.names.size() != 1) {
+                    throw new RuntimeException("Illegal path format, expected a simple path");
+                }
+                return new DiskLoadProcedure(path.names.get(0));
+            default:
+                throw new RuntimeException("Only HDFS is supported in this version.");
+        }
+    }
+
     private void initPlannerConfig(String jsonPath) throws IOException {
         final SchemaPlus rootSchema = Frameworks.createRootSchema(true);
@@ -135,6 +159,13 @@ private RelNode buildLogicalPlan(String sql) {
         try {
             SqlNode parsed = planner.parse(sql);
+            if (parsed instanceof SqlInsertOutput) {
+                output = (SqlInsertOutput) parsed;
+                parsed = ((SqlInsertOutput) parsed).getSelect();
+            } else {
+                output = parsed;
+            }
+
             SqlNode validated = planner.validate(parsed);
             return planner.rel(validated).rel;
         } catch (SqlParseException ex) {
diff --git a/core/src/main/java/com/qihoo/qsql/plan/QueryTables.java b/core/src/main/java/com/qihoo/qsql/plan/QueryTables.java
new file mode 100644
index 00000000..35be507d
--- /dev/null
+++ b/core/src/main/java/com/qihoo/qsql/plan/QueryTables.java
@@ -0,0 +1,21 @@
+package com.qihoo.qsql.plan;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class QueryTables {
+    public List<String> tableNames = new ArrayList<>();
+    private boolean isDml = false;
+
+    public boolean isDml() {
+        return isDml;
+    }
+
+    public void isDmlActually() {
+        this.isDml = true;
+    }
+
+    void add(String tableName) {
+        this.tableNames.add(tableName);
+    }
+}
\ No newline at end of file
diff --git a/core/src/main/java/com/qihoo/qsql/plan/SubtreeSyncopator.java b/core/src/main/java/com/qihoo/qsql/plan/SubtreeSyncopator.java
index 8436b628..ec2ca8ba 100644
--- a/core/src/main/java/com/qihoo/qsql/plan/SubtreeSyncopator.java
+++ b/core/src/main/java/com/qihoo/qsql/plan/SubtreeSyncopator.java
@@ -231,6 +231,9 @@ private boolean neededToPullUpFunctions(RelNode parent,
         //Dynamic op table needs to check whether the function exists in the related table
         if (rexNodes.stream().anyMatch(rexNode -> ! 
rexNode.accept(checker))) { pruneSubtree(parent, single, 0); + if (Objects.isNull(builder)) { + throw new RuntimeException("Need a builder to hold runner."); + } builder.setTransformRunner(runnerFuncTable.getRunner()); return true; } diff --git a/core/src/main/java/com/qihoo/qsql/plan/TableNameCollector.java b/core/src/main/java/com/qihoo/qsql/plan/TableNameCollector.java index e87c0aac..49bbc45b 100644 --- a/core/src/main/java/com/qihoo/qsql/plan/TableNameCollector.java +++ b/core/src/main/java/com/qihoo/qsql/plan/TableNameCollector.java @@ -1,6 +1,8 @@ package com.qihoo.qsql.plan; import com.qihoo.qsql.exception.ParseException; +import java.util.Arrays; +import java.util.List; import org.apache.calcite.avatica.util.Casing; import org.apache.calcite.avatica.util.Quoting; import org.apache.calcite.sql.SqlAsOperator; @@ -16,22 +18,19 @@ import org.apache.calcite.sql.SqlNodeList; import org.apache.calcite.sql.SqlOrderBy; import org.apache.calcite.sql.SqlSelect; +import org.apache.calcite.sql.ext.SqlInsertOutput; import org.apache.calcite.sql.parser.SqlParseException; import org.apache.calcite.sql.parser.SqlParser; import org.apache.calcite.sql.util.SqlVisitor; import org.apache.calcite.sql.validate.SqlConformance; import org.apache.calcite.sql.validate.SqlConformanceEnum; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - /** * Parse SQL line and extract the table name in it. */ -public class TableNameCollector implements SqlVisitor> { +public class TableNameCollector implements SqlVisitor { - private List tableNames = new ArrayList<>(); + private QueryTables tableNames = new QueryTables(); //TODO extract SqlParser to correspond with calcite-core private SqlConformance conformance = SqlConformanceEnum.MYSQL_5; private Quoting quoting = Quoting.BACK_TICK; @@ -51,19 +50,24 @@ public class TableNameCollector implements SqlVisitor> { * @param sql SQL line * @return List of TableName */ - public List parseTableName(String sql) throws SqlParseException { + public QueryTables parseTableName(String sql) throws SqlParseException { SqlParser parser = SqlParser.create(sql, config); - SqlNode sqlNode = parser.parseQuery(sql); + SqlNode sqlNode = parser.parseQuery(); return validateTableName(sqlNode.accept(this)); } @Override - public List visit(SqlLiteral sqlLiteral) { + public QueryTables visit(SqlLiteral sqlLiteral) { return tableNames; } @Override - public List visit(SqlCall sqlCall) { + public QueryTables visit(SqlCall sqlCall) { + if (sqlCall instanceof SqlInsertOutput) { + tableNames.isDmlActually(); + ((SqlInsertOutput) sqlCall).getSelect().accept(this); + } + if (sqlCall instanceof SqlSelect) { ((SqlSelect) sqlCall).getSelectList().accept(this); if (((SqlSelect) sqlCall).getFrom() != null) { @@ -97,7 +101,7 @@ public List visit(SqlCall sqlCall) { } @Override - public List visit(SqlNodeList sqlNodeList) { + public QueryTables visit(SqlNodeList sqlNodeList) { sqlNodeList.iterator().forEachRemaining((entry) -> { if (entry instanceof SqlSelect) { entry.accept(this); @@ -113,7 +117,7 @@ public List visit(SqlNodeList sqlNodeList) { } @Override - public List visit(SqlIdentifier sqlIdentifier) { + public QueryTables visit(SqlIdentifier sqlIdentifier) { if (sqlIdentifier.names.size() == 0) { return tableNames; } @@ -123,17 +127,17 @@ public List visit(SqlIdentifier sqlIdentifier) { } @Override - public List visit(SqlDataTypeSpec sqlDataTypeSpec) { + public QueryTables visit(SqlDataTypeSpec sqlDataTypeSpec) { return tableNames; } @Override - public List visit(SqlDynamicParam 
sqlDynamicParam) {
+    public QueryTables visit(SqlDynamicParam sqlDynamicParam) {
         return tableNames;
     }
 
     @Override
-    public List visit(SqlIntervalQualifier sqlIntervalQualifier) {
+    public QueryTables visit(SqlIntervalQualifier sqlIntervalQualifier) {
         return tableNames;
     }
 
@@ -160,8 +164,8 @@ private void visitBasicCall(SqlBasicCall sqlCall) {
         }
     }
 
-    private List validateTableName(List tableNames) {
-        for (String tableName : tableNames) {
+    private QueryTables validateTableName(QueryTables tableNames) {
+        for (String tableName : tableNames.tableNames) {
             if (tableName.split("\\.", -1).length > 2) {
                 throw new ParseException("Qsql only supports structure like dbName.tableName,"
                     + " and there is an unsupported tableName here: " + tableName);
@@ -169,5 +173,4 @@ private List validateTableName(List tableNames) {
             }
             return tableNames;
         }
-
 }
diff --git a/core/src/main/java/com/qihoo/qsql/plan/func/SparkSqlDialect.java b/core/src/main/java/com/qihoo/qsql/plan/func/SparkSqlDialect.java
index b3a7ce89..09494757 100644
--- a/core/src/main/java/com/qihoo/qsql/plan/func/SparkSqlDialect.java
+++ b/core/src/main/java/com/qihoo/qsql/plan/func/SparkSqlDialect.java
@@ -1,19 +1,27 @@
 package com.qihoo.qsql.plan.func;
 
+import org.apache.calcite.config.NullCollation;
 import org.apache.calcite.sql.SqlCall;
 import org.apache.calcite.sql.SqlDialect;
 import org.apache.calcite.sql.SqlKind;
+import org.apache.calcite.sql.SqlNode;
 import org.apache.calcite.sql.SqlUtil;
 import org.apache.calcite.sql.SqlWriter;
 import org.apache.calcite.sql.fun.SqlStdOperatorTable;
 
 public class SparkSqlDialect extends SqlDialect {
+    public static final SqlDialect DEFAULT =
+        new SparkSqlDialect(EMPTY_CONTEXT
+            .withDatabaseProduct(DatabaseProduct.HIVE)
+            .withNullCollation(NullCollation.LOW));
+
     public SparkSqlDialect(Context context) {
         super(context);
     }
 
-    @Override public boolean 
supportsAggregateFunction(SqlKind kind) {
+    @Override
+    public boolean supportsAggregateFunction(SqlKind kind) {
         switch (kind) {
             case COUNT:
             case SUM:
@@ -29,14 +37,30 @@ public SparkSqlDialect(Context context) {
         }
     }
 
-    @Override public void unparseCall(SqlWriter writer, SqlCall call,
+    @Override
+    public void unparseCall(SqlWriter writer, SqlCall call,
         int leftPrec, int rightPrec) {
-
         if (call.getOperator() == SqlStdOperatorTable.CONCAT) {
             SqlUtil.unparseFunctionSyntax(SparkSqlOperatorTable.CONCAT, writer, call);
         } else {
-            System.out.println();
-            //do nothing
+            super.unparseCall(writer, call, leftPrec, rightPrec);
         }
     }
+
+    @Override
+    public void unparseOffsetFetch(SqlWriter writer, SqlNode offset,
+        SqlNode fetch) {
+        unparseFetchUsingLimit(writer, offset, fetch);
+    }
+
+    @Override
+    public SqlNode emulateNullDirection(SqlNode node,
+        boolean nullsFirst, boolean desc) {
+        return emulateNullDirectionWithIsNull(node, nullsFirst, desc);
+    }
+
+    @Override
+    public boolean supportsCharSet() {
+        return false;
+    }
 }
diff --git a/core/src/main/java/com/qihoo/qsql/plan/func/SqlRunnerFuncTable.java b/core/src/main/java/com/qihoo/qsql/plan/func/SqlRunnerFuncTable.java
index bfe95647..f002fe12 100644
--- a/core/src/main/java/com/qihoo/qsql/plan/func/SqlRunnerFuncTable.java
+++ b/core/src/main/java/com/qihoo/qsql/plan/func/SqlRunnerFuncTable.java
@@ -4,6 +4,8 @@
 import com.google.common.collect.Multimap;
 import com.qihoo.qsql.api.SqlRunner.Builder.RunnerType;
 import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Collectors;
 import org.apache.calcite.sql.SqlFunction;
 import org.apache.calcite.sql.SqlOperator;
 import org.apache.calcite.sql.SqlSpecialOperator;
@@ -53,7 +55,7 @@ interface RunnerFunctionsHolder {
     }
 
     static class SparkFunctionsHolder implements RunnerFunctionsHolder {
-        //TODO change to remove strategy
+        //TODO change SqlOperator to remove strategy
         static final SqlFunction[] FUNCTIONS = {
             //aggregation functions
             SqlStdOperatorTable.APPROX_COUNT_DISTINCT,
@@ -91,7 +93,6 @@ static class SparkFunctionsHolder implements RunnerFunctionsHolder {
             //string functions
             // SqlStdOperatorTable.BASE64 -> need to add
             // SqlStdOperatorTable.UNBASE64 -> need to add
-
             // SqlStdOperatorTable.CONCAT -> concat(expr: Column*)
             SqlStdOperatorTable.LTRIM,
             SqlStdOperatorTable.RTRIM,
@@ -105,6 +106,7 @@ static class SparkFunctionsHolder implements RunnerFunctionsHolder {
             SqlStdOperatorTable.LOWER,
             SqlStdOperatorTable.REPLACE,
             SqlStdOperatorTable.SUBSTRING,
+            //Hive does not support trim(both ' ' from '')
             SqlStdOperatorTable.TRIM,
             SqlStdOperatorTable.UPPER,
@@ -151,4 +153,13 @@ public boolean contains(SqlOperator operator) {
     public RunnerType getRunner() {
         return runner;
     }
+
+    /**
+     * Lists the functions this runner supports, used when reporting an unsupported function.
+     */
+    public List<SqlFunction> getSupportedFunctions() {
+        return operators.values().stream().filter(op -> op instanceof SqlFunction)
+            .map(func -> (SqlFunction) func)
+            .collect(Collectors.toList());
+    }
 }
diff --git a/core/src/main/java/com/qihoo/qsql/plan/proc/DiskLoadProcedure.java b/core/src/main/java/com/qihoo/qsql/plan/proc/DiskLoadProcedure.java
index df1533f1..ae437c70 100644
--- a/core/src/main/java/com/qihoo/qsql/plan/proc/DiskLoadProcedure.java
+++ b/core/src/main/java/com/qihoo/qsql/plan/proc/DiskLoadProcedure.java
@@ -1,5 +1,7 @@
 package com.qihoo.qsql.plan.proc;
 
+import com.qihoo.qsql.plan.ProcedureVisitor;
+
 /**
  * Describe the function for saving data into disk.
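 * (Produced for the extended `INSERT INTO ... IN HDFS` statements; the stored path tells the
 * runner where on disk to write the result set.)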
*/ @@ -20,4 +22,9 @@ public void setResultsOnLocal() { public boolean isResultsOnLocal() { return isOnCluster; } + + @Override + public void accept(ProcedureVisitor visitor) { + visitor.visit(this); + } } diff --git a/core/src/main/java/com/qihoo/qsql/plan/proc/MemoryLoadProcedure.java b/core/src/main/java/com/qihoo/qsql/plan/proc/MemoryLoadProcedure.java index c71a6ff5..4443a5f9 100644 --- a/core/src/main/java/com/qihoo/qsql/plan/proc/MemoryLoadProcedure.java +++ b/core/src/main/java/com/qihoo/qsql/plan/proc/MemoryLoadProcedure.java @@ -1,5 +1,7 @@ package com.qihoo.qsql.plan.proc; +import com.qihoo.qsql.plan.ProcedureVisitor; + /** * Describe the function for saving data into memory. */ @@ -8,4 +10,9 @@ public class MemoryLoadProcedure extends LoadProcedure { public MemoryLoadProcedure() { super(DataFormat.DEFAULT); } + + @Override + public void accept(ProcedureVisitor visitor) { + visitor.visit(this); + } } diff --git a/core/src/main/java/com/qihoo/qsql/plan/proc/PreparedExtractProcedure.java b/core/src/main/java/com/qihoo/qsql/plan/proc/PreparedExtractProcedure.java index c6719269..5e0cdc46 100644 --- a/core/src/main/java/com/qihoo/qsql/plan/proc/PreparedExtractProcedure.java +++ b/core/src/main/java/com/qihoo/qsql/plan/proc/PreparedExtractProcedure.java @@ -2,6 +2,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import com.qihoo.qsql.metadata.MetadataMapping; import com.qihoo.qsql.utils.SqlUtil; import java.io.IOException; import java.util.AbstractList; @@ -22,7 +23,7 @@ import org.apache.calcite.adapter.enumerable.PhysType; import org.apache.calcite.adapter.enumerable.PhysTypeImpl; import org.apache.calcite.adapter.hive.HiveTable; -import org.apache.calcite.adapter.mysql.MySQLTable; +import org.apache.calcite.adapter.custom.JdbcTable; import org.apache.calcite.adapter.virtual.VirtualTable; import org.apache.calcite.plan.RelOptCluster; import org.apache.calcite.plan.RelOptLattice; @@ -40,8 +41,10 @@ import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.sql.SqlDialect; import org.apache.calcite.sql.SqlNode; +import org.apache.calcite.sql.dialect.CalciteSqlDialect; import org.apache.calcite.sql.dialect.HiveSqlDialect; import org.apache.calcite.sql.dialect.MysqlSqlDialect; +import org.apache.calcite.sql.dialect.OracleSqlDialect; import org.apache.calcite.sql.parser.SqlParseException; import org.apache.calcite.tools.FrameworkConfig; import org.apache.calcite.tools.Frameworks; @@ -84,7 +87,6 @@ private PreparedExtractProcedure( * @param config config of procedure * @param relNode relNode of Procedure * @param tableName tableName of Sql - * @param sql sql */ public static PreparedExtractProcedure createSpecificProcedure( QueryProcedure next, @@ -92,28 +94,42 @@ public static PreparedExtractProcedure createSpecificProcedure( FrameworkConfig config, RelNode relNode, String tableName, - String sql) { + SqlNode sqlNode) { //rewrite this paragraph if (relOptTable.getTable() instanceof ElasticsearchTable) { + String newSql = Util.toLinux(sqlNode.toSqlString(CalciteSqlDialect.DEFAULT).getSql()); return new ElasticsearchExtractor(next, ((ElasticsearchTranslatableTable) relOptTable.getTable()).getProperties(), - config, relNode, tableName, sql); + config, relNode, tableName, newSql); } else if (relOptTable.getTable() instanceof HiveTable) { return new HiveExtractor(next, ((HiveTable) relOptTable.getTable()).getProperties(), config, relNode, tableName); - } else if (relOptTable.getTable() instanceof MySQLTable) { - return new 
MySqlExtractor(next,
-                ((MySQLTable) relOptTable.getTable()).getProperties(),
-                config, relNode, tableName);
+        } else if (relOptTable.getTable() instanceof JdbcTable) {
+            //TODO add more JDBC types
+            String dbType = ((JdbcTable) relOptTable.getTable())
+                .getProperties().getProperty("dbType", "unknown");
+            switch (dbType) {
+                case MetadataMapping.MYSQL:
+                    return new MySqlExtractor(next, ((JdbcTable) relOptTable.getTable())
+                        .getProperties(), config, relNode, tableName);
+                case MetadataMapping.ORACLE:
+                    return new OracleExtractor(next, ((JdbcTable) relOptTable.getTable())
+                        .getProperties(), config, relNode, tableName);
+                default:
+                    throw new RuntimeException("Unsupported JDBC dbType: " + dbType);
+            }
+
+        } else if (relOptTable.getTable() instanceof VirtualTable) {
+            String newSql = Util.toLinux(sqlNode.toSqlString(CalciteSqlDialect.DEFAULT).getSql());
             return new VirtualExtractor(next,
                 ((VirtualTable) relOptTable.getTable()).getProperties(),
-                config, relNode, tableName, sql);
+                config, relNode, tableName, newSql);
         } else if (relOptTable.getTable() instanceof CsvTable) {
+            String newSql = Util.toLinux(sqlNode.toSqlString(CalciteSqlDialect.DEFAULT).getSql());
             return new CsvExtractor(next,
                 ((CsvTable) relOptTable.getTable()).getProperties(),
-                config, relNode, tableName, sql);
+                config, relNode, tableName, newSql);
         } else {
             throw new RuntimeException("Unsupported metadata type");
         }
@@ -358,6 +374,25 @@ public String getCategory() {
         }
     }
 
+    public static class OracleExtractor extends PreparedExtractProcedure {
+
+        public OracleExtractor(QueryProcedure next, Properties properties,
+            FrameworkConfig config, RelNode relNode,
+            String tableName) {
+            super(next, properties, config, relNode, tableName);
+        }
+
+        @Override
+        public String toRecognizedQuery() {
+            return sql(new OracleSqlDialect(SqlDialect.EMPTY_CONTEXT));
+        }
+
+        @Override
+        public String getCategory() {
+            return "Oracle";
+        }
+    }
+
     public static class HiveExtractor extends PreparedExtractProcedure {
 
         public HiveExtractor(QueryProcedure next, Properties properties,
@@ -417,7 +452,9 @@ public CsvExtractor(QueryProcedure next, Properties properties,
         @Override
         public String toRecognizedQuery() {
             String sql = sql(new HiveSqlDialect(SqlDialect.EMPTY_CONTEXT));
-            this.properties.put("tableName", SqlUtil.parseTableName(sql).get(0).replaceAll("\\.", "_"));
+            //TODO Here is one more cost of SQL parsing here. 
Replace it + this.properties.put("tableName", SqlUtil.parseTableName(sql) + .tableNames.get(0).replaceAll("\\.", "_")); return sql; } diff --git a/core/src/main/java/com/qihoo/qsql/plan/proc/TransformProcedure.java b/core/src/main/java/com/qihoo/qsql/plan/proc/TransformProcedure.java index da9008fe..912397cc 100644 --- a/core/src/main/java/com/qihoo/qsql/plan/proc/TransformProcedure.java +++ b/core/src/main/java/com/qihoo/qsql/plan/proc/TransformProcedure.java @@ -1,12 +1,12 @@ package com.qihoo.qsql.plan.proc; import com.qihoo.qsql.plan.ProcedureVisitor; +import com.qihoo.qsql.plan.func.SparkSqlDialect; import java.util.List; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.rel2sql.RelToSqlConverter; import org.apache.calcite.sql.SqlDialect; import org.apache.calcite.sql.SqlNode; -import org.apache.calcite.sql.dialect.HiveSqlDialect; import org.apache.calcite.util.Util; /** @@ -22,7 +22,8 @@ public abstract class TransformProcedure extends QueryProcedure { * @param next next procedure in DAG * @param relNode relNode */ - public TransformProcedure(QueryProcedure next, RelNode relNode) { + public TransformProcedure(QueryProcedure next, + RelNode relNode) { super(next); this.parent = relNode; } @@ -33,8 +34,7 @@ public TransformProcedure(QueryProcedure next, RelNode relNode) { * @return sql */ public String sql() { - //TODO change to Spark Dialect, develop Spark Dialect - SqlDialect dialect = new HiveSqlDialect(SqlDialect.EMPTY_CONTEXT); + SqlDialect dialect = new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT); RelToSqlConverter converter = new RelToSqlConverter(dialect); SqlNode sqlNode = converter.visitChild(0, parent).asStatement(); diff --git a/core/src/main/java/com/qihoo/qsql/plan/scissors/ElasticAggregateMarker.java b/core/src/main/java/com/qihoo/qsql/plan/scissors/ElasticAggregateMarker.java deleted file mode 100644 index da367531..00000000 --- a/core/src/main/java/com/qihoo/qsql/plan/scissors/ElasticAggregateMarker.java +++ /dev/null @@ -1,5 +0,0 @@ -package com.qihoo.qsql.plan.scissors; - -public class ElasticAggregateMarker { - -} diff --git a/core/src/main/java/com/qihoo/qsql/plan/scissors/ElasticJoinMarker.java b/core/src/main/java/com/qihoo/qsql/plan/scissors/ElasticJoinMarker.java deleted file mode 100644 index 2ddf3e93..00000000 --- a/core/src/main/java/com/qihoo/qsql/plan/scissors/ElasticJoinMarker.java +++ /dev/null @@ -1,57 +0,0 @@ -package com.qihoo.qsql.plan.scissors; - - -import org.apache.calcite.plan.RelTraitSet; -import org.apache.calcite.rel.RelNode; -import org.apache.calcite.rel.core.Join; -import org.apache.calcite.rel.core.JoinRelType; -import org.apache.calcite.rex.RexNode; - -public class ElasticJoinMarker extends LogicalMarker { - - @Override - public void mark(RelNode child, RelNode parent, int ordinal) { - - } - - @Override - protected boolean onMatch(RelNode current) { - if (current == null) { - return false; - } - - if (! 
(current instanceof Join)) { - return false; - } - - Join join = (Join) current; - // if (join.getLeft() instanceof ) - return true; - } - - private class ElasticLogicalJoin extends Join implements Rollback { - private Join origin; - private int ordinal; - private RelNode parent; - - ElasticLogicalJoin(Join input, RelNode parent, int ordinal) { - super(input.getCluster(), input.getTraitSet(), - input.getLeft(), input.getRight(), input.getCondition(), - input.getVariablesSet(), input.getJoinType()); - this.origin = input; - this.parent = parent; - this.ordinal = ordinal; - } - - @Override - public Join copy(RelTraitSet traitSet, RexNode conditionExpr, - RelNode left, RelNode right, JoinRelType joinType, boolean semiJoinDone) { - return origin.copy(traitSet, conditionExpr, left, right, joinType, semiJoinDone); - } - - @Override - public void rollback() { - parent.replaceInput(ordinal, origin); - } - } -} diff --git a/core/src/main/java/com/qihoo/qsql/plan/scissors/ElasticUnionMarker.java b/core/src/main/java/com/qihoo/qsql/plan/scissors/ElasticUnionMarker.java deleted file mode 100644 index 30ebb47e..00000000 --- a/core/src/main/java/com/qihoo/qsql/plan/scissors/ElasticUnionMarker.java +++ /dev/null @@ -1,6 +0,0 @@ -package com.qihoo.qsql.plan.scissors; - -public class ElasticUnionMarker { - - -} diff --git a/core/src/main/java/com/qihoo/qsql/plan/scissors/LogicalMarker.java b/core/src/main/java/com/qihoo/qsql/plan/scissors/LogicalMarker.java deleted file mode 100644 index 96a932cb..00000000 --- a/core/src/main/java/com/qihoo/qsql/plan/scissors/LogicalMarker.java +++ /dev/null @@ -1,39 +0,0 @@ -package com.qihoo.qsql.plan.scissors; - -import org.apache.calcite.rel.RelNode; - -/** - * . - */ - -public abstract class LogicalMarker { - - protected abstract void mark(RelNode child, RelNode parent, int ordinal); - - protected abstract boolean onMatch(RelNode current); - - /** - * . - */ - public void transpose(RelNode child, RelNode parent, int ordinal) { - if (onMatch(child)) { - mark(child, parent, ordinal); - } - } - - protected interface Rollback { - void rollback(); - } - - protected class Memento { - private RelNode past; - - public Memento(RelNode past) { - this.past = past; - } - - public RelNode getPast() { - return past; - } - } -} diff --git a/core/src/main/java/com/qihoo/qsql/plan/scissors/LogicalScissors.java b/core/src/main/java/com/qihoo/qsql/plan/scissors/LogicalScissors.java deleted file mode 100644 index d3833bdd..00000000 --- a/core/src/main/java/com/qihoo/qsql/plan/scissors/LogicalScissors.java +++ /dev/null @@ -1,41 +0,0 @@ -package com.qihoo.qsql.plan.scissors; - -import java.util.HashSet; -import java.util.Objects; -import java.util.Set; -import org.apache.calcite.rel.RelNode; - -//一条路径上只有一个标记物生效 -public class LogicalScissors { - - private Set markers = new HashSet<>(); - - public LogicalScissors() { - // registerMarker(new ElasticJoinMarker()) - // .registerMarker(new DiffSourceMarker()); - } - - public void exec(RelNode relNode) { - injectMarker(relNode, null); - - } - - public LogicalScissors registerMarker(LogicalMarker marker) { - markers.add(marker); - return this; - } - - /** - * . - */ - public void injectMarker(RelNode child, RelNode parent) { - if (! 
Objects.isNull(parent)) { - //do replacing - //create a memento - } - // markers.forEach(marker -> marker.mark(child)); - child.getInputs().forEach(sub -> injectMarker(sub, child)); - } - - -} diff --git a/core/src/main/java/com/qihoo/qsql/plan/scissors/ParitionFilterMarker.java b/core/src/main/java/com/qihoo/qsql/plan/scissors/ParitionFilterMarker.java deleted file mode 100644 index 1cbc3a7c..00000000 --- a/core/src/main/java/com/qihoo/qsql/plan/scissors/ParitionFilterMarker.java +++ /dev/null @@ -1,5 +0,0 @@ -package com.qihoo.qsql.plan.scissors; - -public class ParitionFilterMarker { - -} diff --git a/core/src/main/java/com/qihoo/qsql/plan/scissors/package-info.java b/core/src/main/java/com/qihoo/qsql/plan/scissors/package-info.java deleted file mode 100644 index f2408063..00000000 --- a/core/src/main/java/com/qihoo/qsql/plan/scissors/package-info.java +++ /dev/null @@ -1 +0,0 @@ -package com.qihoo.qsql.plan.scissors; \ No newline at end of file diff --git a/core/src/main/java/com/qihoo/qsql/utils/PropertiesReader.java b/core/src/main/java/com/qihoo/qsql/utils/PropertiesReader.java index 7a4ea99d..2c8d8ba0 100644 --- a/core/src/main/java/com/qihoo/qsql/utils/PropertiesReader.java +++ b/core/src/main/java/com/qihoo/qsql/utils/PropertiesReader.java @@ -5,6 +5,7 @@ import java.io.IOException; import java.io.InputStream; import java.util.Properties; +import org.apache.log4j.PropertyConfigurator; public class PropertiesReader { @@ -81,4 +82,15 @@ public static boolean isSupportedShell() { String osName = System.getProperties().getProperty("os.name"); return ! osName.contains("Windows"); } + + /** + * Read log properties. + */ + public static void configLogger() { + String logProp; + if (((logProp = System.getenv("QSQL_HOME")) != null) && ! logProp.isEmpty()) { + PropertyConfigurator.configure(logProp + + File.separator + "conf" + File.separator + "log4j.properties"); + } + } } diff --git a/core/src/main/java/com/qihoo/qsql/utils/SqlUtil.java b/core/src/main/java/com/qihoo/qsql/utils/SqlUtil.java index bad6f5f0..8b49b659 100644 --- a/core/src/main/java/com/qihoo/qsql/utils/SqlUtil.java +++ b/core/src/main/java/com/qihoo/qsql/utils/SqlUtil.java @@ -2,8 +2,8 @@ import com.qihoo.qsql.exec.JdbcPipeline; import com.qihoo.qsql.metadata.MetadataPostman; +import com.qihoo.qsql.plan.QueryTables; import com.qihoo.qsql.plan.TableNameCollector; -import java.util.ArrayList; import java.util.List; import org.apache.calcite.sql.parser.SqlParseException; @@ -11,17 +11,17 @@ * Sql related utils. */ public class SqlUtil { - /** * Parse table names. 
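 * For example, parseTableName("SELECT a FROM db.tab").tableNames yields ["db.tab"]; for the
 * extended `INSERT INTO ... IN HDFS SELECT ...` syntax, isDml() is also set on the result.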
* * @param sql sql string * @return table names */ - public static List parseTableName(String sql) { + //TODO reconstruct `QueryTables` to fit the data source directly to HDFS + public static QueryTables parseTableName(String sql) { TableNameCollector collector = new TableNameCollector(); try { - return new ArrayList<>(collector.parseTableName(sql)); + return collector.parseTableName(sql); } catch (SqlParseException ex) { throw new RuntimeException(ex.getMessage()); } diff --git a/core/src/test/java/com/qihoo/qsql/api/SqlRunnerTest.java b/core/src/test/java/com/qihoo/qsql/api/SqlRunnerTest.java index ded294ee..48110f5c 100644 --- a/core/src/test/java/com/qihoo/qsql/api/SqlRunnerTest.java +++ b/core/src/test/java/com/qihoo/qsql/api/SqlRunnerTest.java @@ -164,6 +164,12 @@ public void testMixQueryInThreeEngineWithFlink() { Assert.assertEquals(buildFlinkSqlRunner().sql(sql).getClass(), FlinkPipeline.class); } + @Test + public void testInsertOutput() { + String sql = "INSERT INTO `hello` IN HDFS SELECT 1"; + buildSparkSqlRunner().sql(sql).show(); + } + private SqlRunner buildDynamicSqlRunner() { return SqlRunner.builder() .setTransformRunner(SqlRunner.Builder.RunnerType.DEFAULT) diff --git a/core/src/test/java/com/qihoo/qsql/codegen/QueryGeneratorTest.java b/core/src/test/java/com/qihoo/qsql/codegen/QueryGeneratorTest.java index 6bdae49d..9eb9ce22 100644 --- a/core/src/test/java/com/qihoo/qsql/codegen/QueryGeneratorTest.java +++ b/core/src/test/java/com/qihoo/qsql/codegen/QueryGeneratorTest.java @@ -9,9 +9,8 @@ import com.qihoo.qsql.plan.proc.EmbeddedElasticsearchPolicy; import com.qihoo.qsql.plan.proc.QueryProcedure; import com.qihoo.qsql.utils.SqlUtil; +import java.util.Arrays; import java.util.List; -import org.hamcrest.CoreMatchers; -import org.hamcrest.MatcherAssert; import org.junit.Assert; import org.junit.ClassRule; import org.junit.Test; @@ -20,49 +19,9 @@ public class QueryGeneratorTest { @ClassRule public static final EmbeddedElasticsearchPolicy NODE = EmbeddedElasticsearchPolicy.create(); - @Test - public void testMysqlGenerator() { - String sql = "select * from edu_manage.department"; - List tableList = SqlUtil.parseTableName(sql); - QueryProcedureProducer producer = new QueryProcedureProducer( - SqlUtil.getSchemaPath(tableList)); - QueryProcedure procedure = producer.createQueryProcedure(sql); - - IntegratedQueryWrapper wrapper = new SparkBodyWrapper(); - wrapper.interpretProcedure(procedure); - wrapper.importSpecificDependency(); - - Class requirementClass = wrapper.compile(); - MatcherAssert.assertThat("", requirementClass.getSuperclass().toString(), - CoreMatchers.containsString("class com.qihoo.qsql.exec.spark.SparkRequirement")); - } - - // @Test - // public void testElasticsearchGenerator() { - // String sql = "select * from student_profile.student"; - // AbstractPipeline pipeline = SqlRunner.builder().setTransformRunner(RunnerType.SPARK).ok().sql(sql); - // Assert.assertTrue(((SparkPipeline) pipeline) - // .source() - // .contains("JavaEsSparkSQL")); - // } - @Test public void testHiveGenerator() { - String sql = "select * from action_required.homework_content"; - List tableList = SqlUtil.parseTableName(sql); - QueryProcedureProducer producer = new QueryProcedureProducer( - SqlUtil.getSchemaPath(tableList)); - QueryProcedure procedure = producer.createQueryProcedure(sql); - - IntegratedQueryWrapper wrapper = new SparkBodyWrapper(); - wrapper.interpretProcedure(procedure); - wrapper.importSpecificDependency(); - Class requirementClass = wrapper.compile(); - - 
MatcherAssert.assertThat("", - requirementClass.getSuperclass().toString(), - CoreMatchers - .containsString("class com.qihoo.qsql.exec.spark.SparkRequirement")); + assertGenerateClass("select * from action_required.homework_content"); } @Test @@ -71,19 +30,41 @@ public void testSameDataSourceQueryGenerator() { + "INNER JOIN (SELECT * FROM student " + "WHERE city in ('FRAMINGHAM', 'BROCKTON', 'CONCORD')) FILTERED " + "ON DEP.type = FILTERED.city"; - List tableList = SqlUtil.parseTableName(sql); - QueryProcedureProducer producer = new QueryProcedureProducer( - SqlUtil.getSchemaPath(tableList)); - QueryProcedure procedure = producer.createQueryProcedure(sql); - IntegratedQueryWrapper wrapper = new SparkBodyWrapper(); - wrapper.interpretProcedure(procedure); - wrapper.importSpecificDependency(); - wrapper.compile(); + assertGenerateClass(sql, + "createOrReplaceTempView(\"student_profile_student_1\")", + "createOrReplaceTempView(\"edu_manage_department_0\")", + "JavaEsSparkSQL.esDF(spark, config)"); } @Test public void testVirtualGenerator() { AbstractPipeline pipeline = SqlRunner.builder().setTransformRunner(RunnerType.SPARK).ok().sql("select 1"); - Assert.assertTrue(((SparkPipeline) pipeline).source().contains("tmp = spark.sql(\"select 1\")")); + System.out.println(((SparkPipeline) pipeline).source()); + Assert.assertTrue(((SparkPipeline) pipeline).source().contains("spark.sql(\"SELECT 1\")")); + } + + @Test + public void testMysqlRegexpExtract() { + assertGenerateClass("SELECT REGEXP_EXTRACT(type, '.*', 0) FROM department", + "spark.read().jdbc(\"\", \"(select dep_id, cycle, type, times from edu_manage.department) " + + "edu_manage_department_0\", " + + "SparkJdbcGenerator.config(\"username\", \"password\", \"\"))", + "createOrReplaceTempView(\"edu_manage_department_0\")", + "spark.sql(\"SELECT REGEXP_EXTRACT(type, '.*', 0) AS expr_col__0 FROM edu_manage_department_0"); + } + + private void assertGenerateClass(String sql, String...args) { + List tableList = SqlUtil.parseTableName(sql).tableNames; + QueryProcedureProducer producer = new QueryProcedureProducer( + SqlUtil.getSchemaPath(tableList), SqlRunner.builder()); + QueryProcedure procedure = producer.createQueryProcedure(sql); + + SparkBodyWrapper wrapper = new SparkBodyWrapper(); + wrapper.interpretProcedure(procedure); + wrapper.importSpecificDependency(); + wrapper.compile(); + String clazz = wrapper.toString(); + System.out.println(clazz); + Assert.assertTrue(Arrays.stream(args).allMatch(clazz::contains)); } } diff --git a/core/src/test/java/com/qihoo/qsql/exec/JdbcPipelineTest.java b/core/src/test/java/com/qihoo/qsql/exec/JdbcPipelineTest.java index bcb8304b..e9ee2e02 100644 --- a/core/src/test/java/com/qihoo/qsql/exec/JdbcPipelineTest.java +++ b/core/src/test/java/com/qihoo/qsql/exec/JdbcPipelineTest.java @@ -17,7 +17,7 @@ public void testJdbc() { SqlRunner.Builder.RunnerType runnerType = RunnerType.DEFAULT; SqlRunner runner = SqlRunner.builder() .setTransformRunner(runnerType) - .setSchemaPath(SqlUtil.getSchemaPath(SqlUtil.parseTableName(sql))) + .setSchemaPath(SqlUtil.getSchemaPath(SqlUtil.parseTableName(sql).tableNames)) .setAppName("test mysql") .setAcceptedResultsNum(2000) .ok(); @@ -31,7 +31,7 @@ public void testTextFile() { SqlRunner.Builder.RunnerType runnerType = RunnerType.DEFAULT; SqlRunner runner = SqlRunner.builder() .setTransformRunner(runnerType) - .setSchemaPath(SqlUtil.getSchemaPath(SqlUtil.parseTableName(sql))) + .setSchemaPath(SqlUtil.getSchemaPath(SqlUtil.parseTableName(sql).tableNames)) .setAppName("test 
virtual") .setAcceptedResultsNum(2000) .ok(); @@ -45,7 +45,7 @@ public void testTempTable() { SqlRunner.Builder.RunnerType runnerType = RunnerType.DEFAULT; SqlRunner runner = SqlRunner.builder() .setTransformRunner(runnerType) - .setSchemaPath(SqlUtil.getSchemaPath(SqlUtil.parseTableName(sql))) + .setSchemaPath(SqlUtil.getSchemaPath(SqlUtil.parseTableName(sql).tableNames)) .setAppName("test virtual") .setAcceptedResultsNum(2000) .ok(); diff --git a/core/src/test/java/com/qihoo/qsql/exec/spark/SparkPipelineTest.java b/core/src/test/java/com/qihoo/qsql/exec/spark/SparkPipelineTest.java index e39962b1..28910ae5 100644 --- a/core/src/test/java/com/qihoo/qsql/exec/spark/SparkPipelineTest.java +++ b/core/src/test/java/com/qihoo/qsql/exec/spark/SparkPipelineTest.java @@ -13,7 +13,7 @@ public void testSparkPipeline() { SqlRunner.Builder.RunnerType runnerType = RunnerType.SPARK; SqlRunner runner = SqlRunner.builder() .setTransformRunner(runnerType) - .setSchemaPath(SqlUtil.getSchemaPath(SqlUtil.parseTableName(sql))) + .setSchemaPath(SqlUtil.getSchemaPath(SqlUtil.parseTableName(sql).tableNames)) .setAppName("test hive") .setAcceptedResultsNum(10) .ok(); @@ -26,7 +26,7 @@ public void testSparkAsTextFilePipeline() { SqlRunner.Builder.RunnerType runnerType = RunnerType.SPARK; SqlRunner runner = SqlRunner.builder() .setTransformRunner(runnerType) - .setSchemaPath(SqlUtil.getSchemaPath(SqlUtil.parseTableName(sql))) + .setSchemaPath(SqlUtil.getSchemaPath(SqlUtil.parseTableName(sql).tableNames)) .setAppName("test hive") .setAcceptedResultsNum(10) .ok(); @@ -39,7 +39,7 @@ public void testSparkAsTempTablePipeline() { SqlRunner.Builder.RunnerType runnerType = RunnerType.SPARK; SqlRunner runner = SqlRunner.builder() .setTransformRunner(runnerType) - .setSchemaPath(SqlUtil.getSchemaPath(SqlUtil.parseTableName(sql))) + .setSchemaPath(SqlUtil.getSchemaPath(SqlUtil.parseTableName(sql).tableNames)) .setAppName("test hive") .setAcceptedResultsNum(10) .ok(); @@ -52,7 +52,7 @@ public void testSparkAsJsonFilePipeline() { SqlRunner.Builder.RunnerType runnerType = RunnerType.SPARK; SqlRunner runner = SqlRunner.builder() .setTransformRunner(runnerType) - .setSchemaPath(SqlUtil.getSchemaPath(SqlUtil.parseTableName(sql))) + .setSchemaPath(SqlUtil.getSchemaPath(SqlUtil.parseTableName(sql).tableNames)) .setAppName("test hive") .setAcceptedResultsNum(10) .ok(); diff --git a/core/src/test/java/com/qihoo/qsql/launcher/ExecutionDispatcherTest.java b/core/src/test/java/com/qihoo/qsql/launcher/ExecutionDispatcherTest.java index 8537a65b..ddde7ec9 100644 --- a/core/src/test/java/com/qihoo/qsql/launcher/ExecutionDispatcherTest.java +++ b/core/src/test/java/com/qihoo/qsql/launcher/ExecutionDispatcherTest.java @@ -32,7 +32,7 @@ public void setUp() { args.add("--jar"); args.add("test.jar"); args.add("--jar_name"); - args.add("./target/qsql-core-0.5.jar"); + args.add("./target/qsql-core-0.6.jar"); } @Test @@ -79,6 +79,19 @@ public void testHiveWithJdbc() { } } + @Test + public void testInsertOutput() throws SQLException, ParseException { + try { + // sql("select 1") + // .runner(RunnerType.SPARK).check(this::executedBySparkOrFlink); + sql("insert into `\\output\\` in hdfs " + + "select * from department as dep inner join homework_content as stu on dep.dep_id = stu.stu_id") + .runner(RunnerType.SPARK).check(this::executedBySparkOrFlink); + } catch (QsqlException exception) { + Assert.assertTrue(true); + } + } + private boolean executedBySparkOrFlink(Exception ex) { return ex.getMessage().contains("Process exited with an error"); } diff 
--git a/core/src/test/java/com/qihoo/qsql/plan/ExtSqlParserTest.java b/core/src/test/java/com/qihoo/qsql/plan/ExtSqlParserTest.java
new file mode 100644
index 00000000..69b0cbb3
--- /dev/null
+++ b/core/src/test/java/com/qihoo/qsql/plan/ExtSqlParserTest.java
@@ -0,0 +1,73 @@
+package com.qihoo.qsql.plan;
+
+import com.qihoo.qsql.utils.SqlUtil;
+import org.apache.calcite.avatica.util.Casing;
+import org.apache.calcite.avatica.util.Quoting;
+import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.sql.parser.SqlParseException;
+import org.apache.calcite.sql.parser.SqlParser;
+import org.apache.calcite.sql.validate.SqlConformance;
+import org.apache.calcite.sql.validate.SqlConformanceEnum;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * Tests for the extended INSERT INTO ... IN syntax parser ({@code SqlInsertOutput}).
+ */
+public class ExtSqlParserTest {
+
+    private SqlConformance conformance = SqlConformanceEnum.MYSQL_5;
+    private Quoting quoting = Quoting.BACK_TICK;
+
+    private SqlParser.Config config = SqlParser
+        .configBuilder()
+        .setConformance(conformance)
+        .setQuoting(quoting)
+        .setQuotedCasing(Casing.UNCHANGED)
+        .setUnquotedCasing(Casing.UNCHANGED)
+        .setCaseSensitive(true)
+        .build();
+
+    @Test
+    public void testBasicFunction() {
+        check("INSERT INTO dbName.tableName(col) IN MySQL SELECT 1 as col");
+    }
+
+    @Test
+    public void testWriteToHdfs() {
+        check("INSERT INTO `hdfs://cluster:9090/hello/world` IN HDFS SELECT 1 as col LIMIT 10", true);
+    }
+
+    @Test
+    public void testWriteToEs() {
+        check("INSERT INTO index.type(col1, col2, col3) IN Elasticsearch SELECT 'one', 'two', 'three'");
+    }
+
+    @Test
+    public void testCompoundQuery() {
+        check("INSERT INTO `/hello/world` IN HDFS SELECT * FROM tab WHERE tab.col LIKE '%all%'");
+    }
+
+    @Test
+    public void testDmlJudgement() {
+        check("INSERT INTO `hdfs://cluster:9090/hello/world` IN HDFS SELECT 1 as col LIMIT 10", true);
+        check("SELECT 1 as col LIMIT 10", false);
+    }
+
+    private void check(String sql) {
+        SqlParser parser = SqlParser.create(sql, config);
+        SqlNode root = null;
+        try {
+            root = parser.parseQuery();
+        } catch (SqlParseException ex) {
+            ex.printStackTrace();
+            Assert.fail(ex.getMessage());
+        }
+        Assert.assertEquals("SqlInsertOutput", root.getClass().getSimpleName());
+    }
+
+    private void check(String sql, boolean isDml) {
+        QueryTables tables = SqlUtil.parseTableName(sql);
+        Assert.assertEquals(isDml, tables.isDml());
+    }
+}
diff --git a/core/src/test/java/com/qihoo/qsql/plan/SubtreeSyncopatorTest.java b/core/src/test/java/com/qihoo/qsql/plan/SubtreeSyncopatorTest.java
index c879dce3..0f83039e 100644
--- a/core/src/test/java/com/qihoo/qsql/plan/SubtreeSyncopatorTest.java
+++ b/core/src/test/java/com/qihoo/qsql/plan/SubtreeSyncopatorTest.java
@@ -52,7 +52,7 @@ public void testSimpleSqlWithoutTable() {
     public void testSimpleSqlWithMySql() {
         String sql = "SELECT dep_id FROM edu_manage.department WHERE dep_id = 1";
         Set result = new Sql(sql).exec();
-        String[] expect = {"LogicalProject-LogicalFilter-MySQLTableScan"};
+        String[] expect = {"LogicalProject-LogicalFilter-JdbcTableScan"};
         Assert.assertArrayEquals("testSimpleSqlWithMySql", expect, sortRelNode(result).toArray());
     }
 
@@ -61,7 +61,7 @@ public void testSimpleSqlWithMySqlAggregate() {
         String sql = "SELECT times, SUM(dep_id) FROM edu_manage.department"
             + " WHERE times = 1 GROUP BY times";
         Set result = new Sql(sql).exec();
-        String[] expect = {"LogicalAggregate-LogicalProject-LogicalFilter-MySQLTableScan"};
+        String[] expect = {"LogicalAggregate-LogicalProject-LogicalFilter-JdbcTableScan"};
Assert.assertArrayEquals("testSimpleSqlWithMySqlAggregate", expect, sortRelNode(result).toArray()); } @@ -71,7 +71,7 @@ public void testMixSqlWithJoin() { + " FROM edu_manage.department AS a, action_required.homework_content AS b" + " WHERE a.dep_id = b.stu_id"; Set result = new Sql(sql).exec(); - String[] expect = {"LogicalProject-MySQLTableScan", "LogicalProject-HiveTableScan"}; + String[] expect = {"LogicalProject-JdbcTableScan", "LogicalProject-HiveTableScan"}; Assert.assertArrayEquals("test", expect, sortRelNode(result).toArray()); } @@ -83,7 +83,7 @@ public void testMixSqlWithJoinAndFilter() { + " JOIN (SELECT stu_id FROM action_required.homework_content) AS b" + " ON(a.dep_id = b.stu_id)"; Set result = new Sql(sql).exec(); - String[] expect = {"LogicalProject-LogicalProject-LogicalFilter-MySQLTableScan", + String[] expect = {"LogicalProject-LogicalProject-LogicalFilter-JdbcTableScan", "LogicalProject-LogicalProject-HiveTableScan"}; Assert.assertArrayEquals("testMixSqlWithJoinAndFilter", expect, sortRelNode(result).toArray()); } @@ -93,7 +93,7 @@ public void testMixSqlWithUnion() { String sql = " (SELECT dep_id FROM edu_manage.department WHERE dep_id = 1)" + " UNION (SELECT stu_id FROM action_required.homework_content) "; Set result = new Sql(sql).exec(); - String[] expect = {"LogicalProject-LogicalProject-LogicalFilter-MySQLTableScan", + String[] expect = {"LogicalProject-LogicalProject-LogicalFilter-JdbcTableScan", "LogicalProject-LogicalProject-HiveTableScan"}; Assert.assertArrayEquals("testMixSqlWithUnion", expect, sortRelNode(result).toArray()); } @@ -104,7 +104,7 @@ public void testMixSqlWithSubQueryInSelect() { String sql = "SELECT dep_id, (SELECT COUNT(stu_id) FROM action_required.homework_content)" + " FROM edu_manage.department WHERE dep_id = 1"; Set result = new Sql(sql).execOptimize(); - String[] expect = {"LogicalProject-LogicalFilter-MySQLTableScan", + String[] expect = {"LogicalProject-LogicalFilter-JdbcTableScan", "LogicalProject-LogicalAggregate-LogicalProject-HiveTableScan"}; Assert.assertArrayEquals("testMixSqlWithUnion", expect, sortRelNode(result).toArray()); } @@ -114,7 +114,7 @@ public void testMixSqlWithSubQueryInWhereExist() { String sql = "SELECT dep_id FROM edu_manage.department WHERE EXISTS " + " (SELECT stu_id FROM action_required.homework_content )"; Set result = new Sql(sql).execOptimize(); - String[] expect = {"LogicalProject-MySQLTableScan", + String[] expect = {"LogicalProject-JdbcTableScan", "LogicalProject-LogicalAggregate-LogicalProject-HiveTableScan"}; Assert.assertArrayEquals("testMixSqlWithUnion", expect, sortRelNode(result).toArray()); } @@ -125,7 +125,7 @@ public void testMixSqlWithSubQueryInWhereIn() { String sql = "SELECT dep_id FROM edu_manage.department WHERE dep_id IN" + " (SELECT stu_id FROM action_required.homework_content)"; Set result = new Sql(sql).execOptimize(); - String[] expect = {"LogicalProject-MySQLTableScan", + String[] expect = {"LogicalProject-JdbcTableScan", "LogicalProject-LogicalAggregate-LogicalProject-HiveTableScan"}; Assert.assertArrayEquals("testMixSqlWithUnion", expect, sortRelNode(result).toArray()); } @@ -138,7 +138,7 @@ public void testSimpleSqlWithAndWithoutTableName() { + " ON(a.dep_id = b.stu_id)"; Set result = new Sql(sql).exec(); String[] expect = { - "LogicalProject-LogicalJoin-LogicalProject-LogicalFilter-MySQLTableScan-LogicalProject-LogicalValues" + "LogicalProject-LogicalJoin-LogicalProject-LogicalFilter-JdbcTableScan-LogicalProject-LogicalValues" }; 
Assert.assertArrayEquals("testSimpleSqlWithAndWithoutTableName", expect, sortRelNode(result).toArray()); @@ -156,8 +156,8 @@ public void testSimpleSqlWithAndWithoutTableName() { // + " (SELECT dep_id FROM edu_manage.department)"; // Set result = new Sql(sql).exec(); // String[] expect = { - // "LogicalProject-LogicalProject-LogicalJoin-LogicalProject-MySQLTableScan-LogicalProject-MySQLTableScan", - // "LogicalProject-LogicalProject-MySQLTableScan"}; + // "LogicalProject-LogicalProject-LogicalJoin-LogicalProject-JdbcTableScan-LogicalProject-JdbcTableScan", + // "LogicalProject-LogicalProject-JdbcTableScan"}; // Assert.assertArrayEquals("testMixSqlWithJoinBetweenDifferentDb", // expect, sortRelNode(result).toArray()); // } @@ -174,9 +174,9 @@ public void testSimpleSqlWithAndWithoutTableName() { // + " (SELECT id FROM edu_manage.department_student_relation)"; // Set result = new Sql(sql).exec(); // String[] expect = { - // "LogicalProject-LogicalProject-MySQLTableScan", - // "LogicalProject-LogicalProject-MySQLTableScan", - // "LogicalProject-LogicalProject-MySQLTableScan"}; + // "LogicalProject-LogicalProject-JdbcTableScan", + // "LogicalProject-LogicalProject-JdbcTableScan", + // "LogicalProject-LogicalProject-JdbcTableScan"}; // Assert.assertArrayEquals("testMixSqlWithJoinBetweenDifferentDb2", // expect, sortRelNode(result).toArray()); // } @@ -222,7 +222,7 @@ public Sql(String sql) { private static List parseTableName(String sql) { TableNameCollector collector = new TableNameCollector(); try { - return new ArrayList<>(collector.parseTableName(sql)); + return new ArrayList<>(collector.parseTableName(sql).tableNames); } catch (SqlParseException ex) { throw new RuntimeException(ex.getMessage()); } diff --git a/core/src/test/java/com/qihoo/qsql/plan/TableNameCollectorTest.java b/core/src/test/java/com/qihoo/qsql/plan/TableNameCollectorTest.java index 1990d8d2..6c618d97 100644 --- a/core/src/test/java/com/qihoo/qsql/plan/TableNameCollectorTest.java +++ b/core/src/test/java/com/qihoo/qsql/plan/TableNameCollectorTest.java @@ -5,7 +5,6 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; -import java.util.stream.Collectors; import org.apache.calcite.sql.parser.SqlParseException; import org.junit.Assert; import org.junit.Test; @@ -18,9 +17,7 @@ public class TableNameCollectorTest { private static List parseTableName(String sql) { TableNameCollector collector = new TableNameCollector(); try { - return collector.parseTableName(sql) - .stream() - .collect(Collectors.toList()); + return new ArrayList<>(collector.parseTableName(sql).tableNames); } catch (SqlParseException ex) { throw new RuntimeException(ex.getMessage()); } @@ -101,7 +98,7 @@ public void testParseMixTableNameWithSubSqlInSelectPlusAlias() { @Test public void testParseMixTableNameWithSubSqlInFrom() { String sql = "SELECT A.a1 FROM ( SELECT count(B.b1) as a1 FROM B ) as A"; - Assert.assertEquals(Arrays.asList("B"), parseTableName(sql)); + Assert.assertEquals(Collections.singletonList("B"), parseTableName(sql)); } @Test @@ -146,6 +143,18 @@ public void testParseMixTableNameWithWrongSql() { } } + @Test + public void testParseInsertInto() { + String sql = "INSERT INTO `hello` IN HDFS SELECT 1"; + TableNameCollector collector = new TableNameCollector(); + try { + Assert.assertTrue(collector.parseTableName(sql).isDml()); + } catch (SqlParseException ex) { + ex.printStackTrace(); + Assert.assertTrue(false); + } + } + @Test public void testParseSingleBdNameAndTableName() { String sql = "SELECT a1 FROM A.A"; diff --git 
a/core/src/test/java/com/qihoo/qsql/plan/proc/QueryProcedureTest.java b/core/src/test/java/com/qihoo/qsql/plan/proc/QueryProcedureTest.java index ddc52f28..3b5a58e8 100644 --- a/core/src/test/java/com/qihoo/qsql/plan/proc/QueryProcedureTest.java +++ b/core/src/test/java/com/qihoo/qsql/plan/proc/QueryProcedureTest.java @@ -76,7 +76,7 @@ public Void getResult() { @Test public void testOnlyValue() { String sql = "SELECT 'Hello World' AS col1, 1010 AS col2"; - prepareForChecking(sql).checkExtra("SELECT 'Hello World' AS col1, 1010 AS col2"); + prepareForChecking(sql).checkExtra("SELECT 'Hello World' AS \"col1\", 1010 AS \"col2\""); } @Test @@ -104,14 +104,14 @@ public void testScalarQueryWithGroupBy() { public void testValueIn() { String sql = "SELECT UPPER('Time') NOT IN ('Time', 'New', 'Roman') AS res"; prepareForChecking(sql) - .checkExtra("SELECT UPPER('Time') NOT IN ('Time', 'New', 'Roman') AS res"); + .checkExtra("SELECT UPPER('Time') NOT IN ('Time', 'New', 'Roman') AS \"res\""); } @Test public void testValueWithUselessTableScan() { String sql = "SELECT 1 IN (SELECT dep.times FROM edu_manage.department AS dep) AS res"; - prepareForChecking(sql).checkExtra("SELECT 1 IN " - + "(SELECT dep.times FROM edu_manage.department AS dep) AS res"); + prepareForChecking(sql).checkExtra("SELECT 1 IN (SELECT \"dep\".\"times\" " + + "FROM \"edu_manage\".\"department\" AS \"dep\") AS \"res\""); } @Test @@ -126,36 +126,37 @@ public void testFilterAnd() { public void testSelectWithoutFrom() { prepareForChecking("SELECT 1").checkExtra( "SELECT 1"); - prepareForChecking("SELECT 'hello' < Some('world', 'hi')").checkExtra( - "SELECT 'hello' < Some('world', 'hi')"); + prepareForChecking("SELECT 'hello' < SOME ('world', 'hi')").checkExtra( + "SELECT 'hello' < SOME ('world', 'hi')"); } @Test public void testSelectWithoutFromWithJoin() { String sql = "SELECT a.e1 FROM (SELECT 1 e1) as a join (SELECT 2 e2) as b ON (a.e1 = b.e2)"; prepareForChecking(sql).checkExtra( - "SELECT a.e1 FROM (SELECT 1 e1) as a join (SELECT 2 e2) as b ON (a.e1 = b.e2)"); + "SELECT \"a\".\"e1\" FROM (SELECT 1 AS \"e1\") AS \"a\" " + + "INNER JOIN (SELECT 2 AS \"e2\") AS \"b\" ON \"a\".\"e1\" = \"b\".\"e2\""); } @Test public void testSimpleArithmetic() { String sql = "SELECT ABS(-1) + FLOOR(1.23) % 1 AS res"; - prepareForChecking(sql).checkExtra("SELECT ABS(-1) + FLOOR(1.23) % 1 AS res"); + prepareForChecking(sql).checkExtra("SELECT ABS(-1) + FLOOR(1.23) % 1 AS \"res\""); } @Test public void testFunctionLength() { //original function is length String sql = "SELECT ABS(CHAR_LENGTH('Hello World')) AS res"; - prepareForChecking(sql).checkExtra("SELECT ABS(CHAR_LENGTH('Hello World')) AS res"); + prepareForChecking(sql).checkExtra("SELECT ABS(CHAR_LENGTH('Hello World')) AS \"res\""); } @Test + //TODO resolve subquery function control public void testFunctionConcat() { String sql = "SELECT SUBSTRING('Hello World', 0, 5) || SUBSTRING('Hello World', 5) AS res"; prepareForChecking(sql) - .checkExtra( - "SELECT SUBSTRING('Hello World', 0, 5) || SUBSTRING('Hello World', 5) AS res"); + .checkExtra("SELECT SUBSTRING('Hello World', 0, 5) || SUBSTRING('Hello World', 5) AS \"res\""); } @Test @@ -167,14 +168,14 @@ public void testTypeBoolean() { @Test public void testComparison() { String sql = "SELECT (1 < 2 <> TRUE) AND TRUE AS res"; - prepareForChecking(sql).checkExtra("SELECT (1 < 2 <> TRUE) AND TRUE AS res"); + prepareForChecking(sql).checkExtra("SELECT 1 < 2 <> TRUE AND TRUE AS \"res\""); } @Test public void testValueWithIn() { String sql = "SELECT 
UPPER('Time') NOT IN ('Time', 'New', 'Roman') AS res"; prepareForChecking(sql) - .checkExtra("SELECT UPPER('Time') NOT IN ('Time', 'New', 'Roman') AS res"); + .checkExtra("SELECT UPPER('Time') NOT IN ('Time', 'New', 'Roman') AS \"res\""); } @Test @@ -252,10 +253,11 @@ public void testComplexGroupByWithHaving() { + "GROUP BY course_type\n" + "HAVING ((COUNT(*) > 100) AND (1 = 2))\n" + "ORDER BY course_type"; - prepareForChecking(sql).checkExtra("SELECT course_type " - + "FROM action_required.homework_content AS test " - + "WHERE date_time = '20180820' GROUP BY course_type " - + "HAVING ((COUNT(*) > 100) AND (1 = 2)) ORDER BY course_type"); + prepareForChecking(sql).checkExtra("SELECT \"test\".\"course_type\" " + + "FROM \"action_required\".\"homework_content\" AS \"test\" " + + "WHERE \"test\".\"date_time\" = '20180820' " + + "GROUP BY \"test\".\"course_type\" HAVING COUNT(*) > 100 AND 1 = 2 " + + "ORDER BY \"course_type\" ORDER BY \"course_type\""); } @Test @@ -277,7 +279,7 @@ public void testScalarSubQueryWithIn() { @Test public void testComplexSingleValue() { String sql = "SELECT (SELECT (SELECT 1))"; - prepareForChecking(sql).checkExtra("SELECT (SELECT (SELECT 1))"); + prepareForChecking(sql).checkExtra("SELECT (((SELECT (((SELECT 1))))))"); } @Test @@ -317,15 +319,14 @@ public void testMixedSqlConcatAndSome() { "SELECT stu_id, date_time, signature, course_type, content FROM action_required.homework_content", "select min(times) as m, count(*) as c, count(times) as d from edu_manage.department", "{\"_source\":[\"city\",\"province\",\"digest\",\"type\",\"stu_id\"]}") - .checkTrans("SELECT TRIM(BOTH ' ' FROM student_profile_student_0.city) || " - + "TRIM(BOTH ' ' FROM action_required_homework_content_1.course_type) expr_col__0 " - + "FROM student_profile_student_0 INNER JOIN action_required_homework_content_1 " - + "ON student_profile_student_0.stu_id = action_required_homework_content_1.stu_id, " - + "edu_manage_department_2 " - + "WHERE CASE WHEN edu_manage_department_2.c = 0 " - + "THEN FALSE WHEN student_profile_student_0.digest > edu_manage_department_2.m IS TRUE " - + "THEN TRUE WHEN edu_manage_department_2.c > edu_manage_department_2.d " - + "THEN NULL ELSE student_profile_student_0.digest > edu_manage_department_2.m END"); + .checkTrans("SELECT CONCAT(TRIM(student_profile_student_0.city)," + + " TRIM(action_required_homework_content_1.course_type)) AS expr_col__0" + + " FROM student_profile_student_0 INNER JOIN action_required_homework_content_1" + + " ON student_profile_student_0.stu_id = action_required_homework_content_1.stu_id," + + " edu_manage_department_2 WHERE CASE WHEN edu_manage_department_2.c = 0" + + " THEN FALSE WHEN student_profile_student_0.digest > edu_manage_department_2.m IS TRUE" + + " THEN TRUE WHEN edu_manage_department_2.c > edu_manage_department_2.d" + + " THEN NULL ELSE student_profile_student_0.digest > edu_manage_department_2.m END"); } @@ -345,7 +346,7 @@ public void testMixedDataTimeAndReverse() { + "(SELECT signature reved, CURRENT_TIMESTAMP pmonth," + " date_time FROM action_required.homework_content " + "ORDER BY date_time) t0", - "select trim(both ' ' from type) as expr_col__0, '20180101' as pday from edu_manage.department") + "select trim(type) as expr_col__0, '20180101' as pday from edu_manage.department") .checkTrans("SELECT action_required_homework_content_0.reved, " + "action_required_homework_content_0.pmonth, " + "edu_manage_department_1.expr_col__0, edu_manage_department_1.pday " @@ -375,9 +376,9 @@ public void 
testMixedIfAndElasticsearchLike() { + "\"aggregations\":{\"expr_col__0\":{\"max\":{\"field\":\"digest\"}}}}") .checkTrans("SELECT student_profile_student_1.expr_col__0, " + "CASE WHEN action_required_homework_content_0.signature = 'abc' " - + "THEN 'cde' ELSE 'def' END expr_col__1, " + + "THEN 'cde' ELSE 'def' END AS expr_col__1, " + "CASE WHEN action_required_homework_content_0.date_time <> '20180820' " - + "THEN 'Hello' ELSE 'WORLD' END col FROM action_required_homework_content_0 " + + "THEN 'Hello' ELSE 'WORLD' END AS col FROM action_required_homework_content_0 " + "LEFT JOIN student_profile_student_1 ON TRUE") .checkArchitect(("[E]->[E]->[T]->[L]")); } @@ -404,7 +405,7 @@ public void testElasticsearchGroupBy() { + "group by province order by province limit 10"; prepareForChecking(sql, RunnerType.DEFAULT) .checkExtra("{\"_source\":[\"province\",\"city\"]}") - .checkTrans("SELECT COUNT(*) expr_col__0, province " + .checkTrans("SELECT COUNT(*) AS expr_col__0, province " + "FROM student_profile_student_0 GROUP BY province ORDER BY province LIMIT 10") .checkArchitect("[D]->[E]->[T]->[L]"); @@ -436,7 +437,7 @@ public void testMixedSqlSubQuery() { prepareForChecking(sql).checkExtra( "SELECT stu_id, date_time, signature, course_type, content FROM action_required.homework_content", "{\"_source\":[\"type\"]}") - .checkTrans("SELECT COUNT(*) expr_col__0, COUNT(*) expr_col__1 " + .checkTrans("SELECT COUNT(*) AS expr_col__0, COUNT(*) AS expr_col__1 " + "FROM action_required_homework_content_0 INNER JOIN student_profile_student_1 " + "ON action_required_homework_content_0.date_time = student_profile_student_1.type " + "GROUP BY action_required_homework_content_0.date_time, action_required_homework_content_0.signature") @@ -478,7 +479,7 @@ public void testSimpleJoin() { + "on department.dep_id = department_student_relation.dep_id") .checkTrans("SELECT edu_manage_department_0.stu_id," + " edu_manage_department_0.times," - + " action_required_homework_content_1.stu_id stu_id0," + + " action_required_homework_content_1.stu_id AS stu_id0," + " action_required_homework_content_1.date_time," + " action_required_homework_content_1.signature," + " action_required_homework_content_1.course_type," @@ -497,7 +498,7 @@ public void testNotExistedFunctionsInFilter() { .checkExtra("select type, times from edu_manage.department " + "where ' world ' = 'world' group by type, times") .checkTrans("SELECT type FROM edu_manage_department_0 " - + "WHERE TRIM(BOTH ' ' FROM ' hello ') = 'hello' AND CEIL(times) = 1"); + + "WHERE TRIM(' hello ') = 'hello' AND CEIL(times) = 1"); //After cut having op, there is no project in sql, so returns '*' prepareForChecking("SELECT type FROM edu_manage.department " @@ -505,7 +506,7 @@ public void testNotExistedFunctionsInFilter() { + "GROUP BY type HAVING TRIM(' hello ') = 'hello'", RunnerType.SPARK) .checkExtra("select dep_id, cycle, type, times from edu_manage.department") .checkTrans("SELECT type FROM edu_manage_department_0 WHERE LENGTH(' world ') = 3" - + " OR CEIL(times) = 1 GROUP BY type HAVING TRIM(BOTH ' ' FROM ' hello ') = 'hello'"); + + " OR CEIL(times) = 1 GROUP BY type HAVING TRIM(' hello ') = 'hello'"); } @Test @@ -528,12 +529,12 @@ public void testElasticJoin() { "{\"_source\":[\"city\",\"province\",\"digest\",\"type\",\"stu_id\"]}") .checkTrans("SELECT student_profile_student_0.city, student_profile_student_0.province," + " student_profile_student_0.digest, student_profile_student_0.type," - + " student_profile_student_0.stu_id, student_profile_student_1.city city0," - + " 
student_profile_student_1.province province0, student_profile_student_1.digest digest0," - + " student_profile_student_1.type type0," - + " student_profile_student_1.stu_id stu_id0 " - + "FROM student_profile_student_0 INNER JOIN student_profile_student_1 " - + "ON student_profile_student_0.stu_id = student_profile_student_1.stu_id"); + + " student_profile_student_0.stu_id, student_profile_student_1.city AS city0," + + " student_profile_student_1.province AS province0," + + " student_profile_student_1.digest AS digest0, student_profile_student_1.type AS type0," + + " student_profile_student_1.stu_id AS stu_id0 FROM student_profile_student_0" + + " INNER JOIN student_profile_student_1" + + " ON student_profile_student_0.stu_id = student_profile_student_1.stu_id"); } @Test @@ -550,6 +551,72 @@ public void testRegexpOperation() { + "FROM action_required.homework_content WHERE LENGTH(signature) > 10"); } + @Test + public void testConcatTranslate() { + prepareForChecking("SELECT signature || 'hello' || 'world' FROM action_required.homework_content") + .checkExtra("SELECT CONCAT(CONCAT(signature, 'hello'), 'world') expr_col__0 " + + "FROM action_required.homework_content"); + } + + @Test + public void testCountDistinct() { + prepareForChecking("SELECT count(distinct signature) res FROM action_required.homework_content") + .checkExtra("SELECT COUNT(DISTINCT signature) res FROM action_required.homework_content"); + } + + @Test + public void testNotExistedFunction() { + try { + prepareForChecking("SELECT CHARACTER_LENGTH(signature) res FROM action_required.homework_content", + RunnerType.DEFAULT); + } catch (RuntimeException ex) { + Assert.assertTrue(ex.getMessage().contains("Unsupported function")); + } + } + + @Test + public void testDateFunction() { + prepareForChecking("SELECT YEAR(date_time) FROM action_required.homework_content", RunnerType.DEFAULT) + .checkExtra("SELECT YEAR(date_time) expr_col__0 FROM action_required.homework_content"); + prepareForChecking("SELECT MONTH(date_time) FROM action_required.homework_content", RunnerType.DEFAULT) + .checkExtra("SELECT MONTH(date_time) expr_col__0 FROM action_required.homework_content"); + prepareForChecking("SELECT DAYOFYEAR(date_time) FROM action_required.homework_content", RunnerType.DEFAULT) + .checkExtra("SELECT DAYOFYEAR(date_time) expr_col__0 FROM action_required.homework_content"); + prepareForChecking("SELECT DAYOFWEEK(date_time) FROM action_required.homework_content", RunnerType.DEFAULT) + .checkExtra("SELECT DAYOFWEEK(date_time) expr_col__0 FROM action_required.homework_content"); + } + + @Test + public void testElasticLike() { + + } + + @Test + public void testInsertInto() { + prepareForChecking("INSERT INTO `HELLO` IN HDFS SELECT * FROM homework_content"); + } + + @Test + public void testTrimFunction() { + prepareForChecking("SELECT TRIM(BOTH ' ' FROM 'hello') FROM student_profile.student", RunnerType.DEFAULT) + .checkExtra("{\"_source\":[\"city\",\"province\",\"digest\",\"type\",\"stu_id\"]}") + .checkTrans("SELECT TRIM('hello') AS expr_col__0 FROM student_profile_student_0"); + } + + //TODO add collection type + //agg function + @Test + public void testElasticUnsupportedFunctions() { + prepareForChecking("SELECT LENGTH('ddd'), TRIM('bbb'), LOWER(type) " + + "FROM student_profile.student group by type order by " + + "type limit 3", RunnerType.DEFAULT) + .checkExtra("{\"_source\":[\"type\"]}") + .checkTrans("SELECT LENGTH('ddd') AS expr_col__0," + + " TRIM('bbb') AS expr_col__1," + + " LOWER(type) AS expr_col__2, type" + + " FROM 
student_profile_student_0 GROUP BY type ORDER BY type LIMIT 3"); } + private SqlHolder prepareForChecking(String sql) { return new SqlHolder(producer.createQueryProcedure(sql)); } diff --git a/doc/API_doc.md b/doc/API_doc.md index bbe8f850..a1b5eea4 100644 --- a/doc/API_doc.md +++ b/doc/API_doc.md @@ -50,11 +50,27 @@ QSQL only supports these API currently, more API will be developed gradually. ### Submit Job -After writing the code in your project, you should package this class, then put it into your LINUX server. You need to submit the task using the submit command corresponding to the runner used in your program. If you use spark runner, you can submit just like this: - -``` -spark-submit -``` - -Note: If you have not set the runner, just use `java -jar` set up your package is also well. - +When submitting an API application with spark-submit, you can use the following script template. + +```shell +#!/bin/bash + +export QSQL_HOME="$(cd "`dirname "$0"`"/..; pwd)" + +. "${QSQL_HOME}/bin/load-qsql-env" +. "${QSQL_HOME}/bin/qsql-env" + +for jar in `find ${QSQL_HOME}/lib -maxdepth 1 -name "*.jar"` +do + if [ ! -n "${JARS}" ] + then + export JARS="${jar}" + elif [[ ! ${jar} =~ "elasticsearch-spark" ]] + then + export JARS="${JARS},${jar}" + fi +done + +/spark2.2/bin/spark-submit --class com.qihoo.qsql.CsvScanExample --conf "spark.driver.userClassPathFirst=true" --conf "spark.executor.extraClassPath=${QSQL_HOME}/lib/qsql-core-0.6.jar" --jars ${JARS} ${QSQL_HOME}/lib/qsql-core-0.6.jar +``` \ No newline at end of file diff --git "a/doc/API\346\226\207\346\241\243.md" "b/doc/API\346\226\207\346\241\243.md" index 21818b66..11631129 100644 --- "a/doc/API\346\226\207\346\241\243.md" +++ "b/doc/API\346\226\207\346\241\243.md" @@ -50,10 +50,27 @@ QSQL目前只支持以上API,后续随着功能迭代将开发更多可用的A ### 启动程序 -在项目中写完代码后,你应当将相关类打包,并将Jar包放在Linux服务器上,然后使用在程序中指定的Runner对应的任务提交方式提交QSQL的Jar包。例如,如果你使用Spark Runner,你可以像这样提交: +使用spark-submit提交API应用时可使用以下脚本模板。 -``` -spark-submit -``` +``` shell +#!/bin/bash + +export QSQL_HOME="$(cd "`dirname "$0"`"/..; pwd)" + +. "${QSQL_HOME}/bin/load-qsql-env" +. "${QSQL_HOME}/bin/qsql-env" + +for jar in `find ${QSQL_HOME}/lib -maxdepth 1 -name "*.jar"` +do + if [ ! -n "${JARS}" ] + then + export JARS="${jar}" + elif [[ ! ${jar} =~ "elasticsearch-spark" ]] + then + export JARS="${JARS},${jar}" + fi +done + +/spark2.2/bin/spark-submit --class com.qihoo.qsql.CsvScanExample --conf "spark.driver.userClassPathFirst=true" --conf "spark.executor.extraClassPath=${QSQL_HOME}/lib/qsql-core-0.6.jar" --jars ${JARS} ${QSQL_HOME}/lib/qsql-core-0.6.jar -注意:如果你没有设置Runner,请使用java -jar提交你的Jar包。(这句话有问题) \ No newline at end of file +``` \ No newline at end of file diff --git a/doc/BUILD_doc.md b/doc/BUILD_doc.md index c187f156..6348a4d9 100644 --- a/doc/BUILD_doc.md +++ b/doc/BUILD_doc.md @@ -5,68 +5,123 @@ ### Requirements - Java >= 1.8 -- Scala >= 2.11 - Spark >= 2.2 -- [Options] MySQL, Elasticsearch, Hive ### Deployment -Uncompress the package qsql-0.5.tar.gz +1. Download and decompress the binary package. Download link: https://github.com/Qihoo360/Quicksql/releases ```shell -tar -zxvf ./qsql-0.5.tar.gz +tar -zxvf ./qsql-release-bin.tar.gz ``` -Create a soft link +2. Go to the `conf/` directory, open `base-env.sh`, and set the environment variables. -```shell -ln -s qsql-0.5/ qsql -``` +- JAVA_HOME (REQUIRED VERSION >= 1.8) +- SPARK_HOME (REQUIRED VERSION >= 2.2) -The main directory structure after decompression of the release package is: +3. Go to the `bin/` directory and run the `run-example` script to test the environment.
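For reference, the API application that the spark-submit template above launches is an ordinary main class built on SqlRunner. Below is a minimal sketch assembled from the builder calls used by CsvScanExample and the pipeline tests in this patch; the `com.qihoo.qsql.api` package for SqlRunner is an assumption, since its import does not appear in this diff.

```java
package com.qihoo.qsql;

import com.qihoo.qsql.api.SqlRunner;                    // package assumed, not shown in this diff
import com.qihoo.qsql.api.SqlRunner.Builder.RunnerType; // nested enum, as used by the tests above
import com.qihoo.qsql.utils.SqlUtil;

public class MyQsqlApp {
    public static void main(String[] args) {
        String sql = "select * from depts"; // sample table taken from CsvScanExample
        SqlRunner runner = SqlRunner.builder()
            .setTransformRunner(RunnerType.SPARK) // RunnerType.DEFAULT lets QSQL choose the engine
            .setSchemaPath(SqlUtil.getSchemaPath(SqlUtil.parseTableName(sql).tableNames))
            .setAppName("my qsql app")
            .setAcceptedResultsNum(100)
            .ok();
        runner.sql(sql).show().run();
        System.exit(0);
    }
}
```

Packaged into a jar, such a class is what the template's `--class` argument points at (`com.qihoo.qsql.CsvScanExample` in the example above).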
-- bin: included all of scripts for building environment and running sql. -- conf: included all of configures in runtime. -- data: stored data for testing. -- metastore: included a embedded database and create table statements scripts for managing metadata. +```shell +./run-example com.qihoo.qsql.CsvJoinWithEsExample +``` -In directory ```$QSQL_HOME/conf```, configure the following files: +If the query returns the following results, the deployment succeeded. -- base-env.sh:Included correlated environment variables: - - JAVA_HOME - - SPARK_HOME - - QSQL_CLUSTER_URL - - QSQL_HDFS_TMP -- qsql-runner.properties:Included serveral runtime properties -- log4j.properties:Included logger level +```sql ++------+-------+----------+--------+------+-------+------+ +|deptno| name| city|province|digest| type|stu_id| ++------+-------+----------+--------+------+-------+------+ +| 40|Scholar| BROCKTON| MA| 59498|Scholar| null| +| 45| Master| CONCORD| NH| 34035| Master| null| +| 40|Scholar|FRAMINGHAM| MA| 65046|Scholar| null| ++------+-------+----------+--------+------+-------+------+ +``` ## Getting Started -### QSQL Shell - -``` -./bin/qsql -e "select 1" +Before querying the real data source, you need to put metadata information such as tables and fields into the QSQL metastore. + +### Metadata Extraction + +QSQL supports extracting metadata from MySQL, Elasticsearch, Hive and Oracle through scripts. + +#### Basic Usage + +Script location: $QSQL_HOME/bin/meta-extract + +Accepted Parameters: + +-p: data source connection information; see the examples below for configuration details + +-d: data source type [oracle, mysql, hive, es] + +-r: table name filter condition, following LIKE syntax [%, _, ?] + +```json +//MySQL Example +{ + "jdbcDriver": "com.mysql.jdbc.Driver", + "jdbcUrl": "jdbc:mysql://localhost:3306/db", + "jdbcUser": "user", + "jdbcPassword": "pass" +} +//Oracle Example +{ + "jdbcDriver": "oracle.jdbc.driver.OracleDriver", + "jdbcUrl": "jdbc:oracle:thin:@localhost:1521/namespace", + "jdbcUser": "user", + "jdbcPassword": "pass" +} +//Elasticsearch Example +{ + "esNodes": "192.168.1.1", + "esPort": "9000", + "esUser": "user", + "esPass": "pass", + "esIndex": "index/type" +} +//Hive Example +{ + "jdbcDriver": "com.mysql.jdbc.Driver", + "jdbcUrl": "jdbc:mysql://localhost:3306/db", + "jdbcUser": "user", + "jdbcPassword": "pass", + "dbName": "hive_db" +} ``` -### Query Example +#### Usage Example -Several sample queries are included with QSQL. To run one of them, use ```./run-example [params]``` +Note: double quotes are special characters in Linux shells and must be escaped when passing JSON parameters. -Example 1: Memory Table Query +Sample 1 (MySQL): -``` -./bin/run-example com.qihoo.qsql.CsvScanExample +1. Extract the metadata of the table named my_table from MySQL and import it into the embedded metastore. + +```shell +./meta-extract -p "{\"jdbcDriver\": \"com.mysql.jdbc.Driver\", \"jdbcUrl\": \"jdbc:mysql://localhost:3306/db\", \"jdbcUser\": \"user\",\"jdbcPassword\": \"pass\"}" -d "mysql" -r "my_table" ``` -Example 2: Hive Join MySQL +2. After the import completes, run a query. +```shell +./qsql -e "SELECT * FROM my_table LIMIT 10" ``` -./bin/run-example com.qihoo.qsql.CsvJoinWithEsExample -``` -**Note**: +Sample 2 (Elasticsearch): + +1.
Extract all type metadata from Elasticsearch and import it into the embedded metastore. + + ```shell + ./meta-extract -p "{\"esNodes\": \"192.168.1.1\",\"esPort\": \"9090\",\"esUser\": \"user\",\"esPass\": \"pass\",\"esIndex\": \"index/type\"}" -d "es" -r "%" + ``` + +2. After the import completes, run a query. -If you are running a hybrid query, make sure the current machine has deployed Spark, Hive and MySQL environment and inserted the correct connection information of Hive and MySQL into the metastore. +```shell +./qsql -e "SELECT name, age FROM my_type WHERE age < 24 LIMIT 10" +``` ## Properties Configure @@ -184,107 +239,4 @@ Initialize the sample data to the MySQL database ```shell cd $QSQL_HOME/bin/ ./metadata --dbType mysql --action init -``` - -### Configure Metadata - -#### Hive - -Sample Configuration: - -#### DBS - -| DB_ID | DESC | NAME | DB_TYPE | -| ----- | ------------ | ------------- | ------- | -| 26 | hive message | hive_database | hive | - -#### DATABASE_PARAMS - -| DB_ID | PARAM_KEY | PARAM_VALUE | -| ----- | --------- | ------------ | -| 26 | cluster | cluster_name | - -#### TBLS - -| TBL_ID | CREATE_TIME | DB_ID | TBL_NAME | -| ------ | ------------------- | ----- | ----------- | -| 60 | 2018-11-06 10:44:51 | 26 | hive_mobile | - -#### COLUMNS - -| CD_ID | COMMENT | COLUMN_NAME | TYPE_NAME | INTEGER_IDX | -| ----- | ------- | ----------- | --------- | ----------- | -| 60 | | retsize | string | 1 | -| 60 | | im | string | 2 | -| 60 | | wto | string | 3 | -| 60 | | pro | int | 4 | -| 60 | | pday | string | 5 | - -#### Elasticsearch - -Sample Configuration: - -#### DBS - -| DB_ID | DESC | NAME | DB_TYPE | -| ----- | ---------- | -------- | ------- | -| 24 | es message | es_index | es | - -#### DATABASE_PARAMS - -| DB_ID | PARAM_KEY | PARAM_VALUE | -| ----- | ----------- | ---------------- | -| 24 | esNodes | localhost | -| 24 | esPort | 9025 | -| 24 | esUser | es_user | -| 24 | esPass | es_password | -| 24 | esIndex | es_index/es_type | -| 24 | esScrollNum | 156 | - -#### TBLS - -| TBL_ID | CREATE_TIME | DB_ID | TBL_NAME | -| ------ | ------------------- | ----- | -------- | -| 57 | 2018-11-06 10:44:51 | 24 | profile | - -#### COLUMNS - -| CD_ID | COMMENT | COLUMN_NAME | TYPE_NAME | INTEGER_IDX | -| ----- | ------- | ----------- | --------- | ----------- | -| 57 | comment | id | int | 1 | -| 57 | comment | name | string | 2 | -| 57 | comment | country | string | 3 | -| 57 | comment | gender | string | 4 | -| 57 | comment | operator | string | 5 | - -#### MySQL - -Sample Configuration: - -#### DBS - -| DB_ID | DESC | NAME | DB_TYPE | -| ----- | ---------------- | -------------- | ------- | -| 25 | mysql db message | mysql_database | mysql | - -#### DATABASE_PARAMS - -| DB_ID | PARAM_KEY | PARAM_VALUE | -| ----- | ------------ | ------------------------------------------ | -| 25 | jdbcDriver | com.mysql.jdbc.Driver | -| 25 | jdbcUrl | jdbc:mysql://localhost:3306/mysql_database | -| 25 | jdbcUser | root | -| 25 | jdbcPassword | root | - -#### TBLS - -| TBL_ID | CREATE_TIME | DB_ID | TBL_NAME | -| ------ | ------------------- | ----- | --------- | -| 58 | 2018-11-06 10:44:51 | 25 | test_date | - -#### COLUMNS - -| CD_ID | COMMENT | COLUMN_NAME | TYPE_NAME | INTEGER_IDX | -| ----- | ------- | ----------- | --------- | ----------- | -| 58 | comment | id | int | 1 | -| 58 | comment | name | string | 2 | \ No newline at end of file +``` \ No newline at end of file diff --git "a/doc/BUILD\346\226\207\346\241\243.md" "b/doc/BUILD\346\226\207\346\241\243.md" index
f7ed32dc..640a7fd1 100644 --- "a/doc/BUILD\346\226\207\346\241\243.md" +++ "b/doc/BUILD\346\226\207\346\241\243.md" @@ -4,101 +4,128 @@ ## 集群环境部署 -### 1 编译环境依赖 +### 环境依赖 -- java >= 1.8 -- scala >= 2.11 -- maven >= 3.3 +- Java >= 1.8 +- Spark >= 2.2 -### 2 编译步骤 +### 部署流程 -在源码根目录下,执行: +1. 下载并解压二进制包。下载地址:https://github.com/Qihoo360/Quicksql/releases ```shell -mvn -DskipTests clean package +tar -zxvf ./qsql-release-bin.tar.gz ``` -编译成功后执行: +2. 进入conf目录,打开base-env.sh,设置环境变量。 -```shell -ls ./target/ -``` - -在./target/目录下,会生成发布包 qsql-0.5.tar.gz。 - -### 3 部署环境依赖 +- JAVA_HOME (务必保证版本 >= 1.8) +- SPARK_HOME (务必保证版本 >= 2.2) -- CentOS 6.2 -- java >= 1.8 -- scala >= 2.11 -- spark >= 2.2 -- [可选] 目前QSQL支持的存储引擎MySQL、Elasticsearch、Hive、Druid - -### 4 客户端部署 - -在客户端解压缩发布包 qsql-0.5.tar.gz +3. 进入bin目录,执行run-example脚本测试环境。 ```shell -tar -zxvf ./qsql-0.5.tar.gz +./run-example com.qihoo.qsql.CsvJoinWithEsExample ``` -建立软链 +如果可以查询出以下结果,则表示部署成功。 -```shell -ln -s qsql-0.5/ qsql +```sql ++------+-------+----------+--------+------+-------+------+ +|deptno| name| city|province|digest| type|stu_id| ++------+-------+----------+--------+------+-------+------+ +| 40|Scholar| BROCKTON| MA| 59498|Scholar| null| +| 45| Master| CONCORD| NH| 34035| Master| null| +| 40|Scholar|FRAMINGHAM| MA| 65046|Scholar| null| ++------+-------+----------+--------+------+-------+------+ ``` -该发布包解压后的主要目录结构如下: +## 开始执行 -- bin:脚本目录 -- conf:配置文件 -- data:存放测试数据 -- lib:依赖jar包 -- metastore:元数据管理 +在查询真实数据源前,需要将数据源相关的表、字段等元数据信息录入QSQL的元数据库。 + +### 元数据录入 + +QSQL支持通过脚本录入MySQL,Elasticsearch,Hive和Oracle的元数据。 + +#### 功能介绍 + +执行脚本:/bin/meta-extract + +接收参数: + +-p: 数据源连接信息,连接配置详情见下方示例 + +-d: 数据源类型 [oracle, mysql, hive, es] + +-r: 表名过滤条件,遵循LIKE语法 [%:全部匹配,_:占位匹配,?:可选匹配] + +```json +//MySQL示例: +{ + "jdbcDriver": "com.mysql.jdbc.Driver", + "jdbcUrl": "jdbc:mysql://localhost:3306/db", + "jdbcUser": "user", + "jdbcPassword": "pass" +} +//Oracle示例: +{ + "jdbcDriver": "oracle.jdbc.driver.OracleDriver", + "jdbcUrl": "jdbc:oracle:thin:@localhost:1521/namespace", + "jdbcUser": "user", + "jdbcPassword": "pass" +} +//Elasticsearch示例: +{ + "esNodes": "192.168.1.1", + "esPort": "9000", + "esUser": "user", + "esPass": "pass", + "esIndex": "index/type" +} +//Hive示例(当前支持元数据存在MySQL中的Hive元数据抽取): +{ + "jdbcDriver": "com.mysql.jdbc.Driver", + "jdbcUrl": "jdbc:mysql://localhost:3306/db", + "jdbcUser": "user", + "jdbcPassword": "pass", + "dbName": "hive_db" +} +``` -在QSQL发布包$QSQL_HOME/conf目录中,分别配置如下文件: +#### 使用示例 -- base-env.sh:设置相关环境变量,如: - - JAVA_HOME - - SPARK_HOME - - QSQL_CLUSTER_URL - - QSQL_HDFS_TMP -- qsql-runner.properties:设置系统参数 -- log4j.properties:设置日志级别 +注意:linux中双引号"是特殊字符,传JSON参数时需要做转义。 -## 开始执行 +**示例场景一 (MySQL):** -### QSQL Shell +1. 从MySQL中取出表名为my_table表的元数据并导入内嵌元数据库 -``` -./bin/qsql -e "select 1" -``` +``````shell +./meta-extract -p "{\"jdbcDriver\": \"com.mysql.jdbc.Driver\", \"jdbcUrl\": \"jdbc:mysql://localhost:3306/db\", \"jdbcUser\": \"user\",\"jdbcPassword\": \"pass\"}" -d "mysql" -r "my_table" +`````` -### 示例程序 +2. 导入完成后,进行查询 -QSQL附带了示例目录中的几个示例程序。要运行其中一个,使用./run-example [params]。例如: +``````shell +./qsql -e "SELECT * FROM my_table LIMIT 10" +`````` -内存表数据: +**示例场景二 (Elasticsearch):** -``` -./bin/run-example com.qihoo.qsql.CsvScanExample -``` - -Hive join MySQL: - -``` -./bin/run-example com.qihoo.qsql.CsvJoinWithEsExample -``` +1. 
从Elasticsearch取出所有的type元数据并导入内嵌元数据库 -**注意** +`````shell +./meta-extract -p "{\"esNodes\": \"192.168.1.1\",\"esPort\": \"9090\",\"esUser\": \"user\",\"esPass\": \"pass\",\"esIndex\": \"index/type\"}" -d "es" -r "%" +````` -``` -./run-example -``` +2. 导入完成后,进行查询 -运行混算,请确保当前客户端存在Spark、Hive、MySQL环境。并且将Hive与MySQL的连接信息添加到元数据管理中。 +``````shell +./qsql -e "SELECT name, age FROM my_type WHERE age < 24 LIMIT 10" +`````` -## 参数配置 +## 其他参数配置 ### 环境变量 @@ -140,46 +167,6 @@ Hive join MySQL: | meta.extern.schema.user | (none) | 外部数据库的用户名 | | meta.extern.schema.password | (none) | 外部数据库的密码 | -## 元数据管理 - -### 表结构 - -#### DBS - -| 表字段 | 说明 | 示例数据 | -| ------- | ---------- | ---------------- | -| DB_ID | 数据库ID | 1 | -| DESC | 数据库描述 | es 索引 | -| NAME | 数据库名 | es_profile_index | -| DB_TYPE | 数据库类型 | es、hive、mysql | - -#### DATABASE_PARAMS - -| 表字段 | 说明 | 示例数据 | -| ----------- | -------- | -------- | -| DB_ID | 数据库ID | 1 | -| PARAM_KEY | 参数名 | UserName | -| PARAM_VALUE | 参数值 | root | - -#### TBLS - -| 表字段 | 说明 | 示例数据 | -| ------------ | -------- | ------------------- | -| TBL_ID | 表ID | 101 | -| CREATED_TIME | 创建时间 | 2018-10-22 14:36:10 | -| DB_ID | 数据库ID | 1 | -| TBL_NAME | 表名 | student | - -#### COLUMNS - -| 表字段 | 说明 | 示例数据 | -| ----------- | ---------- | -------- | -| CD_ID | 字段信息ID | 10101 | -| COMMENT | 字段注释 | 学生姓名 | -| COLUMN_NAME | 字段名 | name | -| TYPE_NAME | 字段类型 | varchar | -| INTEGER_IDX | 字段顺序 | 1 | - ### 内置SQLite数据库 在QSQL发布包$QSQL_HOME/metastore目录中,存在如下文件: @@ -214,82 +201,3 @@ vim metadata.properties cd $QSQL_HOME/bin/ ./metadata --dbType mysql --action init ``` - -### 配置元数据信息 - -#### Hive - -示例配置: - -| DB_ID | DESC | NAME | DB_TYPE | -| ----- | ------------ | ------------- | ------- | -| 26 | hive message | hive_database | hive | - -| DB_ID | PARAM_KEY | PARAM_VALUE | -| ----- | --------- | ------------ | -| 26 | cluster | cluster_name | - -| TBL_ID | CREATE_TIME | DB_ID | TBL_NAME | -| ------ | ------------------- | ----- | ----------- | -| 60 | 2018-11-06 10:44:51 | 26 | hive_mobile | - -| CD_ID | COMMENT | COLUMN_NAME | TYPE_NAME | INTEGER_IDX | -| ----- | ------- | ----------- | --------- | ----------- | -| 60 | | retsize | string | 1 | -| 60 | | im | string | 2 | -| 60 | | wto | string | 3 | -| 60 | | pro | int | 4 | -| 60 | | pday | string | 5 | - -#### Elasticsearch - -示例配置: - -| DB_ID | DESC | NAME | DB_TYPE | -| ----- | ---------- | -------- | ------- | -| 24 | es message | es_index | es | - -| DB_ID | PARAM_KEY | PARAM_VALUE | -| ----- | ----------- | ---------------- | -| 24 | esNodes | localhost | -| 24 | esPort | 9025 | -| 24 | esUser | es_user | -| 24 | esPass | es_password | -| 24 | esIndex | es_index/es_type | -| 24 | esScrollNum | 156 | - -| TBL_ID | CREATE_TIME | DB_ID | TBL_NAME | -| ------ | ------------------- | ----- | -------- | -| 57 | 2018-11-06 10:44:51 | 24 | profile | - -| CD_ID | COMMENT | COLUMN_NAME | TYPE_NAME | INTEGER_IDX | -| ----- | ------- | ----------- | --------- | ----------- | -| 57 | comment | id | int | 1 | -| 57 | comment | name | string | 2 | -| 57 | comment | country | string | 3 | -| 57 | comment | gender | string | 4 | -| 57 | comment | operator | string | 5 | - -#### MySQL - -示例配置: - -| DB_ID | DESC | NAME | DB_TYPE | -| ----- | ---------------- | -------------- | ------- | -| 25 | mysql db message | mysql_database | mysql | - -| DB_ID | PARAM_KEY | PARAM_VALUE | -| ----- | ------------ | ------------------------------------------ | -| 25 | jdbcDriver | com.mysql.jdbc.Driver | -| 25 | jdbcUrl | jdbc:mysql://localhost:3006/mysql_database | -| 25 | 
jdbcUser | root | -| 25 | jdbcPassword | root | - -| TBL_ID | CREATE_TIME | DB_ID | TBL_NAME | -| ------ | ------------------- | ----- | --------- | -| 58 | 2018-11-06 10:44:51 | 25 | test_date | - -| CD_ID | COMMENT | COLUMN_NAME | TYPE_NAME | INTEGER_IDX | -| ----- | ------- | ----------- | --------- | ----------- | -| 58 | comment | id | int | 1 | -| 58 | comment | name | string | 2 | \ No newline at end of file diff --git a/doc/JDBC_doc.md b/doc/JDBC_doc.md index 7e37fbce..e6bf33a4 100644 --- a/doc/JDBC_doc.md +++ b/doc/JDBC_doc.md @@ -8,7 +8,8 @@ AutomaticConnection is one of the methods for querying in Quicksql, and you can ### Getting Started -Add Quicksql jars:`qsql-core-0.5.jar`,`qsql-calcite-elasticsearch-0.5.jar`,`qsql-calcite-analysis-0.5.jar` into your dependent libs and you can connect with Quicksql. +Add the Quicksql jars `qsql-core-0.6.jar`, `qsql-calcite-elasticsearch-0.6.jar` and `qsql-calcite-analysis-0.6.jar` to your +project dependencies, and you can connect with Quicksql. Here is the example, diff --git "a/doc/JDBC\346\226\207\346\241\243.md" "b/doc/JDBC\346\226\207\346\241\243.md" index d99c90f0..2fba52bc 100644 --- "a/doc/JDBC\346\226\207\346\241\243.md" +++ "b/doc/JDBC\346\226\207\346\241\243.md" @@ -8,7 +8,8 @@ AutomaticConnection 是使用Quicksql进行查询的方法之一,你可以通 ### 开始执行一个SQL -首先,将Quicksql相关jar包放入你的项目依赖中,包括:`qsql-core-0.5.jar`,`qsql-calcite-elasticsearch-0.5.jar`,`qsql-calcite-analysis-0.5.jar` ,然后你就可以开始写代码啦~ +首先,将Quicksql相关jar包放入你的项目依赖中,包括:`qsql-core-0.6.jar`,`qsql-calcite-elasticsearch-0.6.jar`,`qsql-calcite-analysis-0.6.jar` +,然后你就可以开始写代码啦~ 下面是示例代码: diff --git a/elasticsearch/pom.xml b/elasticsearch/pom.xml index f9532af1..6cc296a1 100644 --- a/elasticsearch/pom.xml +++ b/elasticsearch/pom.xml @@ -23,12 +23,12 @@ limitations under the License.
com.qihoo.qsql qsql - 0.5 + 0.6 qsql-calcite-elasticsearch jar - 0.5 + 0.6 qsql-calcite-elasticsearch Elasticsearch adapter for Calcite diff --git a/elasticsearch/src/main/java/org/apache/calcite/adapter/elasticsearch/ElasticsearchJson.java b/elasticsearch/src/main/java/org/apache/calcite/adapter/elasticsearch/ElasticsearchJson.java index 6129e41c..0e96f6f1 100644 --- a/elasticsearch/src/main/java/org/apache/calcite/adapter/elasticsearch/ElasticsearchJson.java +++ b/elasticsearch/src/main/java/org/apache/calcite/adapter/elasticsearch/ElasticsearchJson.java @@ -79,7 +79,7 @@ static void visitValueNodes(Aggregations aggregations, Consumer new ArrayList<>()).add(v); aggregations.forEach(a -> visitValueNodes(a, new ArrayList<>(), cons)); rows.forEach((k, v) -> { - if (v.stream().anyMatch(val -> val instanceof GroupValue)) { + if (v.stream().allMatch(val -> val instanceof GroupValue)) { v.forEach(tuple -> { Map groupRow = new LinkedHashMap<>(k.keys); groupRow.put(tuple.getName(), tuple.value()); diff --git a/example/pom.xml b/example/pom.xml index be84bd87..68a8a189 100644 --- a/example/pom.xml +++ b/example/pom.xml @@ -5,7 +5,7 @@ qsql com.qihoo.qsql - 0.5 + 0.6 4.0.0 @@ -40,7 +40,7 @@ com.qihoo.qsql qsql-core - 0.5 + 0.6 diff --git a/example/src/main/java/com/qihoo/qsql/CsvScanExample.java b/example/src/main/java/com/qihoo/qsql/CsvScanExample.java index f9c59447..9f0094ef 100644 --- a/example/src/main/java/com/qihoo/qsql/CsvScanExample.java +++ b/example/src/main/java/com/qihoo/qsql/CsvScanExample.java @@ -8,7 +8,7 @@ public class CsvScanExample { public static void main(String[] args) throws IOException { RuntimeEnv.init(); - String sql = "select * from DEPTS"; + String sql = "select * from depts"; SqlRunner.Builder.RunnerType runnerType = RunnerType.DEFAULT; SqlRunner runner = SqlRunner.builder() .setTransformRunner(runnerType) @@ -17,6 +17,6 @@ public static void main(String[] args) throws IOException { .setAcceptedResultsNum(100) .ok(); runner.sql(sql).show().run(); - System.exit(-1); + System.exit(0); } } diff --git a/example/src/main/java/com/qihoo/qsql/env/RuntimeEnv.java b/example/src/main/java/com/qihoo/qsql/env/RuntimeEnv.java index 17eacf09..11fb8206 100644 --- a/example/src/main/java/com/qihoo/qsql/env/RuntimeEnv.java +++ b/example/src/main/java/com/qihoo/qsql/env/RuntimeEnv.java @@ -13,7 +13,9 @@ import java.util.Map; public class RuntimeEnv { - + static { + PropertiesReader.configLogger(); + } private static final String TEST_DATA_URL = PropertiesReader.getTestDataFilePath(); private static final EmbeddedElasticsearchPolicy NODE = EmbeddedElasticsearchPolicy.create(); diff --git a/pom.xml b/pom.xml index ceda9ae9..2f874336 100644 --- a/pom.xml +++ b/pom.xml @@ -8,7 +8,7 @@ com.qihoo.qsql qsql pom - 0.5 + 0.6 qsql @@ -146,7 +146,7 @@ 1.6.1 - qsql-0.5 + qsql-0.6
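Two behavioral changes in this patch are easy to miss among the renames. First, ElasticsearchJson.visitValueNodes now expands a row into group rows only when every value is a GroupValue (allMatch) rather than when any single one is (anyMatch). The snippet below only illustrates that predicate change with stand-in types; GroupValue and MetricValue here are hypothetical markers, not the real adapter classes.

```java
import java.util.Arrays;
import java.util.List;

public class AllMatchVsAnyMatch {
    // Hypothetical stand-ins for the adapter's aggregation value types.
    interface Value {}
    static class GroupValue implements Value {}
    static class MetricValue implements Value {}

    public static void main(String[] args) {
        List<Value> mixed = Arrays.asList(new GroupValue(), new MetricValue());

        // Old predicate: one GroupValue was enough to take the group-expansion branch.
        boolean old = mixed.stream().anyMatch(v -> v instanceof GroupValue);   // true

        // New predicate: the branch is taken only when the row is purely group values.
        boolean fixed = mixed.stream().allMatch(v -> v instanceof GroupValue); // false

        System.out.println("anyMatch=" + old + ", allMatch=" + fixed);
    }
}
```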
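Second, parseTableName no longer returns a bare list of table names: every call site above now reads `.tableNames` and can ask `.isDml()`, which is true for the new INSERT INTO ... IN ... form. A minimal migration sketch, assuming QueryTables lives in `com.qihoo.qsql.plan` (the tests use it unqualified from that package) and exposes exactly the field and method seen in these tests:

```java
import com.qihoo.qsql.plan.QueryTables; // definition not part of this diff; shape inferred from the tests
import com.qihoo.qsql.utils.SqlUtil;

import java.util.ArrayList;
import java.util.List;

public class ParseTableNameMigration {
    public static void main(String[] args) {
        String sql = "INSERT INTO `hello` IN HDFS SELECT 1"; // statement from testParseInsertInto

        // Before this patch, parseTableName returned the names directly.
        QueryTables tables = SqlUtil.parseTableName(sql);

        // The raw names now live in the public tableNames collection ...
        List<String> names = new ArrayList<>(tables.tableNames);

        // ... and the wrapper flags DML statements, as TableNameCollectorTest asserts.
        boolean dml = tables.isDml(); // true for INSERT INTO ... IN ...

        System.out.println(names + ", dml=" + dml);
    }
}
```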