diff --git a/metacat-common/src/main/java/com/netflix/metacat/common/type/TypeRegistry.java b/metacat-common/src/main/java/com/netflix/metacat/common/type/TypeRegistry.java index 31598edec..3feaab29a 100644 --- a/metacat-common/src/main/java/com/netflix/metacat/common/type/TypeRegistry.java +++ b/metacat-common/src/main/java/com/netflix/metacat/common/type/TypeRegistry.java @@ -17,11 +17,11 @@ */ package com.netflix.metacat.common.type; -import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import java.util.List; +import java.util.Objects; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; @@ -41,7 +41,7 @@ public final class TypeRegistry implements TypeManager { * Constructor. */ private TypeRegistry() { - Preconditions.checkNotNull(types, "types is null"); + Objects.requireNonNull(types, "types is null"); addType(BaseType.UNKNOWN); addType(BaseType.BIGINT); addType(BaseType.BOOLEAN); @@ -80,7 +80,7 @@ public static TypeRegistry getTypeRegistry() { * @param type parameter */ public static void verifyTypeClass(final Type type) { - Preconditions.checkNotNull(type, "type is null"); + Objects.requireNonNull(type, "type is null"); } /** @@ -121,9 +121,11 @@ private Type instantiateParametricType(final TypeSignature signature) { } final Type instantiatedType = parametricType.createType(parameterTypes.build(), signature.getLiteralParameters()); - Preconditions.checkState(instantiatedType.getTypeSignature().equals(signature), - "Instantiated parametric type name (%s) does not match expected name (%s)", - instantiatedType, signature); + if (!instantiatedType.getTypeSignature().equals(signature)) { + throw new IllegalStateException(String.format( + "Instantiated parametric type name (%s) does not match expected name (%s)", + instantiatedType, signature)); + } return instantiatedType; } @@ -135,8 +137,9 @@ private Type instantiateParametricType(final TypeSignature signature) { public void addType(final Type type) { verifyTypeClass(type); final Type existingType = types.putIfAbsent(type.getTypeSignature(), type); - Preconditions.checkState(existingType == null - || existingType.equals(type), "Type %s is already registered", type); + if (!(existingType == null || existingType.equals(type))) { + throw new IllegalStateException(String.format("Type %s is already registered", type)); + } } /** @@ -146,8 +149,9 @@ public void addType(final Type type) { */ public void addParametricType(final ParametricType parametricType) { final TypeEnum baseType = parametricType.getBaseType(); - Preconditions.checkArgument(!parametricTypes.containsKey(baseType), - "Parametric type already registered: %s", baseType); + if (parametricTypes.containsKey(baseType)) { + throw new IllegalArgumentException(String.format("Parametric type already registered: %s", baseType)); + } parametricTypes.putIfAbsent(baseType, parametricType); } diff --git a/metacat-common/src/main/java/com/netflix/metacat/common/type/TypeSignature.java b/metacat-common/src/main/java/com/netflix/metacat/common/type/TypeSignature.java index 0ffa6f479..c445a407c 100644 --- a/metacat-common/src/main/java/com/netflix/metacat/common/type/TypeSignature.java +++ b/metacat-common/src/main/java/com/netflix/metacat/common/type/TypeSignature.java @@ -19,7 +19,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonValue; -import com.google.common.base.Preconditions; +//import
com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import lombok.EqualsAndHashCode; @@ -69,9 +69,10 @@ public TypeSignature( ) { if (literalParameters != null) { for (final Object literal : literalParameters) { - Preconditions.checkArgument( - literal instanceof String || literal instanceof Long, - "Unsupported literal type: %s", literal.getClass()); + if (!(literal instanceof String || literal instanceof Long)) { + throw new IllegalArgumentException( + String.format("Unsupported literal type: %s", literal.getClass())); + } } this.literalParameters = ImmutableList.copyOf(literalParameters); } else { @@ -119,17 +120,28 @@ public static TypeSignature parseTypeSignature(final String signature) { final char c = signature.charAt(i); if (c == '<') { if (bracketCount == 0) { - Preconditions.checkArgument(baseName == null, "Expected baseName to be null"); - Preconditions.checkArgument(parameterStart == -1, "Expected parameter start to be -1"); + if (baseName != null) { + throw new IllegalArgumentException("Expected baseName to be null"); + } + + if (parameterStart != -1) { + throw new IllegalArgumentException("Expected parameter start to be -1"); + } + baseName = signature.substring(0, i); parameterStart = i + 1; } bracketCount++; } else if (c == '>') { bracketCount--; - Preconditions.checkArgument(bracketCount >= 0, "Bad type signature: '%s'", signature); + if (bracketCount < 0) { + throw new IllegalArgumentException(String.format("Bad type signature: '%s'", signature)); + } + if (bracketCount == 0) { - Preconditions.checkArgument(parameterStart >= 0, "Bad type signature: '%s'", signature); + if (parameterStart < 0) { + throw new IllegalArgumentException(String.format("Bad type signature: '%s'", signature)); + } parameters.add(parseTypeSignature(signature.substring(parameterStart, i))); parameterStart = i + 1; if (i == signature.length() - 1) { @@ -138,31 +150,55 @@ public static TypeSignature parseTypeSignature(final String signature) { } } else if (c == ',') { if (bracketCount == 1 && !inLiteralParameters) { - Preconditions.checkArgument(parameterStart >= 0, "Bad type signature: '%s'", signature); + if (parameterStart < 0) { + throw new IllegalArgumentException(String.format("Bad type signature: '%s'", signature)); + } + parameters.add(parseTypeSignature(signature.substring(parameterStart, i))); parameterStart = i + 1; } else if (bracketCount == 0 && inLiteralParameters) { - Preconditions.checkArgument(parameterStart >= 0, "Bad type signature: '%s'", signature); + if (parameterStart < 0) { + throw new IllegalArgumentException(String.format("Bad type signature: '%s'", signature)); + } + literalParameters.add(parseLiteral(signature.substring(parameterStart, i))); parameterStart = i + 1; } } else if (c == '(') { - Preconditions.checkArgument(!inLiteralParameters, "Bad type signature: '%s'", signature); + if (inLiteralParameters) { + throw new IllegalArgumentException(String.format("Bad type signature: '%s'", signature)); + } + inLiteralParameters = true; if (bracketCount == 0) { if (baseName == null) { - Preconditions.checkArgument(parameters.isEmpty(), "Expected no parameters"); - Preconditions.checkArgument(parameterStart == -1, "Expected parameter start to be -1"); + if (!parameters.isEmpty()) { + throw new IllegalArgumentException("Expected no parameters"); + } + + if (parameterStart != -1) { + throw new IllegalArgumentException("Expected parameter start to be -1"); + } + baseName = signature.substring(0, i); } 
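Note on the TypeRegistry and TypeSignature hunks above: they swap Guava's Preconditions helpers for plain JDK checks while preserving the exception types Guava would have thrown (checkNotNull maps to Objects.requireNonNull, checkArgument to IllegalArgumentException, checkState to IllegalStateException). A minimal, self-contained sketch of that mapping, illustrative only and not part of the patch, with messages copied from the hunks:

```java
import java.util.Objects;

/**
 * Sketch of the Preconditions-to-JDK mapping applied in TypeRegistry and TypeSignature:
 *   Preconditions.checkNotNull(x, msg)    -> Objects.requireNonNull(x, msg)
 *   Preconditions.checkArgument(cond, ..) -> if (!cond) throw new IllegalArgumentException(..)
 *   Preconditions.checkState(cond, ..)    -> if (!cond) throw new IllegalStateException(..)
 */
final class PreconditionsMigrationSketch {

    static void verifyTypeClass(final Object type) {
        // was: Preconditions.checkNotNull(type, "type is null");
        Objects.requireNonNull(type, "type is null");
    }

    static void checkLiteral(final Object literal) {
        // was: Preconditions.checkArgument(literal instanceof String || literal instanceof Long, ...);
        if (!(literal instanceof String || literal instanceof Long)) {
            throw new IllegalArgumentException(
                String.format("Unsupported literal type: %s", literal.getClass()));
        }
    }

    static void checkNotRegistered(final Object existingType, final Object type) {
        // was: Preconditions.checkState(existingType == null || existingType.equals(type), ...);
        if (!(existingType == null || existingType.equals(type))) {
            throw new IllegalStateException(String.format("Type %s is already registered", type));
        }
    }

    private PreconditionsMigrationSketch() { }
}
```

Because the replacement throws the same exception types with the same messages, callers that catch IllegalArgumentException or IllegalStateException keep behaving exactly as before; only the Preconditions usage is removed from these two classes.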
parameterStart = i + 1; } } else if (c == ')') { - Preconditions.checkArgument(inLiteralParameters, "Bad type signature: '%s'", signature); + if (!inLiteralParameters) { + throw new IllegalArgumentException(String.format("Bad type signature: '%s'", signature)); + } + inLiteralParameters = false; if (bracketCount == 0) { - Preconditions.checkArgument(i == signature.length() - 1, "Bad type signature: '%s'", signature); - Preconditions.checkArgument(parameterStart >= 0, "Bad type signature: '%s'", signature); + if (i != signature.length() - 1) { + throw new IllegalArgumentException(String.format("Bad type signature: '%s'", signature)); + } + + if (parameterStart < 0) { + throw new IllegalArgumentException(String.format("Bad type signature: '%s'", signature)); + } + literalParameters.add(parseLiteral(signature.substring(parameterStart, i))); return new TypeSignature(baseName, parameters, literalParameters); } @@ -173,7 +209,9 @@ public static TypeSignature parseTypeSignature(final String signature) { private static Object parseLiteral(final String literal) { if (literal.startsWith("'") || literal.endsWith("'")) { - Preconditions.checkArgument(literal.startsWith("'") && literal.endsWith("'"), "Bad literal: '%s'", literal); + if (!(literal.startsWith("'") && literal.endsWith("'"))) { + throw new IllegalArgumentException(String.format("Bad literal: '%s'", literal)); + } return literal.substring(1, literal.length() - 1); } else { return Long.parseLong(literal); diff --git a/metacat-connector-hive/src/main/java/com/netflix/metacat/connector/hive/HiveConnectorFactory.java b/metacat-connector-hive/src/main/java/com/netflix/metacat/connector/hive/HiveConnectorFactory.java index 2e619aa69..bec820c87 100644 --- a/metacat-connector-hive/src/main/java/com/netflix/metacat/connector/hive/HiveConnectorFactory.java +++ b/metacat-connector-hive/src/main/java/com/netflix/metacat/connector/hive/HiveConnectorFactory.java @@ -56,7 +56,7 @@ public class HiveConnectorFactory extends SpringConnectorFactory { connectorContext.getConfiguration() .getOrDefault(HiveConfigConstants.USE_EMBEDDED_METASTORE, "false") ); - final boolean useFastHiveService = useLocalMetastore && Boolean.parseBoolean( + final boolean useFastHiveService = Boolean.parseBoolean( connectorContext.getConfiguration() .getOrDefault(HiveConfigConstants.USE_FASTHIVE_SERVICE, "false") ); diff --git a/metacat-connector-hive/src/main/java/com/netflix/metacat/connector/hive/configs/HiveConnectorConfig.java b/metacat-connector-hive/src/main/java/com/netflix/metacat/connector/hive/configs/HiveConnectorConfig.java index edbec21fc..72fabd733 100644 --- a/metacat-connector-hive/src/main/java/com/netflix/metacat/connector/hive/configs/HiveConnectorConfig.java +++ b/metacat-connector-hive/src/main/java/com/netflix/metacat/connector/hive/configs/HiveConnectorConfig.java @@ -13,7 +13,9 @@ package com.netflix.metacat.connector.hive.configs; +import com.google.common.annotations.VisibleForTesting; import com.netflix.metacat.common.server.connectors.ConnectorContext; +import com.netflix.metacat.common.server.util.DataSourceManager; import com.netflix.metacat.common.server.util.ThreadServiceManager; import com.netflix.metacat.connector.hive.HiveConnectorDatabaseService; import com.netflix.metacat.connector.hive.HiveConnectorPartitionService; @@ -25,11 +27,17 @@ import com.netflix.metacat.connector.hive.converters.HiveConnectorInfoConverter; import com.netflix.metacat.connector.hive.util.HiveConfigConstants; import lombok.extern.slf4j.Slf4j; +import 
org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.Warehouse; import org.apache.hadoop.hive.metastore.api.MetaException; +import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.datasource.DataSourceTransactionManager; +import javax.sql.DataSource; import java.net.URI; import java.util.concurrent.TimeUnit; @@ -42,6 +50,13 @@ @Slf4j @Configuration public class HiveConnectorConfig { + /** Default Query timeout in milliseconds. */ + private static final int DEFAULT_DATASTORE_TIMEOUT = 60000; + /** Default Query timeout in milliseconds for reads. */ + private static final int DEFAULT_DATASTORE_READ_TIMEOUT = 120000; + /** Default Query timeout in milliseconds for writes. */ + private static final int DEFAULT_DATASTORE_WRITE_TIMEOUT = 120000; + /** * create hive connector database service. * @@ -149,4 +164,136 @@ public ThreadServiceManager threadServiceManager(final ConnectorContext connecto 1000, "hive"); } + + /** + * create warehouse for file system calls. + * + * @param connectorContext connector config context + * @return WareHouse + */ + @Bean + public Warehouse warehouse(final ConnectorContext connectorContext) { + try { + final HiveConf conf = this.getDefaultConf(connectorContext); + connectorContext.getConfiguration().forEach(conf::set); + return new Warehouse(conf); + } catch (Exception e) { + throw new IllegalArgumentException( + String.format( + "Failed creating the hive warehouse for catalog: %s", + connectorContext.getCatalogName() + ), + e + ); + } + } + + /** + * hive DataSource. + * + * @param connectorContext connector config. + * @return data source + */ + @Bean + public DataSource hiveDataSource(final ConnectorContext connectorContext) { + final HiveConf conf = this.getDefaultConf(connectorContext); + connectorContext.getConfiguration().forEach(conf::set); + DataSourceManager.get().load( + connectorContext.getCatalogShardName(), + connectorContext.getConfiguration() + ); + return DataSourceManager.get().get(connectorContext.getCatalogShardName()); + } + + /** + * hive metadata Transaction Manager. + * + * @param hiveDataSource hive data source + * @return hive transaction manager + */ + @Bean + public DataSourceTransactionManager hiveTxManager( + @Qualifier("hiveDataSource") final DataSource hiveDataSource) { + return new DataSourceTransactionManager(hiveDataSource); + } + + /** + * hive metadata read JDBC template. Query timeout is set to control long running read queries. + * + * @param connectorContext connector config. + * @param hiveDataSource hive data source + * @return hive JDBC Template + */ + @Bean + public JdbcTemplate hiveReadJdbcTemplate( + final ConnectorContext connectorContext, + @Qualifier("hiveDataSource") final DataSource hiveDataSource) { + final JdbcTemplate result = new JdbcTemplate(hiveDataSource); + result.setQueryTimeout(getDataStoreReadTimeout(connectorContext) / 1000); + return result; + } + + /** + * hive metadata write JDBC template. Query timeout is set to control long running write queries. + * + * @param connectorContext connector config. 
+ * @param hiveDataSource hive data source + * @return hive JDBC Template + */ + @Bean + public JdbcTemplate hiveWriteJdbcTemplate( + final ConnectorContext connectorContext, + @Qualifier("hiveDataSource") final DataSource hiveDataSource) { + final JdbcTemplate result = new JdbcTemplate(hiveDataSource); + result.setQueryTimeout(getDataStoreWriteTimeout(connectorContext) / 1000); + return result; + } + + @VisibleForTesting + private HiveConf getDefaultConf( + final ConnectorContext connectorContext + ) { + final HiveConf result = new HiveConf(); + result.setBoolean(HiveConfigConstants.USE_METASTORE_LOCAL, true); + + final int dataStoreTimeout = getDataStoreTimeout(connectorContext); + result.setInt(HiveConfigConstants.JAVAX_JDO_DATASTORETIMEOUT, dataStoreTimeout); + result.setInt(HiveConfigConstants.JAVAX_JDO_DATASTOREREADTIMEOUT, dataStoreTimeout); + result.setInt(HiveConfigConstants.JAVAX_JDO_DATASTOREWRITETIMEOUT, getDataStoreWriteTimeout(connectorContext)); + result.setInt(HiveConfigConstants.HIVE_METASTORE_DS_RETRY, 0); + result.setInt(HiveConfigConstants.HIVE_HMSHANDLER_RETRY, 0); + result.set( + HiveConfigConstants.JAVAX_JDO_PERSISTENCEMANAGER_FACTORY_CLASS, + HiveConfigConstants.JAVAX_JDO_PERSISTENCEMANAGER_FACTORY + ); + result.setBoolean(HiveConfigConstants.HIVE_STATS_AUTOGATHER, false); + return result; + } + + private int getDataStoreTimeout(final ConnectorContext connectorContext) { + int result = DEFAULT_DATASTORE_TIMEOUT; + try { + result = Integer.parseInt( + connectorContext.getConfiguration().get(HiveConfigConstants.JAVAX_JDO_DATASTORETIMEOUT)); + } catch (final Exception ignored) { } + return result; + } + + private int getDataStoreReadTimeout(final ConnectorContext connectorContext) { + int result = DEFAULT_DATASTORE_READ_TIMEOUT; + try { + result = Integer.parseInt( + connectorContext.getConfiguration().get(HiveConfigConstants.JAVAX_JDO_DATASTOREREADTIMEOUT)); + } catch (final Exception ignored) { } + return result; + } + + private int getDataStoreWriteTimeout(final ConnectorContext connectorContext) { + int result = DEFAULT_DATASTORE_WRITE_TIMEOUT; + try { + result = Integer.parseInt( + connectorContext.getConfiguration().get(HiveConfigConstants.JAVAX_JDO_DATASTOREWRITETIMEOUT)); + } catch (final Exception ignored) { } + return result; + } } diff --git a/metacat-functional-tests/metacat-test-cluster/datastores/mysql/my.cnf b/metacat-functional-tests/metacat-test-cluster/datastores/mysql/my.cnf new file mode 100644 index 000000000..e4d266d0f --- /dev/null +++ b/metacat-functional-tests/metacat-test-cluster/datastores/mysql/my.cnf @@ -0,0 +1,2 @@ +[mysqld] +innodb_use_native_aio = 0 diff --git a/metacat-functional-tests/metacat-test-cluster/docker-compose.yml b/metacat-functional-tests/metacat-test-cluster/docker-compose.yml index 4236d298b..3660f027f 100644 --- a/metacat-functional-tests/metacat-test-cluster/docker-compose.yml +++ b/metacat-functional-tests/metacat-test-cluster/docker-compose.yml @@ -64,8 +64,8 @@ services: -Dmetacat.usermetadata.config.location=/etc/metacat/usermetadata.properties -Dmetacat.cache.enabled=true -Dmetacat.authorization.enabled=true - -Dmetacat.authorization.createAcl.createAclStr=embedded-fast-hive-metastore/fsmoke_acl:metacat-prod - -Dmetacat.authorization.deleteAcl.deleteAclStr=embedded-fast-hive-metastore/fsmoke_acl:metacat-prod + -Dmetacat.authorization.createAcl.createAclStr=hive-metastore/fsmoke_acl:metacat-prod + -Dmetacat.authorization.deleteAcl.deleteAclStr=hive-metastore/fsmoke_acl:metacat-prod 
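Note on the HiveConnectorConfig additions above: the catalog now exposes a shared hive DataSource (loaded through DataSourceManager under the catalog shard name), a DataSourceTransactionManager, and separate read and write JdbcTemplates whose query timeouts are parsed from the javax.jdo datastore timeout properties, falling back to the 60000/120000 ms defaults declared at the top of the class. A rough sketch of that timeout wiring follows; the property key and class name are placeholders for illustration, not the actual HiveConfigConstants values:

```java
import java.util.Map;
import javax.sql.DataSource;
import org.springframework.jdbc.core.JdbcTemplate;

/**
 * Rough sketch of the read JdbcTemplate timeout wiring added in HiveConnectorConfig.
 * The property key below is hypothetical; the real code reads
 * HiveConfigConstants.JAVAX_JDO_DATASTOREREADTIMEOUT / ...WRITETIMEOUT.
 */
final class HiveJdbcTemplateSketch {

    private static final int DEFAULT_READ_TIMEOUT_MS = 120_000;

    static JdbcTemplate readTemplate(final DataSource hiveDataSource, final Map<String, String> catalogConfig) {
        final JdbcTemplate template = new JdbcTemplate(hiveDataSource);
        // Configuration values are in milliseconds; JdbcTemplate expects seconds.
        template.setQueryTimeout(readTimeoutMs(catalogConfig) / 1000);
        return template;
    }

    private static int readTimeoutMs(final Map<String, String> catalogConfig) {
        try {
            // "datastore.read.timeout.ms" is a placeholder key used only for this sketch.
            return Integer.parseInt(catalogConfig.get("datastore.read.timeout.ms"));
        } catch (final Exception ignored) {
            return DEFAULT_READ_TIMEOUT_MS;
        }
    }

    private HiveJdbcTemplateSketch() { }
}
```

The division by 1000 mirrors the new beans: catalog configuration carries milliseconds while JdbcTemplate.setQueryTimeout expects seconds.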
-Dmetacat.service.tables.error.list.partitions.threshold=100 -Dmetacat.hive.commonview.enabled=true -Dmetacat.hive.commonview.deleteStorageTable=true @@ -87,6 +87,7 @@ services: platform: linux/x86_64 volumes: - ./datastores/mysql/docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d:ro + - ./datastores/mysql/my.cnf:/etc/mysql/conf.d/my.cnf:ro environment: - MYSQL_ROOT_PASSWORD=root_password - MYSQL_USER=metacat_user diff --git a/metacat-functional-tests/metacat-test-cluster/etc-metacat/catalog/embedded-fast-hive-metastore-shard.properties b/metacat-functional-tests/metacat-test-cluster/etc-metacat/catalog/embedded-fast-hive-metastore-shard.properties deleted file mode 100644 index 5114dc9ef..000000000 --- a/metacat-functional-tests/metacat-test-cluster/etc-metacat/catalog/embedded-fast-hive-metastore-shard.properties +++ /dev/null @@ -1,87 +0,0 @@ -# -# -# Copyright 2018 Netflix, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# - -catalog.name=embedded-fast-hive-metastore -connector.name=hive -metacat.has-data-external=true - -metacat.schema.whitelist=shard,shard1 -hive.use.embedded.metastore=true -hive.use.embedded.fastservice=true -hive.exec.compress.intermediate=true -hive.exec.max.dynamic.partitions.pernode=10000 -hive.exec.max.dynamic.partitions=300000 -hive.exec.parallel=true -hive.exec.reducers.max=200 -hive.exec.scratchdir=/tmp/metacat/scratch -hive.hadoop.supports.splittable.combineinputformat=true -hive.intermediate.compression.codec=org.apache.hadoop.io.compress.SnappyCodec -hive.intermediate.compression.type=BLOCK -hive.mapjoin.localtask.max.memory.usage=0.9 -hive.mapjoin.smalltable.filesize=100000000 -hive.mapred.mode=strict -hive.optimize.s3.query=true -hive.ppd.remove.duplicatefilters=false -hive.security.authorization.createtable.owner.grants=ALL -hive.security.authorization.enabled=true -mapred.max.split.size=200000000 -webinterface.private.actions=true -hive.metastore.warehouse.dir=file://tmp/hive/warehouse -hive.metastore.checkForDefaultDb=true - -datanucleus.autoCreateSchema=true -datanucleus.fixedDatastore=false -datanucleus.autoCreateTables=true -datanucleus.rdbms.CheckExistTablesOrViews=true - - -hadoop.tmp.dir=/tmp/metacat/hadoop/tmp -fs.s3.buffer.dir=/tmp/metacat/hadoop/s3,/tmp/metacat/hadoop/s3 -io.compression.codecs=org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec,com.hadoop.compression.lzo.LzoCodec,com.hadoop.compression.lzo.LzopCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.SnappyCodec -hive.metastore.fs.handler.class=com.netflix.metacat.connector.hive.metastore.HiveMetaStoreFsImpl - - -javax.jdo.option.name=embedded-fast-hive-metastore-shard -javax.jdo.option.username=metacat_user -javax.jdo.option.url=jdbc:mysql://hive-metastore-db:3306/shardfasthive?createDatabaseIfNotExist=true&useUnicode=true&characterEncoding=latin1&autoReconnect=true&sessionVariables=@@innodb_lock_wait_timeout=120&rewriteBatchedStatements=true&nullCatalogMeansCurrent=true 
-javax.jdo.option.driverClassName=com.mysql.jdbc.Driver -javax.jdo.option.password=metacat_user_password -javax.jdo.option.jmxEnabled=true -javax.jdo.option.maxActive=10 -javax.jdo.option.maxIdle=10 -javax.jdo.option.initialSize=2 -javax.jdo.option.minIdle=2 -javax.jdo.option.maxWait=60000 -javax.jdo.option.minEvictableIdleTimeMillis=180000 -javax.jdo.option.timeBetweenEvictionRunsMillis=10000 -javax.jdo.option.testOnBorrow=true -javax.jdo.option.testWhileIdle=true -javax.jdo.option.testOnReturn=false -javax.jdo.option.removeAbandonedTimeout=1800 -javax.jdo.option.removeAbandoned=true -javax.jdo.option.logAbandoned=true -javax.jdo.option.validationQuery=SELECT 1 -javax.jdo.option.jdbcInterceptors=org.apache.tomcat.jdbc.pool.interceptor.ConnectionState;org.apache.tomcat.jdbc.pool.interceptor.StatementFinalizer;org.apache.tomcat.jdbc.pool.interceptor.SlowQueryReportJmx(threshold=30000) -javax.jdo.option.defaultTransactionIsolation=READ_COMMITTED -javax.jdo.option.defaultAutoCommit=false - -hive.allow-drop-table=true -hive.allow-rename-table=true -metacat.schema.blacklist=information_schema -hive.use.embedded.sql.save.partitions=true -hive.use.embedded.sql.delete.partitions=true diff --git a/metacat-functional-tests/metacat-test-cluster/etc-metacat/catalog/embedded-fast-hive-metastore.properties b/metacat-functional-tests/metacat-test-cluster/etc-metacat/catalog/embedded-fast-hive-metastore.properties deleted file mode 100644 index 85170a685..000000000 --- a/metacat-functional-tests/metacat-test-cluster/etc-metacat/catalog/embedded-fast-hive-metastore.properties +++ /dev/null @@ -1,66 +0,0 @@ -metacat.thrift.port=12004 -connector.name=hive -metacat.has-data-external=true - -hive.use.embedded.metastore=true -hive.use.embedded.fastservice=true -hive.exec.compress.intermediate=true -hive.exec.max.dynamic.partitions.pernode=10000 -hive.exec.max.dynamic.partitions=300000 -hive.exec.parallel=true -hive.exec.reducers.max=200 -hive.exec.scratchdir=/tmp/metacat/scratch -hive.hadoop.supports.splittable.combineinputformat=true -hive.intermediate.compression.codec=org.apache.hadoop.io.compress.SnappyCodec -hive.intermediate.compression.type=BLOCK -hive.mapjoin.localtask.max.memory.usage=0.9 -hive.mapjoin.smalltable.filesize=100000000 -hive.mapred.mode=strict -hive.optimize.s3.query=true -hive.ppd.remove.duplicatefilters=false -hive.security.authorization.createtable.owner.grants=ALL -hive.security.authorization.enabled=true -mapred.max.split.size=200000000 -webinterface.private.actions=true -hive.metastore.warehouse.dir=file://tmp/hive/warehouse -hive.metastore.checkForDefaultDb=true - -datanucleus.autoCreateSchema=true -datanucleus.fixedDatastore=false -datanucleus.autoCreateTables=true -datanucleus.rdbms.CheckExistTablesOrViews=true - -hadoop.tmp.dir=/tmp/metacat/hadoop/tmp -fs.s3.buffer.dir=/tmp/metacat/hadoop/s3,/tmp/metacat/hadoop/s3 -io.compression.codecs=org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec,com.hadoop.compression.lzo.LzoCodec,com.hadoop.compression.lzo.LzopCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.SnappyCodec -hive.metastore.fs.handler.class=com.netflix.metacat.connector.hive.metastore.HiveMetaStoreFsImpl - -javax.jdo.option.name=embedded-fast-hive-metastore -javax.jdo.option.username=metacat_user 
-javax.jdo.option.url=jdbc:mysql://hive-metastore-db:3306/fasthive?createDatabaseIfNotExist=true&useUnicode=true&characterEncoding=latin1&autoReconnect=true&sessionVariables=@@innodb_lock_wait_timeout=120&rewriteBatchedStatements=true&nullCatalogMeansCurrent=true -javax.jdo.option.driverClassName=com.mysql.jdbc.Driver -javax.jdo.option.password=metacat_user_password -javax.jdo.option.jmxEnabled=true -javax.jdo.option.maxActive=10 -javax.jdo.option.maxIdle=10 -javax.jdo.option.initialSize=2 -javax.jdo.option.minIdle=2 -javax.jdo.option.maxWait=60000 -javax.jdo.option.minEvictableIdleTimeMillis=180000 -javax.jdo.option.timeBetweenEvictionRunsMillis=10000 -javax.jdo.option.testOnBorrow=true -javax.jdo.option.testWhileIdle=true -javax.jdo.option.testOnReturn=false -javax.jdo.option.removeAbandonedTimeout=1800 -javax.jdo.option.removeAbandoned=true -javax.jdo.option.logAbandoned=true -javax.jdo.option.validationQuery=SELECT 1 -javax.jdo.option.jdbcInterceptors=org.apache.tomcat.jdbc.pool.interceptor.ConnectionState;org.apache.tomcat.jdbc.pool.interceptor.StatementFinalizer;org.apache.tomcat.jdbc.pool.interceptor.SlowQueryReportJmx(threshold=30000) -javax.jdo.option.defaultTransactionIsolation=READ_COMMITTED -javax.jdo.option.defaultAutoCommit=false - -hive.allow-drop-table=true -hive.allow-rename-table=true -metacat.schema.blacklist=information_schema -hive.use.embedded.sql.save.partitions=true -hive.use.embedded.sql.delete.partitions=true diff --git a/metacat-functional-tests/metacat-test-cluster/etc-metacat/catalog/embedded-hive-metastore.properties b/metacat-functional-tests/metacat-test-cluster/etc-metacat/catalog/embedded-hive-metastore.properties deleted file mode 100644 index eb938c29b..000000000 --- a/metacat-functional-tests/metacat-test-cluster/etc-metacat/catalog/embedded-hive-metastore.properties +++ /dev/null @@ -1,64 +0,0 @@ -metacat.thrift.port=12003 -connector.name=hive -metacat.has-data-external=true - -hive.use.embedded.metastore=true -hive.use.embedded.fastservice=true -hive.exec.compress.intermediate=true -hive.exec.max.dynamic.partitions.pernode=10000 -hive.exec.max.dynamic.partitions=300000 -hive.exec.parallel=true -hive.exec.reducers.max=200 -hive.exec.scratchdir=/tmp/metacat/scratch -hive.hadoop.supports.splittable.combineinputformat=true -hive.intermediate.compression.codec=org.apache.hadoop.io.compress.SnappyCodec -hive.intermediate.compression.type=BLOCK -hive.mapjoin.localtask.max.memory.usage=0.9 -hive.mapjoin.smalltable.filesize=100000000 -hive.mapred.mode=strict -hive.optimize.s3.query=true -hive.ppd.remove.duplicatefilters=false -hive.security.authorization.createtable.owner.grants=ALL -hive.security.authorization.enabled=true -mapred.max.split.size=200000000 -webinterface.private.actions=true -hive.metastore.warehouse.dir=file://tmp/hive/warehouse -hive.metastore.checkForDefaultDb=true - -datanucleus.autoCreateSchema=true -datanucleus.fixedDatastore=false -datanucleus.autoCreateTables=true -datanucleus.rdbms.CheckExistTablesOrViews=true - -hadoop.tmp.dir=/tmp/metacat/hadoop/tmp -fs.s3.buffer.dir=/tmp/metacat/hadoop/s3,/tmp/metacat/hadoop/s3 -io.compression.codecs=org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec,com.hadoop.compression.lzo.LzoCodec,com.hadoop.compression.lzo.LzopCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.SnappyCodec -hive.metastore.fs.handler.class=com.netflix.metacat.connector.hive.metastore.HiveMetaStoreFsImpl - -javax.jdo.option.name=embedded-hive-metastore 
-javax.jdo.option.username=metacat_user -javax.jdo.option.url=jdbc:mysql://hive-metastore-db:3306/hive?createDatabaseIfNotExist=true&useUnicode=true&characterEncoding=latin1&autoReconnect=true&rewriteBatchedStatements=true&nullCatalogMeansCurrent=true -javax.jdo.option.driverClassName=com.mysql.jdbc.Driver -javax.jdo.option.password=metacat_user_password -javax.jdo.option.jmxEnabled=true -javax.jdo.option.maxActive=10 -javax.jdo.option.maxIdle=10 -javax.jdo.option.initialSize=2 -javax.jdo.option.minIdle=2 -javax.jdo.option.maxWait=60000 -javax.jdo.option.minEvictableIdleTimeMillis=180000 -javax.jdo.option.timeBetweenEvictionRunsMillis=10000 -javax.jdo.option.testOnBorrow=true -javax.jdo.option.testWhileIdle=true -javax.jdo.option.testOnReturn=false -javax.jdo.option.removeAbandonedTimeout=1800 -javax.jdo.option.removeAbandoned=true -javax.jdo.option.logAbandoned=true -javax.jdo.option.validationQuery=SELECT 1 -javax.jdo.option.jdbcInterceptors=org.apache.tomcat.jdbc.pool.interceptor.ConnectionState;org.apache.tomcat.jdbc.pool.interceptor.StatementFinalizer;org.apache.tomcat.jdbc.pool.interceptor.SlowQueryReportJmx(threshold=30000) -javax.jdo.option.defaultTransactionIsolation=READ_COMMITTED -javax.jdo.option.defaultAutoCommit=false - -hive.allow-drop-table=true -hive.allow-rename-table=true -metacat.schema.blacklist=information_schema diff --git a/metacat-functional-tests/metacat-test-cluster/etc-metacat/catalog/hive-metastore.properties b/metacat-functional-tests/metacat-test-cluster/etc-metacat/catalog/hive-metastore.properties index 345786630..7f197808b 100644 --- a/metacat-functional-tests/metacat-test-cluster/etc-metacat/catalog/hive-metastore.properties +++ b/metacat-functional-tests/metacat-test-cluster/etc-metacat/catalog/hive-metastore.properties @@ -26,6 +26,7 @@ hive.assume-canonical-partition-keys=true hive.metastore-cache-ttl=1m hive.metastore-refresh-interval=1m hive.metastore-timeout=40s +hive.use.embedded.fastservice=true hive.metastore.uris=thrift://hive-metastore:9083 hive.parquet.use-column-names=false hive.s3.connect-timeout=2m @@ -39,3 +40,26 @@ hive.s3.staging-directory=/tmp hive.storage-format=PARQUET node.environment=default +javax.jdo.option.name=hive-metastore +javax.jdo.option.username=metacat_user +javax.jdo.option.url=jdbc:mysql://hive-metastore-db:3306/hive?createDatabaseIfNotExist=true&useUnicode=true&characterEncoding=latin1&autoReconnect=true&sessionVariables=@@innodb_lock_wait_timeout=120&rewriteBatchedStatements=true&nullCatalogMeansCurrent=true +javax.jdo.option.driverClassName=com.mysql.jdbc.Driver +javax.jdo.option.password=metacat_user_password +javax.jdo.option.jmxEnabled=true +javax.jdo.option.maxActive=10 +javax.jdo.option.maxIdle=10 +javax.jdo.option.initialSize=2 +javax.jdo.option.minIdle=2 +javax.jdo.option.maxWait=60000 +javax.jdo.option.minEvictableIdleTimeMillis=180000 +javax.jdo.option.timeBetweenEvictionRunsMillis=10000 +javax.jdo.option.testOnBorrow=true +javax.jdo.option.testWhileIdle=true +javax.jdo.option.testOnReturn=false +javax.jdo.option.removeAbandonedTimeout=1800 +javax.jdo.option.removeAbandoned=true +javax.jdo.option.logAbandoned=true +javax.jdo.option.validationQuery=SELECT 1 +javax.jdo.option.jdbcInterceptors=org.apache.tomcat.jdbc.pool.interceptor.ConnectionState;org.apache.tomcat.jdbc.pool.interceptor.StatementFinalizer;org.apache.tomcat.jdbc.pool.interceptor.SlowQueryReportJmx(threshold=30000) +javax.jdo.option.defaultTransactionIsolation=READ_COMMITTED +javax.jdo.option.defaultAutoCommit=false diff --git 
a/metacat-functional-tests/src/functionalTest/groovy/com/netflix/metacat/MetacatSmokeSpec.groovy b/metacat-functional-tests/src/functionalTest/groovy/com/netflix/metacat/MetacatSmokeSpec.groovy index c7193abd6..ea51e85e3 100644 --- a/metacat-functional-tests/src/functionalTest/groovy/com/netflix/metacat/MetacatSmokeSpec.groovy +++ b/metacat-functional-tests/src/functionalTest/groovy/com/netflix/metacat/MetacatSmokeSpec.groovy @@ -157,8 +157,8 @@ class MetacatSmokeSpec extends Specification { def createAllTypesTable() { when: - createTable('embedded-hive-metastore', 'smoke_db', 'metacat_all_types') - def table = api.getTable('embedded-hive-metastore', 'smoke_db', 'metacat_all_types', true, true, true) + createTable('hive-metastore', 'smoke_db', 'metacat_all_types') + def table = api.getTable('hive-metastore', 'smoke_db', 'metacat_all_types', true, true, true) then: noExceptionThrown() table.fields.find { it.name == 'latest_is_available' }.type == '{(array_element: map[chararray])}' @@ -170,8 +170,8 @@ class MetacatSmokeSpec extends Specification { def catalogNames = catalogs.collect { it.catalogName } then: catalogNames.size() > 0 - catalogNames.contains('embedded-hive-metastore') - catalogNames.contains('embedded-fast-hive-metastore') +// catalogNames.contains('embedded-hive-metastore') +// catalogNames.contains('embedded-fast-hive-metastore') catalogNames.contains('s3-mysql-db') catalogNames.contains('hive-metastore') } @@ -186,7 +186,7 @@ class MetacatSmokeSpec extends Specification { @Unroll def "Test create/get table with nested fields with upper case"() { given: - def catalogName = 'embedded-fast-hive-metastore' + def catalogName = 'hive-metastore' def databaseName = 'iceberg_db' def tableName = 'iceberg_table_with_upper_case_nested_fields' def uri = isLocalEnv ? 
String.format('file:/tmp/data/') : null @@ -319,9 +319,10 @@ class MetacatSmokeSpec extends Specification { } where: catalogName | databaseName | uri | error - 'embedded-hive-metastore' | 'smoke_db0' | 'file:/tmp/embedded-hive-metastore/smoke_db00' | null - 'embedded-fast-hive-metastore' | 'fsmoke_db0' | 'file:/tmp/embedded-fast-hive-metastore/fsmoke_db00' | null - 'embedded-fast-hive-metastore' | 'shard1' | null | null +// 'embedded-hive-metastore' | 'smoke_db0' | 'file:/tmp/embedded-hive-metastore/smoke_db00' | null +// 'embedded-fast-hive-metastore' | 'fsmoke_db0' | 'file:/tmp/embedded-fast-hive-metastore/fsmoke_db00' | null +// 'embedded-fast-hive-metastore' | 'shard1' | null | null + 'hive-metastore' | 'shard1' | null | null 'hive-metastore' | 'hsmoke_db0' | 'file:/tmp/hive-metastore/hsmoke_db00' | null 's3-mysql-db' | 'smoke_db0' | null | null 'invalid-catalog' | 'smoke_db0' | null | MetacatNotFoundException.class @@ -344,9 +345,10 @@ class MetacatSmokeSpec extends Specification { api.deleteDatabase(catalogName, databaseName) where: catalogName | databaseName | uri - 'embedded-hive-metastore' | 'smoke_db0' | null - 'embedded-fast-hive-metastore' | 'fsmoke_db0' | 'file:/tmp/embedded-fast-hive-metastore/fsmoke_db00' - 'embedded-fast-hive-metastore' | 'shard1' | null +// 'embedded-hive-metastore' | 'smoke_db0' | null +// 'embedded-fast-hive-metastore' | 'fsmoke_db0' | 'file:/tmp/embedded-fast-hive-metastore/fsmoke_db00' +// 'embedded-fast-hive-metastore' | 'shard1' | null + 'hive-metastore' | 'smoke_db0' | 'file:/tmp/warehouse/smoke_db0.db' 'hive-metastore' | 'hsmoke_db0' | null } @@ -400,17 +402,17 @@ class MetacatSmokeSpec extends Specification { } where: catalogName | databaseName | tableName | setUri | setNull | error - 'embedded-hive-metastore' | 'smoke_db1' | 'test_create_table' | true | false | null - 'embedded-hive-metastore' | 'smoke_db1' | 'test_create_table' | false | false | null - 'embedded-hive-metastore' | 'smoke_db1' | 'test_create_table1'| false | true | null - 'embedded-fast-hive-metastore' | 'fsmoke_db1' | 'test_create_table' | true | false | null - 'embedded-fast-hive-metastore' | 'fsmoke_db1' | 'test_create_table' | false | false | null - 'embedded-fast-hive-metastore' | 'fsmoke_db1' | 'test_create_table1'| false | true | null - 'embedded-fast-hive-metastore' | 'fsmoke_db1' | 'test.create_table1'| false | true | InvalidMetaException.class - 'embedded-fast-hive-metastore' | 'shard' | 'test_create_table' | true | false | null - 'embedded-fast-hive-metastore' | 'shard' | 'test_create_table' | false | false | null - 'embedded-fast-hive-metastore' | 'shard' | 'test_create_table1'| false | true | null - 'embedded-fast-hive-metastore' | 'shard' | 'test.create_table1'| false | true | InvalidMetaException.class +// 'embedded-hive-metastore' | 'smoke_db1' | 'test_create_table' | true | false | null +// 'embedded-hive-metastore' | 'smoke_db1' | 'test_create_table' | false | false | null +// 'embedded-hive-metastore' | 'smoke_db1' | 'test_create_table1'| false | true | null +// 'embedded-fast-hive-metastore' | 'fsmoke_db1' | 'test_create_table' | true | false | null +// 'embedded-fast-hive-metastore' | 'fsmoke_db1' | 'test_create_table' | false | false | null +// 'embedded-fast-hive-metastore' | 'fsmoke_db1' | 'test_create_table1'| false | true | null +// 'embedded-fast-hive-metastore' | 'fsmoke_db1' | 'test.create_table1'| false | true | InvalidMetaException.class +// 'embedded-fast-hive-metastore' | 'shard' | 'test_create_table' | true | false | null +// 
'embedded-fast-hive-metastore' | 'shard' | 'test_create_table' | false | false | null +// 'embedded-fast-hive-metastore' | 'shard' | 'test_create_table1'| false | true | null +// 'embedded-fast-hive-metastore' | 'shard' | 'test.create_table1'| false | true | InvalidMetaException.class 'hive-metastore' | 'hsmoke_db1' | 'test_create_table' | true | false | null 'hive-metastore' | 'hsmoke_db1' | 'test_create_table' | false | false | null 'hive-metastore' | 'hsmoke_db1' | 'test_create_table1'| false | true | null @@ -423,7 +425,7 @@ class MetacatSmokeSpec extends Specification { def testCreateTableWithOwner() { given: - def catalogName = 'embedded-fast-hive-metastore' + def catalogName = 'hive-metastore' def databaseName = 'fsmoke_db1_owner' try { api.createDatabase(catalogName, databaseName, new DatabaseCreateRequestDto()) @@ -508,7 +510,7 @@ class MetacatSmokeSpec extends Specification { where: catalogName | databaseName | tableName | setUri | setNull | error - 'embedded-fast-hive-metastore' | 'fsmoke_db2' | 'test_create_table' | true | false | null + 'hive-metastore' | 'fsmoke_db2' | 'test_create_table' | true | false | null } def "Test create/delete database/table for #catalogName/#databaseName/#tableName with ACL"() { @@ -534,12 +536,12 @@ class MetacatSmokeSpec extends Specification { where: catalogName | databaseName | tableName - 'embedded-fast-hive-metastore' | 'fsmoke_acl' | 'test_create_table' + 'hive-metastore' | 'fsmoke_acl' | 'test_create_table' } def "Test get table names"() { given: - def catalogName = 'embedded-fast-hive-metastore' + def catalogName = 'hive-metastore' def databaseName = 'fsmoke_db_names' def database1Name = 'fsmoke_db1_names' def database2Name = 'fsmoke_db2_names' @@ -590,7 +592,7 @@ class MetacatSmokeSpec extends Specification { def "Test materialized common view create/drop"() { given: - def catalogName = 'embedded-fast-hive-metastore' + def catalogName = 'hive-metastore' def databaseName = 'iceberg_db' def storageTableName = 'st_iceberg_table' def commonViewName = 'test_common_view' @@ -693,7 +695,7 @@ class MetacatSmokeSpec extends Specification { api.doesTableExist(catalogName, databaseName, tableName) updatedTable.getMetadata().get('metadata_location') == metadataLocation1 updatedTable != null - if (catalogName == 'embedded-fast-hive-metastore') { + if (catalogName == 'hive-metastore') { updatedTable.getDataUri() == updatedUri updatedTable.getSerde().getInputFormat() == 'org.apache.hadoop.mapred.TextInputFormat' } @@ -755,13 +757,13 @@ class MetacatSmokeSpec extends Specification { cleanup: FileUtils.deleteQuietly(icebergManifestFile) where: - catalogName << ['polaris-metastore', 'embedded-fast-hive-metastore'] + catalogName << ['polaris-metastore', 'hive-metastore'] } @Unroll def "Test get iceberg table and partitions"() { given: - def catalogName = 'embedded-fast-hive-metastore' + def catalogName = 'hive-metastore' def databaseName = 'iceberg_db' def tableName = 'iceberg_table_6' def uri = isLocalEnv ? 
String.format('file:/tmp/data/') : null @@ -831,9 +833,8 @@ class MetacatSmokeSpec extends Specification { api.deleteTable(catalogName, databaseName, tableName) } - @Unroll def "Test get partitions from iceberg table using #filter"() { - def catalogName = 'embedded-fast-hive-metastore' + def catalogName = 'hive-metastore' def databaseName = 'iceberg_db' def tableName = 'iceberg_table_6' def tableDto = new TableDto( @@ -894,16 +895,16 @@ class MetacatSmokeSpec extends Specification { metadataApi.deleteDefinitionMetadata(name, true) where: catalogName | databaseName | tableName | count | result - 'embedded-hive-metastore' | 'smoke_ddb1' | 'test_create_table' | 15 | 0 - 'embedded-fast-hive-metastore' | 'fsmoke_ddb1' | 'test_create_table' | 15 | 0 - 'embedded-fast-hive-metastore' | 'shard' | 'test_create_table' | 15 | 0 +// 'embedded-hive-metastore' | 'smoke_ddb1' | 'test_create_table' | 15 | 0 +// 'embedded-fast-hive-metastore' | 'fsmoke_ddb1' | 'test_create_table' | 15 | 0 +// 'embedded-fast-hive-metastore' | 'shard' | 'test_create_table' | 15 | 0 'hive-metastore' | 'hsmoke_ddb' | 'test_create_table' | 15 | 0 'hive-metastore' | 'hsmoke_ddb1' | 'test_create_table1' | 15 | 0 'hive-metastore' | 'hsmoke_ddb1' | 'test_create_table2' | 15 | 1 's3-mysql-db' | 'smoke_ddb1' | 'test_create_table' | 15 | 0 - 'embedded-hive-metastore' | 'smoke_ddb1' | 'test_create_table' | 10 | 0 - 'embedded-fast-hive-metastore' | 'fsmoke_ddb1' | 'test_create_table' | 10 | 0 - 'embedded-fast-hive-metastore' | 'shard' | 'test_create_table' | 10 | 0 +// 'embedded-hive-metastore' | 'smoke_ddb1' | 'test_create_table' | 10 | 0 +// 'embedded-fast-hive-metastore' | 'fsmoke_ddb1' | 'test_create_table' | 10 | 0 +// 'embedded-fast-hive-metastore' | 'shard' | 'test_create_table' | 10 | 0 'hive-metastore' | 'hsmoke_ddb' | 'test_create_table' | 10 | 0 'hive-metastore' | 'hsmoke_ddb1' | 'test_create_table1' | 10 | 0 'hive-metastore' | 'hsmoke_ddb1' | 'test_create_table2' | 10 | 1 @@ -925,9 +926,9 @@ class MetacatSmokeSpec extends Specification { api.deleteTable(catalogName, databaseName, 'metacat_all_types_copy') where: catalogName | databaseName - 'embedded-hive-metastore' | 'smoke_db1' - 'embedded-fast-hive-metastore' | 'fsmoke_db1' - 'embedded-fast-hive-metastore' | 'shard' +// 'embedded-hive-metastore' | 'smoke_db1' +// 'embedded-fast-hive-metastore' | 'fsmoke_db1' +// 'embedded-fast-hive-metastore' | 'shard' 'hive-metastore' | 'hsmoke_db1' 's3-mysql-db' | 'smoke_db1' } @@ -949,9 +950,9 @@ class MetacatSmokeSpec extends Specification { api.deleteTable(catalogName, databaseName, tableName) where: catalogName | databaseName | tableName - 'embedded-hive-metastore' | 'smoke_db2' | 'part' - 'embedded-fast-hive-metastore' | 'fsmoke_db2' | 'part' - 'embedded-fast-hive-metastore' | 'shard' | 'part' +// 'embedded-hive-metastore' | 'smoke_db2' | 'part' +// 'embedded-fast-hive-metastore' | 'fsmoke_db2' | 'part' +// 'embedded-fast-hive-metastore' | 'shard' | 'part' 'hive-metastore' | 'hsmoke_db2' | 'part' 's3-mysql-db' | 'smoke_db2' | 'part' 's3-mysql-db' | 'smoke_db2' | 'PART' @@ -971,11 +972,11 @@ class MetacatSmokeSpec extends Specification { api.deleteTable(catalogName, databaseName, newTableName) where: catalogName | databaseName | tableName | external | newTableName - 'embedded-hive-metastore' | 'smoke_db3' | 'test_create_table' | null | 'test_create_table1' - 'embedded-fast-hive-metastore' | 'fsmoke_db3' | 'test_create_table' | null | 'test_create_table1' - 'embedded-fast-hive-metastore' | 'shard' | 'test_create_table' | null | 
'test_create_table1' - 'embedded-fast-hive-metastore' | 'shard' | 'test_create_tablet' | 'TRUE' | 'test_create_tablet1' - 'embedded-fast-hive-metastore' | 'shard' | 'test_create_tablef' | 'FALSE' | 'test_create_tablef1' +// 'embedded-hive-metastore' | 'smoke_db3' | 'test_create_table' | null | 'test_create_table1' +// 'embedded-fast-hive-metastore' | 'fsmoke_db3' | 'test_create_table' | null | 'test_create_table1' +// 'embedded-fast-hive-metastore' | 'shard' | 'test_create_table' | null | 'test_create_table1' +// 'embedded-fast-hive-metastore' | 'shard' | 'test_create_tablet' | 'TRUE' | 'test_create_tablet1' +// 'embedded-fast-hive-metastore' | 'shard' | 'test_create_tablef' | 'FALSE' | 'test_create_tablef1' 'hive-metastore' | 'hsmoke_db3' | 'test_create_table' | null | 'test_create_table1' } @@ -1049,8 +1050,8 @@ class MetacatSmokeSpec extends Specification { api.deleteTable(catalogName, databaseName, tableNameTagNoRenamed) where: catalogName | databaseName - 'embedded-fast-hive-metastore' | 'hsmoke_db3' - 'embedded-fast-hive-metastore' | 'fsmoke_ddb1' + 'hive-metastore' | 'hsmoke_db3' + 'hive-metastore' | 'fsmoke_ddb1' } @Unroll @@ -1080,17 +1081,17 @@ class MetacatSmokeSpec extends Specification { } where: catalogName | databaseName | tableName | viewName | error | repeat - 'embedded-hive-metastore' | 'smoke_db4' | 'part' | 'part_view' | null | false - 'embedded-hive-metastore' | 'smoke_db4' | 'part' | 'part_view' | null | true - 'embedded-fast-hive-metastore' | 'fsmoke_db4' | 'part' | 'part_view' | null | false - 'embedded-fast-hive-metastore' | 'fsmoke_db4' | 'part' | 'part_view' | null | true - 'embedded-fast-hive-metastore' | 'shard' | 'part' | 'part_view' | null | false - 'embedded-fast-hive-metastore' | 'shard' | 'part' | 'part_view' | null | true +// 'embedded-hive-metastore' | 'smoke_db4' | 'part' | 'part_view' | null | false +// 'embedded-hive-metastore' | 'smoke_db4' | 'part' | 'part_view' | null | true +// 'embedded-fast-hive-metastore' | 'fsmoke_db4' | 'part' | 'part_view' | null | false +// 'embedded-fast-hive-metastore' | 'fsmoke_db4' | 'part' | 'part_view' | null | true +// 'embedded-fast-hive-metastore' | 'shard' | 'part' | 'part_view' | null | false +// 'embedded-fast-hive-metastore' | 'shard' | 'part' | 'part_view' | null | true 'hive-metastore' | 'hsmoke_db4' | 'part' | 'part_view' | null | false 'hive-metastore' | 'hsmoke_db4' | 'part' | 'part_view' | null | true - 'embedded-hive-metastore' | 'smoke_db4' | 'metacat_all_types' | 'part_view' | null | false - 'embedded-fast-hive-metastore' | 'fsmoke_db4' | 'metacat_all_types' | 'part_view' | null | false - 'embedded-fast-hive-metastore' | 'shard' | 'metacat_all_types' | 'part_view' | null | false +// 'embedded-hive-metastore' | 'smoke_db4' | 'metacat_all_types' | 'part_view' | null | false +// 'embedded-fast-hive-metastore' | 'fsmoke_db4' | 'metacat_all_types' | 'part_view' | null | false +// 'embedded-fast-hive-metastore' | 'shard' | 'metacat_all_types' | 'part_view' | null | false 's3-mysql-db' | 'smoke_db4' | 'part' | 'part_view' | null | false 'xyz' | 'smoke_db4' | 'z' | 'part_view' | MetacatNotFoundException.class | false } @@ -1121,15 +1122,15 @@ class MetacatSmokeSpec extends Specification { } where: catalogName | databaseName | tableName | viewName | error | repeat - 'embedded-hive-metastore' | 'smoke_db4' | 'part' | 'part_view' | null | false - 'embedded-hive-metastore' | 'smoke_db4' | 'part' | 'part_view' | null | true - 'embedded-fast-hive-metastore' | 'fsmoke_db4' | 'part' | 'part_view' | null | false - 
'embedded-fast-hive-metastore' | 'fsmoke_db4' | 'part' | 'part_view' | null | true - 'embedded-fast-hive-metastore' | 'shard' | 'part' | 'part_view' | null | false - 'embedded-fast-hive-metastore' | 'shard' | 'part' | 'part_view' | null | true +// 'embedded-hive-metastore' | 'smoke_db4' | 'part' | 'part_view' | null | false +// 'embedded-hive-metastore' | 'smoke_db4' | 'part' | 'part_view' | null | true +// 'embedded-fast-hive-metastore' | 'fsmoke_db4' | 'part' | 'part_view' | null | false +// 'embedded-fast-hive-metastore' | 'fsmoke_db4' | 'part' | 'part_view' | null | true +// 'embedded-fast-hive-metastore' | 'shard' | 'part' | 'part_view' | null | false +// 'embedded-fast-hive-metastore' | 'shard' | 'part' | 'part_view' | null | true 'hive-metastore' | 'hsmoke_db4' | 'part' | 'part_view' | null | false 'hive-metastore' | 'hsmoke_db4' | 'part' | 'part_view' | null | true - 'embedded-hive-metastore' | 'smoke_db4' | 'metacat_all_types' | 'part_view' | null | false +// 'embedded-hive-metastore' | 'smoke_db4' | 'metacat_all_types' | 'part_view' | null | false 's3-mysql-db' | 'smoke_db4' | 'part' | 'part_view' | null | false 'xyz' | 'smoke_db4' | 'z' | 'part_view' | MetacatNotFoundException.class | false } @@ -1143,9 +1144,9 @@ class MetacatSmokeSpec extends Specification { noExceptionThrown() where: catalogName | databaseName | tableName - 'embedded-hive-metastore' | 'smoke_db' | 'part' - 'embedded-fast-hive-metastore' | 'fsmoke_db' | 'part' - 'embedded-fast-hive-metastore' | 'shard' | 'part' +// 'embedded-hive-metastore' | 'smoke_db' | 'part' +// 'embedded-fast-hive-metastore' | 'fsmoke_db' | 'part' +// 'embedded-fast-hive-metastore' | 'shard' | 'part' 'hive-metastore' | 'hsmoke_db' | 'part' 's3-mysql-db' | 'smoke_db' | 'part' } @@ -1238,8 +1239,8 @@ class MetacatSmokeSpec extends Specification { where: catalogName | databaseName | tableName | auditOnlyPartSize - 'embedded-fast-hive-metastore' | 'fsmoke_db' | 'part' | 2 - 'embedded-fast-hive-metastore' | 'audit' | 'fsmoke_db__part__audit_12345' | 1 + 'hive-metastore' | 'fsmoke_db' | 'part' | 2 + 'hive-metastore' | 'audit' | 'fsmoke_db__part__audit_12345' | 1 } @Unroll @@ -1300,27 +1301,27 @@ class MetacatSmokeSpec extends Specification { } where: catalogName | databaseName | tableName | partitionName | uriSuffix | uriResult | repeat | alter | error - 'embedded-hive-metastore' | 'smoke_db' | 'part' | 'one=xyz' | '' | '' | false | false | null - 'embedded-hive-metastore' | 'smoke_db' | 'part' | 'one=xyz' | '/' | '' | false | false | null - 'embedded-hive-metastore' | 'smoke_db' | 'part' | 'one=xyz' | '' | '' | true | false | null - 'embedded-hive-metastore' | 'smoke_db' | 'part' | 'one=xyz' | '' | '' | true | true | null - 'embedded-hive-metastore' | 'smoke_db' | 'part' | 'two=xyz' | '' | '' | false | false | MetacatBadRequestException.class - 'embedded-fast-hive-metastore' | 'fsmoke_db' | 'part' | 'one=xyz' | '' | '' | false | false | null - 'embedded-fast-hive-metastore' | 'fsmoke_db' | 'part' | 'one=xyz' | '/' | '' | false | false | null - 'embedded-fast-hive-metastore' | 'fsmoke_db' | 'part' | 'one=xyz' | '' | '' | true | false | null - 'embedded-fast-hive-metastore' | 'fsmoke_db' | 'part' | 'one=xyz' | '/' | '' | true | false | null - 'embedded-fast-hive-metastore' | 'fsmoke_db' | 'part' | 'one=xyz' | '' | '' | true | true | null - 'embedded-fast-hive-metastore' | 'fsmoke_db' | 'part' | 'one=xyz' | '/' | '' | true | true | null - 'embedded-fast-hive-metastore' | 'fsmoke_db' | 'part' | "one=xy'z" | '' | '' | false | false | null - 
'embedded-fast-hive-metastore' | 'fsmoke_db' | 'part' | "one=xy'z" | '' | '' | false | true | null - 'embedded-fast-hive-metastore' | 'fsmoke_db' | 'part' | 'two=xyz' | '' | '' | false | false | MetacatBadRequestException.class - 'embedded-fast-hive-metastore' | 'shard' | 'part' | 'one=xyz' | '' | '' | false | false | null - 'embedded-fast-hive-metastore' | 'shard' | 'part' | 'one=xyz' | '/' | '' | false | false | null - 'embedded-fast-hive-metastore' | 'shard' | 'part' | 'one=xyz' | '' | '' | true | false | null - 'embedded-fast-hive-metastore' | 'shard' | 'part' | 'one=xyz' | '/' | '' | true | false | null - 'embedded-fast-hive-metastore' | 'shard' | 'part' | 'one=xyz' | '' | '' | true | true | null - 'embedded-fast-hive-metastore' | 'shard' | 'part' | 'one=xyz' | '/' | '' | true | true | null - 'embedded-fast-hive-metastore' | 'shard' | 'part' | 'two=xyz' | '' | '' | false | false | MetacatBadRequestException.class +// 'embedded-hive-metastore' | 'smoke_db' | 'part' | 'one=xyz' | '' | '' | false | false | null +// 'embedded-hive-metastore' | 'smoke_db' | 'part' | 'one=xyz' | '/' | '' | false | false | null +// 'embedded-hive-metastore' | 'smoke_db' | 'part' | 'one=xyz' | '' | '' | true | false | null +// 'embedded-hive-metastore' | 'smoke_db' | 'part' | 'one=xyz' | '' | '' | true | true | null +// 'embedded-hive-metastore' | 'smoke_db' | 'part' | 'two=xyz' | '' | '' | false | false | MetacatBadRequestException.class +// 'embedded-fast-hive-metastore' | 'fsmoke_db' | 'part' | 'one=xyz' | '' | '' | false | false | null +// 'embedded-fast-hive-metastore' | 'fsmoke_db' | 'part' | 'one=xyz' | '/' | '' | false | false | null +// 'embedded-fast-hive-metastore' | 'fsmoke_db' | 'part' | 'one=xyz' | '' | '' | true | false | null +// 'embedded-fast-hive-metastore' | 'fsmoke_db' | 'part' | 'one=xyz' | '/' | '' | true | false | null +// 'embedded-fast-hive-metastore' | 'fsmoke_db' | 'part' | 'one=xyz' | '' | '' | true | true | null +// 'embedded-fast-hive-metastore' | 'fsmoke_db' | 'part' | 'one=xyz' | '/' | '' | true | true | null +// 'embedded-fast-hive-metastore' | 'fsmoke_db' | 'part' | "one=xy'z" | '' | '' | false | false | null +// 'embedded-fast-hive-metastore' | 'fsmoke_db' | 'part' | "one=xy'z" | '' | '' | false | true | null +// 'embedded-fast-hive-metastore' | 'fsmoke_db' | 'part' | 'two=xyz' | '' | '' | false | false | MetacatBadRequestException.class +// 'embedded-fast-hive-metastore' | 'shard' | 'part' | 'one=xyz' | '' | '' | false | false | null +// 'embedded-fast-hive-metastore' | 'shard' | 'part' | 'one=xyz' | '/' | '' | false | false | null +// 'embedded-fast-hive-metastore' | 'shard' | 'part' | 'one=xyz' | '' | '' | true | false | null +// 'embedded-fast-hive-metastore' | 'shard' | 'part' | 'one=xyz' | '/' | '' | true | false | null +// 'embedded-fast-hive-metastore' | 'shard' | 'part' | 'one=xyz' | '' | '' | true | true | null +// 'embedded-fast-hive-metastore' | 'shard' | 'part' | 'one=xyz' | '/' | '' | true | true | null +// 'embedded-fast-hive-metastore' | 'shard' | 'part' | 'two=xyz' | '' | '' | false | false | MetacatBadRequestException.class 'hive-metastore' | 'hsmoke_db' | 'part' | 'one=xyz' | '' | '' | false | false | null 'hive-metastore' | 'hsmoke_db' | 'part' | 'one=xyz' | '' | '' | true | false | null 'hive-metastore' | 'hsmoke_db' | 'part' | 'one=xyz' | '' | '' | true | true | null @@ -1353,15 +1354,15 @@ class MetacatSmokeSpec extends Specification { partitionApi.deletePartitions(catalogName, databaseName, tableName, [partitionName]) where: catalogName | 
databaseName | tableName | partitionName | serdeNull | locationNull - 'embedded-hive-metastore' | 'smoke_db7' | 'part' | 'one=xyz' | true | true - 'embedded-hive-metastore' | 'smoke_db7' | 'part' | 'one=xyz' | true | false - 'embedded-hive-metastore' | 'smoke_db7' | 'part' | 'one=xyz' | false | true - 'embedded-fast-hive-metastore' | 'fsmoke_db7' | 'part' | 'one=xyz' | true | true - 'embedded-fast-hive-metastore' | 'fsmoke_db7' | 'part' | 'one=xyz' | true | false - 'embedded-fast-hive-metastore' | 'fsmoke_db7' | 'part' | 'one=xyz' | false | true - 'embedded-fast-hive-metastore' | 'shard' | 'part' | 'one=xyz' | true | true - 'embedded-fast-hive-metastore' | 'shard' | 'part' | 'one=xyz' | true | false - 'embedded-fast-hive-metastore' | 'shard' | 'part' | 'one=xyz' | false | true +// 'embedded-hive-metastore' | 'smoke_db7' | 'part' | 'one=xyz' | true | true +// 'embedded-hive-metastore' | 'smoke_db7' | 'part' | 'one=xyz' | true | false +// 'embedded-hive-metastore' | 'smoke_db7' | 'part' | 'one=xyz' | false | true +// 'embedded-fast-hive-metastore' | 'fsmoke_db7' | 'part' | 'one=xyz' | true | true +// 'embedded-fast-hive-metastore' | 'fsmoke_db7' | 'part' | 'one=xyz' | true | false +// 'embedded-fast-hive-metastore' | 'fsmoke_db7' | 'part' | 'one=xyz' | false | true +// 'embedded-fast-hive-metastore' | 'shard' | 'part' | 'one=xyz' | true | true +// 'embedded-fast-hive-metastore' | 'shard' | 'part' | 'one=xyz' | true | false +// 'embedded-fast-hive-metastore' | 'shard' | 'part' | 'one=xyz' | false | true 'hive-metastore' | 'hsmoke_db7' | 'part' | 'one=xyz' | true | true 'hive-metastore' | 'hsmoke_db7' | 'part' | 'one=xyz' | true | false 'hive-metastore' | 'hsmoke_db7' | 'part' | 'one=xyz' | false | true @@ -1480,7 +1481,7 @@ class MetacatSmokeSpec extends Specification { then: noExceptionThrown() where: - catalogName << ['embedded-hive-metastore', 'hive-metastore', 's3-mysql-db'] + catalogName << ['hive-metastore', 's3-mysql-db'] } @Unroll @@ -1529,7 +1530,7 @@ class MetacatSmokeSpec extends Specification { then: noExceptionThrown() where: - catalogName << ['embedded-hive-metastore', 'hive-metastore', 's3-mysql-db'] + catalogName << ['hive-metastore', 's3-mysql-db'] } @Unroll @@ -1537,17 +1538,17 @@ class MetacatSmokeSpec extends Specification { given: if (cursor == 'start') { def uri = isLocalEnv ? 
'file:/tmp/abc' : null - createTable('embedded-hive-metastore', 'smoke_db', 'parts') - partitionApi.savePartitions('embedded-hive-metastore', 'smoke_db', 'parts', new PartitionsSaveRequestDto(partitions: PigDataDtoProvider.getPartitions('embedded-hive-metastore', 'smoke_db', 'parts', 'one=xyz/total=1', uri, 10))) + createTable('hive-metastore', 'smoke_db', 'parts') + partitionApi.savePartitions('hive-metastore', 'smoke_db', 'parts', new PartitionsSaveRequestDto(partitions: PigDataDtoProvider.getPartitions('hive-metastore', 'smoke_db', 'parts', 'one=xyz/total=1', uri, 10))) } - def partitionKeys = partitionApi.getPartitionKeys('embedded-hive-metastore', 'smoke_db', 'parts', filter, null, null, offset, limit) + def partitionKeys = partitionApi.getPartitionKeys('hive-metastore', 'smoke_db', 'parts', filter, null, null, offset, limit) expect: partitionKeys.size() == result cleanup: if (cursor == 'end') { - def partitionKeysToDrop = partitionApi.getPartitionKeys('embedded-hive-metastore', 'smoke_db', 'parts', null, null, null, null, null) - partitionApi.deletePartitions('embedded-hive-metastore', 'smoke_db', 'parts', partitionKeysToDrop) + def partitionKeysToDrop = partitionApi.getPartitionKeys('hive-metastore', 'smoke_db', 'parts', null, null, null, null, null) + partitionApi.deletePartitions('hive-metastore', 'smoke_db', 'parts', partitionKeysToDrop) } where: cursor | filter | offset | limit | result @@ -1584,19 +1585,19 @@ class MetacatSmokeSpec extends Specification { given: if (cursor == 'start') { def uri = isLocalEnv ? 'file:/tmp/abc' : null - createTable('embedded-hive-metastore', 'smoke_db', 'part_hyphen') - partitionApi.savePartitions('embedded-hive-metastore', 'smoke_db', 'part_hyphen', new PartitionsSaveRequestDto(partitions: PigDataDtoProvider.getPartitions('embedded-hive-metastore', 'smoke_db', 'part_hyphen', 'one-one=xyz/total=1', uri, 10))) - partitionApi.savePartitions('embedded-hive-metastore', 'smoke_db', 'part_hyphen', new PartitionsSaveRequestDto(partitions: PigDataDtoProvider.getPartitions('embedded-hive-metastore', 'smoke_db', 'part_hyphen', 'one-one=__HIVE_DEFAULT_PARTITION__/total=1', uri, 10))) - partitionApi.savePartitions('embedded-hive-metastore', 'smoke_db', 'part_hyphen', new PartitionsSaveRequestDto(partitions: [PigDataDtoProvider.getPartition('embedded-hive-metastore', 'smoke_db', 'part_hyphen', 'one-one=xyz/total=__HIVE_DEFAULT_PARTITION__', uri)])) + createTable('hive-metastore', 'smoke_db', 'part_hyphen') + partitionApi.savePartitions('hive-metastore', 'smoke_db', 'part_hyphen', new PartitionsSaveRequestDto(partitions: PigDataDtoProvider.getPartitions('hive-metastore', 'smoke_db', 'part_hyphen', 'one-one=xyz/total=1', uri, 10))) + partitionApi.savePartitions('hive-metastore', 'smoke_db', 'part_hyphen', new PartitionsSaveRequestDto(partitions: PigDataDtoProvider.getPartitions('hive-metastore', 'smoke_db', 'part_hyphen', 'one-one=__HIVE_DEFAULT_PARTITION__/total=1', uri, 10))) + partitionApi.savePartitions('hive-metastore', 'smoke_db', 'part_hyphen', new PartitionsSaveRequestDto(partitions: [PigDataDtoProvider.getPartition('hive-metastore', 'smoke_db', 'part_hyphen', 'one-one=xyz/total=__HIVE_DEFAULT_PARTITION__', uri)])) } - def partitionKeys = partitionApi.getPartitionKeys('embedded-hive-metastore', 'smoke_db', 'part_hyphen', filter, null, null, offset, limit) + def partitionKeys = partitionApi.getPartitionKeys('hive-metastore', 'smoke_db', 'part_hyphen', filter, null, null, offset, limit) expect: partitionKeys.size() == result cleanup: if (cursor == 'end') { 
- def partitionKeysToDrop = partitionApi.getPartitionKeys('embedded-hive-metastore', 'smoke_db', 'part_hyphen', null, null, null, null, null) - partitionApi.deletePartitions('embedded-hive-metastore', 'smoke_db', 'part_hyphen', partitionKeysToDrop) + def partitionKeysToDrop = partitionApi.getPartitionKeys('hive-metastore', 'smoke_db', 'part_hyphen', null, null, null, null, null) + partitionApi.deletePartitions('hive-metastore', 'smoke_db', 'part_hyphen', partitionKeysToDrop) } where: cursor | filter | offset | limit | result @@ -1632,21 +1633,21 @@ class MetacatSmokeSpec extends Specification { partitionApi.deletePartitions(catalogName, databaseName, tableName, partitionNames) where: catalogName | databaseName | tableName | partitionName | count | alter - 'embedded-hive-metastore' | 'smoke_db5' | 'part' | 'one=xyz' | 10 | 0 - 'embedded-hive-metastore' | 'smoke_db5' | 'part' | 'one=xyz' | 10 | 10 - 'embedded-hive-metastore' | 'smoke_db5' | 'part' | 'one=xyz' | 10 | 5 - 'embedded-fast-hive-metastore' | 'fsmoke_db5' | 'part' | 'one=xyz' | 10 | 0 - 'embedded-fast-hive-metastore' | 'fsmoke_db5' | 'part' | 'one=xyz' | 10 | 10 - 'embedded-fast-hive-metastore' | 'fsmoke_db5' | 'part' | 'one=xyz' | 10 | 5 +// 'embedded-hive-metastore' | 'smoke_db5' | 'part' | 'one=xyz' | 10 | 0 +// 'embedded-hive-metastore' | 'smoke_db5' | 'part' | 'one=xyz' | 10 | 10 +// 'embedded-hive-metastore' | 'smoke_db5' | 'part' | 'one=xyz' | 10 | 5 +// 'embedded-fast-hive-metastore' | 'fsmoke_db5' | 'part' | 'one=xyz' | 10 | 0 +// 'embedded-fast-hive-metastore' | 'fsmoke_db5' | 'part' | 'one=xyz' | 10 | 10 +// 'embedded-fast-hive-metastore' | 'fsmoke_db5' | 'part' | 'one=xyz' | 10 | 5 'hive-metastore' | 'hsmoke_db5' | 'part' | 'one=xyz' | 10 | 0 'hive-metastore' | 'hsmoke_db5' | 'part' | 'one=xyz' | 10 | 10 'hive-metastore' | 'hsmoke_db5' | 'part' | 'one=xyz' | 10 | 5 - 'embedded-hive-metastore' | 'smoke_db5' | 'part' | 'one=xyz' | 15 | 0 - 'embedded-hive-metastore' | 'smoke_db5' | 'part' | 'one=xyz' | 15 | 15 - 'embedded-hive-metastore' | 'smoke_db5' | 'part' | 'one=xyz' | 15 | 5 - 'embedded-fast-hive-metastore' | 'fsmoke_db5' | 'part' | 'one=xyz' | 15 | 0 - 'embedded-fast-hive-metastore' | 'fsmoke_db5' | 'part' | 'one=xyz' | 15 | 15 - 'embedded-fast-hive-metastore' | 'fsmoke_db5' | 'part' | 'one=xyz' | 15 | 5 +// 'embedded-hive-metastore' | 'smoke_db5' | 'part' | 'one=xyz' | 15 | 0 +// 'embedded-hive-metastore' | 'smoke_db5' | 'part' | 'one=xyz' | 15 | 15 +// 'embedded-hive-metastore' | 'smoke_db5' | 'part' | 'one=xyz' | 15 | 5 +// 'embedded-fast-hive-metastore' | 'fsmoke_db5' | 'part' | 'one=xyz' | 15 | 0 +// 'embedded-fast-hive-metastore' | 'fsmoke_db5' | 'part' | 'one=xyz' | 15 | 15 +// 'embedded-fast-hive-metastore' | 'fsmoke_db5' | 'part' | 'one=xyz' | 15 | 5 'hive-metastore' | 'hsmoke_db5' | 'part' | 'one=xyz' | 15 | 0 'hive-metastore' | 'hsmoke_db5' | 'part' | 'one=xyz' | 15 | 15 'hive-metastore' | 'hsmoke_db5' | 'part' | 'one=xyz' | 15 | 5 @@ -1655,7 +1656,7 @@ class MetacatSmokeSpec extends Specification { @Unroll def "Test Get partitions threshold"() { given: - def catalogName = 'embedded-fast-hive-metastore' + def catalogName = 'hive-metastore' def databaseName = 'fsmoke_db5' def tableName = 'part' def request = new PartitionsSaveRequestDto() @@ -1708,12 +1709,12 @@ class MetacatSmokeSpec extends Specification { api.deleteTable(catalogName, databaseName, tableName) where: catalogName | databaseName | tableName | tags | repeat - 'embedded-hive-metastore' | 'smoke_db6' | 'part' | ['test'] as Set | true - 
'embedded-hive-metastore' | 'smoke_db6' | 'part' | ['test', 'unused'] as Set | false - 'embedded-fast-hive-metastore' | 'fsmoke_db6' | 'part' | ['test'] as Set | true - 'embedded-fast-hive-metastore' | 'fsmoke_db6' | 'part' | ['test', 'unused'] as Set | false - 'embedded-fast-hive-metastore' | 'shard' | 'part' | ['test'] as Set | true - 'embedded-fast-hive-metastore' | 'shard' | 'part' | ['test', 'unused'] as Set | false +// 'embedded-hive-metastore' | 'smoke_db6' | 'part' | ['test'] as Set | true +// 'embedded-hive-metastore' | 'smoke_db6' | 'part' | ['test', 'unused'] as Set | false +// 'embedded-fast-hive-metastore' | 'fsmoke_db6' | 'part' | ['test'] as Set | true +// 'embedded-fast-hive-metastore' | 'fsmoke_db6' | 'part' | ['test', 'unused'] as Set | false +// 'embedded-fast-hive-metastore' | 'shard' | 'part' | ['test'] as Set | true +// 'embedded-fast-hive-metastore' | 'shard' | 'part' | ['test', 'unused'] as Set | false 'hive-metastore' | 'hsmoke_db6' | 'part' | ['test'] as Set | true 'hive-metastore' | 'hsmoke_db6' | 'part' | ['test', 'unused'] as Set | false 's3-mysql-db' | 'smoke_db6' | 'part' | ['test'] as Set | true @@ -1779,12 +1780,12 @@ class MetacatSmokeSpec extends Specification { where: catalogName | databaseName | tableName | tags | repeat - 'embedded-hive-metastore' | 'smoke_db6' | 'part' | ['test_tag'] as List | true - 'embedded-hive-metastore' | 'smoke_db6' | 'part' | ['test_tag', 'unused'] as List | false - 'embedded-fast-hive-metastore' | 'fsmoke_db6' | 'part' | ['test_tag'] as List | true - 'embedded-fast-hive-metastore' | 'fsmoke_db6' | 'part' | ['test_tag', 'unused'] as List | false - 'embedded-fast-hive-metastore' | 'shard' | 'part' | ['test_tag'] as List | true - 'embedded-fast-hive-metastore' | 'shard' | 'part' | ['test_tag', 'unused'] as List | false +// 'embedded-hive-metastore' | 'smoke_db6' | 'part' | ['test_tag'] as List | true +// 'embedded-hive-metastore' | 'smoke_db6' | 'part' | ['test_tag', 'unused'] as List | false +// 'embedded-fast-hive-metastore' | 'fsmoke_db6' | 'part' | ['test_tag'] as List | true +// 'embedded-fast-hive-metastore' | 'fsmoke_db6' | 'part' | ['test_tag', 'unused'] as List | false +// 'embedded-fast-hive-metastore' | 'shard' | 'part' | ['test_tag'] as List | true +// 'embedded-fast-hive-metastore' | 'shard' | 'part' | ['test_tag', 'unused'] as List | false 'hive-metastore' | 'hsmoke_db6' | 'part' | ['test_tag'] as List | true 'hive-metastore' | 'hsmoke_db6' | 'part' | ['test_tag', 'unused'] as List | false 's3-mysql-db' | 'smoke_db6' | 'part' | ['test_tag'] as List | true @@ -1825,9 +1826,9 @@ class MetacatSmokeSpec extends Specification { api.deleteMView(catalogName, databaseName, tableName, viewName) where: catalogName | databaseName | tableName | viewName |tags - 'embedded-hive-metastore' | 'smoke_db4' | 'part' | 'part_view' | ['test_tag'] as List - 'embedded-fast-hive-metastore' | 'fsmoke_db4' | 'part' | 'part_view' | ['test_tag'] as List - 'embedded-fast-hive-metastore' | 'shard' | 'part' | 'part_view' | ['test_tag'] as List +// 'embedded-hive-metastore' | 'smoke_db4' | 'part' | 'part_view' | ['test_tag'] as List +// 'embedded-fast-hive-metastore' | 'fsmoke_db4' | 'part' | 'part_view' | ['test_tag'] as List +// 'embedded-fast-hive-metastore' | 'shard' | 'part' | 'part_view' | ['test_tag'] as List 'hive-metastore' | 'hsmoke_db4' | 'part' | 'part_view' | ['test_tag'] as List 's3-mysql-db' | 'smoke_db4' | 'part' | 'part_view' | ['test_tag'] as List } @@ -1841,10 +1842,10 @@ class MetacatSmokeSpec extends Specification { 
thrown(MetacatNotFoundException) where: catalogName | databaseName | tableName | partitionNames - 'embedded-hive-metastore' | 'smoke_db' | 'part' | ['one=invalid'] - 'embedded-hive-metastore' | 'smoke_db' | 'part' | ['one=test', 'one=invalid'] - 'embedded-hive-metastore' | 'smoke_db' | 'part' | ['one=test', 'one=invalid'] - 'embedded-hive-metastore' | 'smoke_db' | 'invalid' | ['one=test', 'one=invalid'] +// 'embedded-hive-metastore' | 'smoke_db' | 'part' | ['one=invalid'] +// 'embedded-hive-metastore' | 'smoke_db' | 'part' | ['one=test', 'one=invalid'] +// 'embedded-hive-metastore' | 'smoke_db' | 'part' | ['one=test', 'one=invalid'] +// 'embedded-hive-metastore' | 'smoke_db' | 'invalid' | ['one=test', 'one=invalid'] 'hive-metastore' | 'hsmoke_db' | 'part' | ['one=invalid'] 'hive-metastore' | 'hsmoke_db' | 'part' | ['one=test', 'one=invalid'] 'hive-metastore' | 'hsmoke_db' | 'part' | ['one=test', 'one=invalid'] @@ -1865,13 +1866,13 @@ class MetacatSmokeSpec extends Specification { api.deleteTable(qName.getCatalogName(), qName.getDatabaseName(), qName.getTableName()) where: name | force - 'embedded-hive-metastore/smoke_db/dm' | false - 'embedded-hive-metastore/smoke_db/dm' | true + 'hive-metastore/smoke_db/dm' | false + 'hive-metastore/smoke_db/dm' | true } def "List definition metadata valid and invalid sortBy" () { given: - def qName = "embedded-hive-metastore/smoke_db/dm" + def qName = "hive-metastore/smoke_db/dm" when: metadataApi.getDefinitionMetadataList("zz_invalid", null, null, null, null, null, qName, null) then: @@ -1897,9 +1898,9 @@ class MetacatSmokeSpec extends Specification { then: list.isEmpty() where: - name << ['embedded-hive-metastore/invalid/dm', - 'embedded-hive-metastore/invalid/dm/vm', - 'embedded-hive-metastore/invalid/dm/vm=1'] + name << ['hive-metastore/invalid/dm', + 'hive-metastore/invalid/dm/vm', + 'hive-metastore/invalid/dm/vm=1'] } def "Test Set tags for all scenarios"() { @@ -1983,7 +1984,7 @@ class MetacatSmokeSpec extends Specification { def 'testCloneTableE2E'() { given: - def catalogName = 'embedded-fast-hive-metastore' + def catalogName = 'hive-metastore' def databaseName = 'iceberg_db' // First parent child connected component @@ -2048,16 +2049,16 @@ class MetacatSmokeSpec extends Specification { then: // Test Parent 1 parentChildInfo assert parent1Table.definitionMetadata.get("parentChildRelationInfo").get("isParent").booleanValue() - assert parentChildRelV1.getChildren(catalogName, databaseName, parent1) == [new ChildInfoDto("embedded-fast-hive-metastore/iceberg_db/child11", "CLONE", "c11_uuid")] as Set + assert parentChildRelV1.getChildren(catalogName, databaseName, parent1) == [new ChildInfoDto("hive-metastore/iceberg_db/child11", "CLONE", "c11_uuid")] as Set assert parentChildRelV1.getParents(catalogName, databaseName, parent1).isEmpty() // Test Child11 parentChildInfo assert !child11Table.definitionMetadata.get("parentChildRelationInfo").has("isParent") assert child11Table.definitionMetadata.get("random_key").asText() == "random_value" JSONAssert.assertEquals(child11Table.definitionMetadata.get("parentChildRelationInfo").toString(), - '{"parentInfos":[{"name":"embedded-fast-hive-metastore/iceberg_db/parent1","relationType":"CLONE", "uuid":"p1_uuid"}]}', + '{"parentInfos":[{"name":"hive-metastore/iceberg_db/parent1","relationType":"CLONE", "uuid":"p1_uuid"}]}', false) - assert parentChildRelV1.getParents(catalogName, databaseName, child11) == [new ParentInfoDto("embedded-fast-hive-metastore/iceberg_db/parent1", "CLONE", "p1_uuid")] as Set + assert 
parentChildRelV1.getParents(catalogName, databaseName, child11) == [new ParentInfoDto("hive-metastore/iceberg_db/parent1", "CLONE", "p1_uuid")] as Set assert parentChildRelV1.getChildren(catalogName, databaseName, child11).isEmpty() /* @@ -2077,17 +2078,17 @@ class MetacatSmokeSpec extends Specification { // Test Parent 1 parentChildInfo assert parent1Table.definitionMetadata.get("parentChildRelationInfo").get("isParent").booleanValue() assert parentChildRelV1.getChildren(catalogName, databaseName, parent1) == [ - new ChildInfoDto("embedded-fast-hive-metastore/iceberg_db/child11", "CLONE", "c11_uuid"), + new ChildInfoDto("hive-metastore/iceberg_db/child11", "CLONE", "c11_uuid"), ] as Set assert parentChildRelV1.getParents(catalogName, databaseName, parent1).isEmpty() // Test Child11 parentChildInfo assert !child11Table.definitionMetadata.get("parentChildRelationInfo").has("isParent") assert child11Table.definitionMetadata.get("random_key").asText() == "random_value" JSONAssert.assertEquals(child11Table.definitionMetadata.get("parentChildRelationInfo").toString(), - '{"parentInfos":[{"name":"embedded-fast-hive-metastore/iceberg_db/parent1","relationType":"CLONE", "uuid":"p1_uuid"}]}', + '{"parentInfos":[{"name":"hive-metastore/iceberg_db/parent1","relationType":"CLONE", "uuid":"p1_uuid"}]}', false) assert parentChildRelV1.getChildren(catalogName, databaseName, child11).isEmpty() - assert parentChildRelV1.getParents(catalogName, databaseName, child11) == [new ParentInfoDto("embedded-fast-hive-metastore/iceberg_db/parent1", "CLONE", "p1_uuid")] as Set + assert parentChildRelV1.getParents(catalogName, databaseName, child11) == [new ParentInfoDto("hive-metastore/iceberg_db/parent1", "CLONE", "p1_uuid")] as Set /* Step 3: create another table with the same child1 name but different uuid under the same parent should fail @@ -2148,17 +2149,17 @@ class MetacatSmokeSpec extends Specification { then: // Test Parent 1 parentChildInfo assert parent1Table.definitionMetadata.get("parentChildRelationInfo").get("isParent").booleanValue() - assert parentChildRelV1.getChildren(catalogName, databaseName, parent1) == [new ChildInfoDto("embedded-fast-hive-metastore/iceberg_db/child11", "CLONE", "c11_uuid")] as Set + assert parentChildRelV1.getChildren(catalogName, databaseName, parent1) == [new ChildInfoDto("hive-metastore/iceberg_db/child11", "CLONE", "c11_uuid")] as Set assert parentChildRelV1.getParents(catalogName, databaseName, parent1).isEmpty() // Test Child11 parentChildInfo assert !child11Table.definitionMetadata.get("parentChildRelationInfo").has("isParent") assert child11Table.definitionMetadata.get("random_key").asText() == "random_value" JSONAssert.assertEquals(child11Table.definitionMetadata.get("parentChildRelationInfo").toString(), - '{"parentInfos":[{"name":"embedded-fast-hive-metastore/iceberg_db/parent1","relationType":"CLONE", "uuid":"p1_uuid"}]}', + '{"parentInfos":[{"name":"hive-metastore/iceberg_db/parent1","relationType":"CLONE", "uuid":"p1_uuid"}]}', false) assert parentChildRelV1.getChildren(catalogName, databaseName, child11).isEmpty() - assert parentChildRelV1.getParents(catalogName, databaseName, child11) == [new ParentInfoDto("embedded-fast-hive-metastore/iceberg_db/parent1", "CLONE", "p1_uuid")] as Set + assert parentChildRelV1.getParents(catalogName, databaseName, child11) == [new ParentInfoDto("hive-metastore/iceberg_db/parent1", "CLONE", "p1_uuid")] as Set /* @@ -2176,18 +2177,18 @@ class MetacatSmokeSpec extends Specification { // Test Parent 1 parentChildInfo assert 
parent1Table.definitionMetadata.get("parentChildRelationInfo").get("isParent").booleanValue() assert parentChildRelV1.getChildren(catalogName, databaseName, parent1) == [ - new ChildInfoDto("embedded-fast-hive-metastore/iceberg_db/child11", "CLONE", "c11_uuid"), - new ChildInfoDto("embedded-fast-hive-metastore/iceberg_db/child12", "CLONE", "c12_uuid") + new ChildInfoDto("hive-metastore/iceberg_db/child11", "CLONE", "c11_uuid"), + new ChildInfoDto("hive-metastore/iceberg_db/child12", "CLONE", "c12_uuid") ] as Set assert parentChildRelV1.getParents(catalogName, databaseName, parent1).isEmpty() // Test Child12 parentChildInfo assert !child12Table.definitionMetadata.get("parentChildRelationInfo").has("isParent") JSONAssert.assertEquals(child12Table.definitionMetadata.get("parentChildRelationInfo").toString(), - '{"parentInfos":[{"name":"embedded-fast-hive-metastore/iceberg_db/parent1","relationType":"CLONE","uuid":"p1_uuid"}]}', + '{"parentInfos":[{"name":"hive-metastore/iceberg_db/parent1","relationType":"CLONE","uuid":"p1_uuid"}]}', false) assert parentChildRelV1.getChildren(catalogName, databaseName, child12).isEmpty() - assert parentChildRelV1.getParents(catalogName, databaseName, child12) == [new ParentInfoDto("embedded-fast-hive-metastore/iceberg_db/parent1", "CLONE", "p1_uuid")] as Set + assert parentChildRelV1.getParents(catalogName, databaseName, child12) == [new ParentInfoDto("hive-metastore/iceberg_db/parent1", "CLONE", "p1_uuid")] as Set /* Step 8: create a parent table on top of another parent table should fail @@ -2234,16 +2235,16 @@ class MetacatSmokeSpec extends Specification { // Test Parent 2 parentChildInfo assert parent2Table.definitionMetadata.get("parentChildRelationInfo").get("isParent").booleanValue() assert parentChildRelV1.getChildren(catalogName, databaseName, parent2) == [ - new ChildInfoDto("embedded-fast-hive-metastore/iceberg_db/child21", "CLONE", "c21_uuid") + new ChildInfoDto("hive-metastore/iceberg_db/child21", "CLONE", "c21_uuid") ] as Set assert parentChildRelV1.getParents(catalogName, databaseName, parent2).isEmpty() // Test Child21 parentChildInfo JSONAssert.assertEquals(child21Table.definitionMetadata.get("parentChildRelationInfo").toString(), - '{"parentInfos":[{"name":"embedded-fast-hive-metastore/iceberg_db/parent2","relationType":"CLONE","uuid":"p2_uuid"}]}', + '{"parentInfos":[{"name":"hive-metastore/iceberg_db/parent2","relationType":"CLONE","uuid":"p2_uuid"}]}', false) assert parentChildRelV1.getChildren(catalogName, databaseName, child21).isEmpty() - assert parentChildRelV1.getParents(catalogName, databaseName, child21) == [new ParentInfoDto("embedded-fast-hive-metastore/iceberg_db/parent2", "CLONE", "p2_uuid")] as Set + assert parentChildRelV1.getParents(catalogName, databaseName, child21) == [new ParentInfoDto("hive-metastore/iceberg_db/parent2", "CLONE", "p2_uuid")] as Set /* Step 11: Create a table newParent1 without any parent child rel info @@ -2267,25 +2268,25 @@ class MetacatSmokeSpec extends Specification { // Test Parent 1 parentChildInfo assert parent1Table.definitionMetadata.get("parentChildRelationInfo").get("isParent").booleanValue() assert parentChildRelV1.getChildren(catalogName, databaseName, parent1) == [ - new ChildInfoDto("embedded-fast-hive-metastore/iceberg_db/child11", "CLONE", "c11_uuid"), - new ChildInfoDto("embedded-fast-hive-metastore/iceberg_db/child12", "CLONE", "c12_uuid") + new ChildInfoDto("hive-metastore/iceberg_db/child11", "CLONE", "c11_uuid"), + new ChildInfoDto("hive-metastore/iceberg_db/child12", "CLONE", 
"c12_uuid") ] as Set assert parentChildRelV1.getParents(catalogName, databaseName, parent1).isEmpty() // Test Child11 parentChildInfo assert !child11Table.definitionMetadata.get("parentChildRelationInfo").has("isParent") assert child11Table.definitionMetadata.get("random_key").asText() == "random_value" JSONAssert.assertEquals(child11Table.definitionMetadata.get("parentChildRelationInfo").toString(), - '{"parentInfos":[{"name":"embedded-fast-hive-metastore/iceberg_db/parent1","relationType":"CLONE", "uuid":"p1_uuid"}]}', + '{"parentInfos":[{"name":"hive-metastore/iceberg_db/parent1","relationType":"CLONE", "uuid":"p1_uuid"}]}', false) assert parentChildRelV1.getChildren(catalogName, databaseName, child11).isEmpty() - assert parentChildRelV1.getParents(catalogName, databaseName, child11) == [new ParentInfoDto("embedded-fast-hive-metastore/iceberg_db/parent1", "CLONE", "p1_uuid")] as Set + assert parentChildRelV1.getParents(catalogName, databaseName, child11) == [new ParentInfoDto("hive-metastore/iceberg_db/parent1", "CLONE", "p1_uuid")] as Set // Test Child12 parentChildInfo assert !child12Table.definitionMetadata.get("parentChildRelationInfo").has("isParent") JSONAssert.assertEquals(child12Table.definitionMetadata.get("parentChildRelationInfo").toString(), - '{"parentInfos":[{"name":"embedded-fast-hive-metastore/iceberg_db/parent1","relationType":"CLONE","uuid":"p1_uuid"}]}', + '{"parentInfos":[{"name":"hive-metastore/iceberg_db/parent1","relationType":"CLONE","uuid":"p1_uuid"}]}', false) assert parentChildRelV1.getChildren(catalogName, databaseName, child12).isEmpty() - assert parentChildRelV1.getParents(catalogName, databaseName, child12) == [new ParentInfoDto("embedded-fast-hive-metastore/iceberg_db/parent1", "CLONE", "p1_uuid")] as Set + assert parentChildRelV1.getParents(catalogName, databaseName, child12) == [new ParentInfoDto("hive-metastore/iceberg_db/parent1", "CLONE", "p1_uuid")] as Set /* Step 12: Attempt to rename parent1 to parent2 which has parent child relationship and should fail @@ -2307,35 +2308,35 @@ class MetacatSmokeSpec extends Specification { // Test Parent 1 parentChildInfo assert parent1Table.definitionMetadata.get("parentChildRelationInfo").get("isParent").booleanValue() assert parentChildRelV1.getChildren(catalogName, databaseName, parent1) == [ - new ChildInfoDto("embedded-fast-hive-metastore/iceberg_db/child11", "CLONE", "c11_uuid"), - new ChildInfoDto("embedded-fast-hive-metastore/iceberg_db/child12", "CLONE", "c12_uuid") + new ChildInfoDto("hive-metastore/iceberg_db/child11", "CLONE", "c11_uuid"), + new ChildInfoDto("hive-metastore/iceberg_db/child12", "CLONE", "c12_uuid") ] as Set assert parentChildRelV1.getParents(catalogName, databaseName, parent1).isEmpty() // Test Child11 parentChildInfo assert !child11Table.definitionMetadata.get("parentChildRelationInfo").has("isParent") assert child11Table.definitionMetadata.get("random_key").asText() == "random_value" JSONAssert.assertEquals(child11Table.definitionMetadata.get("parentChildRelationInfo").toString(), - '{"parentInfos":[{"name":"embedded-fast-hive-metastore/iceberg_db/parent1","relationType":"CLONE", "uuid":"p1_uuid"}]}', + '{"parentInfos":[{"name":"hive-metastore/iceberg_db/parent1","relationType":"CLONE", "uuid":"p1_uuid"}]}', false) assert parentChildRelV1.getChildren(catalogName, databaseName, child11).isEmpty() - assert parentChildRelV1.getParents(catalogName, databaseName, child11) == [new ParentInfoDto("embedded-fast-hive-metastore/iceberg_db/parent1", "CLONE", "p1_uuid")] as Set + assert 
parentChildRelV1.getParents(catalogName, databaseName, child11) == [new ParentInfoDto("hive-metastore/iceberg_db/parent1", "CLONE", "p1_uuid")] as Set // Test Child12 parentChildInfo assert !child12Table.definitionMetadata.get("parentChildRelationInfo").has("isParent") JSONAssert.assertEquals(child12Table.definitionMetadata.get("parentChildRelationInfo").toString(), - '{"parentInfos":[{"name":"embedded-fast-hive-metastore/iceberg_db/parent1","relationType":"CLONE","uuid":"p1_uuid"}]}', + '{"parentInfos":[{"name":"hive-metastore/iceberg_db/parent1","relationType":"CLONE","uuid":"p1_uuid"}]}', false) assert parentChildRelV1.getChildren(catalogName, databaseName, child12).isEmpty() // Test Parent 2 parentChildInfo assert parent2Table.definitionMetadata.get("parentChildRelationInfo").get("isParent").booleanValue() assert parentChildRelV1.getChildren(catalogName, databaseName, parent2) == [ - new ChildInfoDto("embedded-fast-hive-metastore/iceberg_db/child21", "CLONE", "c21_uuid") + new ChildInfoDto("hive-metastore/iceberg_db/child21", "CLONE", "c21_uuid") ] as Set // Test Child21 parentChildInfo JSONAssert.assertEquals(child21Table.definitionMetadata.get("parentChildRelationInfo").toString(), - '{"parentInfos":[{"name":"embedded-fast-hive-metastore/iceberg_db/parent2","relationType":"CLONE","uuid":"p2_uuid"}]}', + '{"parentInfos":[{"name":"hive-metastore/iceberg_db/parent2","relationType":"CLONE","uuid":"p2_uuid"}]}', false) assert parentChildRelV1.getChildren(catalogName, databaseName, child21).isEmpty() - assert parentChildRelV1.getParents(catalogName, databaseName, child12) == [new ParentInfoDto("embedded-fast-hive-metastore/iceberg_db/parent1", "CLONE", "p1_uuid")] as Set + assert parentChildRelV1.getParents(catalogName, databaseName, child12) == [new ParentInfoDto("hive-metastore/iceberg_db/parent1", "CLONE", "p1_uuid")] as Set /* @@ -2353,25 +2354,25 @@ class MetacatSmokeSpec extends Specification { assert parent1Table.definitionMetadata.get("parentChildRelationInfo").get("isParent").booleanValue() assert parentChildRelV1.getChildren(catalogName, databaseName, renameParent1) == [ - new ChildInfoDto("embedded-fast-hive-metastore/iceberg_db/child11", "CLONE", "c11_uuid"), - new ChildInfoDto("embedded-fast-hive-metastore/iceberg_db/child12", "CLONE", "c12_uuid") + new ChildInfoDto("hive-metastore/iceberg_db/child11", "CLONE", "c11_uuid"), + new ChildInfoDto("hive-metastore/iceberg_db/child12", "CLONE", "c12_uuid") ] as Set assert parentChildRelV1.getParents(catalogName, databaseName, renameParent1).isEmpty() // Test Child11 parentChildInfo assert !child11Table.definitionMetadata.get("parentChildRelationInfo").has("isParent") JSONAssert.assertEquals(child11Table.definitionMetadata.get("parentChildRelationInfo").toString(), - '{"parentInfos":[{"name":"embedded-fast-hive-metastore/iceberg_db/rename_parent1","relationType":"CLONE","uuid":"p1_uuid"}]}', + '{"parentInfos":[{"name":"hive-metastore/iceberg_db/rename_parent1","relationType":"CLONE","uuid":"p1_uuid"}]}', false) assert parentChildRelV1.getChildren(catalogName, databaseName, child11).isEmpty() - assert parentChildRelV1.getParents(catalogName, databaseName, child11) == [new ParentInfoDto("embedded-fast-hive-metastore/iceberg_db/rename_parent1", "CLONE", "p1_uuid")] as Set + assert parentChildRelV1.getParents(catalogName, databaseName, child11) == [new ParentInfoDto("hive-metastore/iceberg_db/rename_parent1", "CLONE", "p1_uuid")] as Set // Test Child12 parentChildInfo assert 
!child12Table.definitionMetadata.get("parentChildRelationInfo").has("isParent") JSONAssert.assertEquals(child12Table.definitionMetadata.get("parentChildRelationInfo").toString(), - '{"parentInfos":[{"name":"embedded-fast-hive-metastore/iceberg_db/rename_parent1","relationType":"CLONE","uuid":"p1_uuid"}]}', + '{"parentInfos":[{"name":"hive-metastore/iceberg_db/rename_parent1","relationType":"CLONE","uuid":"p1_uuid"}]}', false) assert parentChildRelV1.getChildren(catalogName, databaseName, child12).isEmpty() - assert parentChildRelV1.getParents(catalogName, databaseName, child12) == [new ParentInfoDto("embedded-fast-hive-metastore/iceberg_db/rename_parent1", "CLONE", "p1_uuid")] as Set + assert parentChildRelV1.getParents(catalogName, databaseName, child12) == [new ParentInfoDto("hive-metastore/iceberg_db/rename_parent1", "CLONE", "p1_uuid")] as Set //get the parent oldName should fail as it no longer exists when: @@ -2402,25 +2403,25 @@ class MetacatSmokeSpec extends Specification { assert parent1Table.definitionMetadata.get("parentChildRelationInfo").get("isParent").booleanValue() assert parentChildRelV1.getChildren(catalogName, databaseName, renameParent1) == [ - new ChildInfoDto("embedded-fast-hive-metastore/iceberg_db/child11", "CLONE", "c11_uuid"), - new ChildInfoDto("embedded-fast-hive-metastore/iceberg_db/child12", "CLONE", "c12_uuid") + new ChildInfoDto("hive-metastore/iceberg_db/child11", "CLONE", "c11_uuid"), + new ChildInfoDto("hive-metastore/iceberg_db/child12", "CLONE", "c12_uuid") ] as Set assert parentChildRelV1.getParents(catalogName, databaseName, renameParent1).isEmpty() // Test Child11 parentChildInfo assert !child11Table.definitionMetadata.get("parentChildRelationInfo").has("isParent") JSONAssert.assertEquals(child11Table.definitionMetadata.get("parentChildRelationInfo").toString(), - '{"parentInfos":[{"name":"embedded-fast-hive-metastore/iceberg_db/rename_parent1","relationType":"CLONE","uuid":"p1_uuid"}]}', + '{"parentInfos":[{"name":"hive-metastore/iceberg_db/rename_parent1","relationType":"CLONE","uuid":"p1_uuid"}]}', false) assert parentChildRelV1.getChildren(catalogName, databaseName, child11).isEmpty() - assert parentChildRelV1.getParents(catalogName, databaseName, child11) == [new ParentInfoDto("embedded-fast-hive-metastore/iceberg_db/rename_parent1", "CLONE", "p1_uuid")] as Set + assert parentChildRelV1.getParents(catalogName, databaseName, child11) == [new ParentInfoDto("hive-metastore/iceberg_db/rename_parent1", "CLONE", "p1_uuid")] as Set // Test Child12 parentChildInfo assert !child12Table.definitionMetadata.get("parentChildRelationInfo").has("isParent") JSONAssert.assertEquals(child12Table.definitionMetadata.get("parentChildRelationInfo").toString(), - '{"parentInfos":[{"name":"embedded-fast-hive-metastore/iceberg_db/rename_parent1","relationType":"CLONE","uuid":"p1_uuid"}]}', + '{"parentInfos":[{"name":"hive-metastore/iceberg_db/rename_parent1","relationType":"CLONE","uuid":"p1_uuid"}]}', false) assert parentChildRelV1.getChildren(catalogName, databaseName, child12).isEmpty() - assert parentChildRelV1.getParents(catalogName, databaseName, child12) == [new ParentInfoDto("embedded-fast-hive-metastore/iceberg_db/rename_parent1", "CLONE", "p1_uuid")] as Set + assert parentChildRelV1.getParents(catalogName, databaseName, child12) == [new ParentInfoDto("hive-metastore/iceberg_db/rename_parent1", "CLONE", "p1_uuid")] as Set /* Step 15: Create a table renameChild11 with parent childInfo and then try to rename child11 to renameChild11, which should fail @@ -2447,32 
+2448,32 @@ class MetacatSmokeSpec extends Specification { assert parent1Table.definitionMetadata.get("parentChildRelationInfo").get("isParent").booleanValue() assert parentChildRelV1.getChildren(catalogName, databaseName, renameParent1) == [ - new ChildInfoDto("embedded-fast-hive-metastore/iceberg_db/child11", "CLONE", "c11_uuid"), - new ChildInfoDto("embedded-fast-hive-metastore/iceberg_db/child12", "CLONE", "c12_uuid"), - new ChildInfoDto("embedded-fast-hive-metastore/iceberg_db/rename_child11", "CLONE", "random_uuid") + new ChildInfoDto("hive-metastore/iceberg_db/child11", "CLONE", "c11_uuid"), + new ChildInfoDto("hive-metastore/iceberg_db/child12", "CLONE", "c12_uuid"), + new ChildInfoDto("hive-metastore/iceberg_db/rename_child11", "CLONE", "random_uuid") ] as Set assert parentChildRelV1.getParents(catalogName, databaseName, renameParent1).isEmpty() // Test Child11 parentChildInfo assert !child11Table.definitionMetadata.get("parentChildRelationInfo").has("isParent") JSONAssert.assertEquals(child11Table.definitionMetadata.get("parentChildRelationInfo").toString(), - '{"parentInfos":[{"name":"embedded-fast-hive-metastore/iceberg_db/rename_parent1","relationType":"CLONE","uuid":"p1_uuid"}]}', + '{"parentInfos":[{"name":"hive-metastore/iceberg_db/rename_parent1","relationType":"CLONE","uuid":"p1_uuid"}]}', false) assert parentChildRelV1.getChildren(catalogName, databaseName, child11).isEmpty() - assert parentChildRelV1.getParents(catalogName, databaseName, child11) == [new ParentInfoDto("embedded-fast-hive-metastore/iceberg_db/rename_parent1", "CLONE", "p1_uuid")] as Set + assert parentChildRelV1.getParents(catalogName, databaseName, child11) == [new ParentInfoDto("hive-metastore/iceberg_db/rename_parent1", "CLONE", "p1_uuid")] as Set // Test Child12 parentChildInfo assert !child12Table.definitionMetadata.get("parentChildRelationInfo").has("isParent") JSONAssert.assertEquals(child12Table.definitionMetadata.get("parentChildRelationInfo").toString(), - '{"parentInfos":[{"name":"embedded-fast-hive-metastore/iceberg_db/rename_parent1","relationType":"CLONE","uuid":"p1_uuid"}]}', + '{"parentInfos":[{"name":"hive-metastore/iceberg_db/rename_parent1","relationType":"CLONE","uuid":"p1_uuid"}]}', false) assert parentChildRelV1.getChildren(catalogName, databaseName, child12).isEmpty() - assert parentChildRelV1.getParents(catalogName, databaseName, child12) == [new ParentInfoDto("embedded-fast-hive-metastore/iceberg_db/rename_parent1", "CLONE", "p1_uuid")] as Set + assert parentChildRelV1.getParents(catalogName, databaseName, child12) == [new ParentInfoDto("hive-metastore/iceberg_db/rename_parent1", "CLONE", "p1_uuid")] as Set // Test renameChild11Table parentChildInfo assert !renameChild11Table.definitionMetadata.get("parentChildRelationInfo").has("isParent") JSONAssert.assertEquals(renameChild11Table.definitionMetadata.get("parentChildRelationInfo").toString(), - '{"parentInfos":[{"name":"embedded-fast-hive-metastore/iceberg_db/rename_parent1","relationType":"CLONE","uuid":"p1_uuid"}]}', + '{"parentInfos":[{"name":"hive-metastore/iceberg_db/rename_parent1","relationType":"CLONE","uuid":"p1_uuid"}]}', false) assert parentChildRelV1.getChildren(catalogName, databaseName, renameChild11).isEmpty() - assert parentChildRelV1.getParents(catalogName, databaseName, renameChild11) == [new ParentInfoDto("embedded-fast-hive-metastore/iceberg_db/rename_parent1", "CLONE", "p1_uuid")] as Set + assert parentChildRelV1.getParents(catalogName, databaseName, renameChild11) == [new 
ParentInfoDto("hive-metastore/iceberg_db/rename_parent1", "CLONE", "p1_uuid")] as Set /* @@ -2488,18 +2489,18 @@ class MetacatSmokeSpec extends Specification { // Test parent1Table parentChildInfo with newName assert parent1Table.definitionMetadata.get("parentChildRelationInfo").get("isParent").booleanValue() assert parentChildRelV1.getChildren(catalogName, databaseName, renameParent1) == [ - new ChildInfoDto("embedded-fast-hive-metastore/iceberg_db/rename_child11", "CLONE", "c11_uuid"), - new ChildInfoDto("embedded-fast-hive-metastore/iceberg_db/child12", "CLONE", "c12_uuid") + new ChildInfoDto("hive-metastore/iceberg_db/rename_child11", "CLONE", "c11_uuid"), + new ChildInfoDto("hive-metastore/iceberg_db/child12", "CLONE", "c12_uuid") ] as Set assert parentChildRelV1.getParents(catalogName, databaseName, renameParent1).isEmpty() // Test Child11 parentChildInfo with newName assert !child11Table.definitionMetadata.get("parentChildRelationInfo").has("isParent") assert child11Table.definitionMetadata.get("random_key").asText() == "random_value" JSONAssert.assertEquals(child11Table.definitionMetadata.get("parentChildRelationInfo").toString(), - '{"parentInfos":[{"name":"embedded-fast-hive-metastore/iceberg_db/rename_parent1","relationType":"CLONE","uuid":"p1_uuid"}]}', + '{"parentInfos":[{"name":"hive-metastore/iceberg_db/rename_parent1","relationType":"CLONE","uuid":"p1_uuid"}]}', false) assert parentChildRelV1.getChildren(catalogName, databaseName, renameChild11).isEmpty() - assert parentChildRelV1.getParents(catalogName, databaseName, renameChild11) == [new ParentInfoDto("embedded-fast-hive-metastore/iceberg_db/rename_parent1", "CLONE", "p1_uuid")] as Set + assert parentChildRelV1.getParents(catalogName, databaseName, renameChild11) == [new ParentInfoDto("hive-metastore/iceberg_db/rename_parent1", "CLONE", "p1_uuid")] as Set //get the child oldName should fail as it no longer exists when: @@ -2529,7 +2530,7 @@ class MetacatSmokeSpec extends Specification { // Test parent1 Table assert parent1Table.definitionMetadata.get("parentChildRelationInfo").get("isParent").booleanValue() assert parentChildRelV1.getChildren(catalogName, databaseName, renameParent1) == [ - new ChildInfoDto("embedded-fast-hive-metastore/iceberg_db/child12", "CLONE", "c12_uuid") + new ChildInfoDto("hive-metastore/iceberg_db/child12", "CLONE", "c12_uuid") ] as Set assert parentChildRelV1.getParents(catalogName, databaseName, renameParent1).isEmpty() @@ -2551,7 +2552,7 @@ class MetacatSmokeSpec extends Specification { // Test parent1 Table still only have child12 assert parent1Table.definitionMetadata.get("parentChildRelationInfo").get("isParent").booleanValue() assert parentChildRelV1.getChildren(catalogName, databaseName, renameParent1) == [ - new ChildInfoDto("embedded-fast-hive-metastore/iceberg_db/child12", "CLONE", "c12_uuid") + new ChildInfoDto("hive-metastore/iceberg_db/child12", "CLONE", "c12_uuid") ] as Set assert parentChildRelV1.getParents(catalogName, databaseName, renameParent1).isEmpty() @@ -2583,17 +2584,17 @@ class MetacatSmokeSpec extends Specification { // Test Parent 2 parentChildInfo assert parent2Table.definitionMetadata.get("parentChildRelationInfo").get("isParent").booleanValue() assert parentChildRelV1.getChildren(catalogName, databaseName, parent2) == [ - new ChildInfoDto("embedded-fast-hive-metastore/iceberg_db/child21", "CLONE", "c21_uuid") + new ChildInfoDto("hive-metastore/iceberg_db/child21", "CLONE", "c21_uuid") ] as Set assert parentChildRelV1.getParents(catalogName, databaseName, 
parent2).isEmpty() // Test Child21 parentChildInfo assert !child21Table.definitionMetadata.get("parentChildRelationInfo").has("isParent") JSONAssert.assertEquals(child21Table.definitionMetadata.get("parentChildRelationInfo").toString(), - '{"parentInfos":[{"name":"embedded-fast-hive-metastore/iceberg_db/parent2","relationType":"CLONE","uuid":"p2_uuid"}]}', + '{"parentInfos":[{"name":"hive-metastore/iceberg_db/parent2","relationType":"CLONE","uuid":"p2_uuid"}]}', false) assert parentChildRelV1.getChildren(catalogName, databaseName, child21).isEmpty() - assert parentChildRelV1.getParents(catalogName, databaseName, child21) == [new ParentInfoDto("embedded-fast-hive-metastore/iceberg_db/parent2", "CLONE", "p2_uuid")] as Set + assert parentChildRelV1.getParents(catalogName, databaseName, child21) == [new ParentInfoDto("hive-metastore/iceberg_db/parent2", "CLONE", "p2_uuid")] as Set /* Step 22: update parent2 with random parentChildRelationInfo to test immutability @@ -2608,16 +2609,16 @@ class MetacatSmokeSpec extends Specification { then: assert parent2Table.definitionMetadata.get("parentChildRelationInfo").get("isParent").booleanValue() assert parentChildRelV1.getChildren(catalogName, databaseName, parent2) == [ - new ChildInfoDto("embedded-fast-hive-metastore/iceberg_db/child21", "CLONE", "c21_uuid") + new ChildInfoDto("hive-metastore/iceberg_db/child21", "CLONE", "c21_uuid") ] as Set assert parentChildRelV1.getParents(catalogName, databaseName, parent2).isEmpty() // Test Child21 parentChildInfo assert !child21Table.definitionMetadata.get("parentChildRelationInfo").has("isParent") JSONAssert.assertEquals(child21Table.definitionMetadata.get("parentChildRelationInfo").toString(), - '{"parentInfos":[{"name":"embedded-fast-hive-metastore/iceberg_db/parent2","relationType":"CLONE","uuid":"p2_uuid"}]}', + '{"parentInfos":[{"name":"hive-metastore/iceberg_db/parent2","relationType":"CLONE","uuid":"p2_uuid"}]}', false) assert parentChildRelV1.getChildren(catalogName, databaseName, child21).isEmpty() - assert parentChildRelV1.getParents(catalogName, databaseName, child21) == [new ParentInfoDto("embedded-fast-hive-metastore/iceberg_db/parent2", "CLONE", "p2_uuid")] as Set + assert parentChildRelV1.getParents(catalogName, databaseName, child21) == [new ParentInfoDto("hive-metastore/iceberg_db/parent2", "CLONE", "p2_uuid")] as Set /* Step 23: update child21 with random parentChildRelationInfo to test immutability @@ -2633,16 +2634,16 @@ class MetacatSmokeSpec extends Specification { // Test Parent 2 parentChildInfo assert parent2Table.definitionMetadata.get("parentChildRelationInfo").get("isParent").booleanValue() assert parentChildRelV1.getChildren(catalogName, databaseName, parent2) == [ - new ChildInfoDto("embedded-fast-hive-metastore/iceberg_db/child21", "CLONE", "c21_uuid") + new ChildInfoDto("hive-metastore/iceberg_db/child21", "CLONE", "c21_uuid") ] as Set assert parentChildRelV1.getParents(catalogName, databaseName, parent2).isEmpty() // Test Child21 parentChildInfo assert !child21Table.definitionMetadata.get("parentChildRelationInfo").has("isParent") JSONAssert.assertEquals(child21Table.definitionMetadata.get("parentChildRelationInfo").toString(), - '{"parentInfos":[{"name":"embedded-fast-hive-metastore/iceberg_db/parent2","relationType":"CLONE","uuid":"p2_uuid"}]}', + '{"parentInfos":[{"name":"hive-metastore/iceberg_db/parent2","relationType":"CLONE","uuid":"p2_uuid"}]}', false) assert parentChildRelV1.getChildren(catalogName, databaseName, child21).isEmpty() - assert 
parentChildRelV1.getParents(catalogName, databaseName, child21) == [new ParentInfoDto("embedded-fast-hive-metastore/iceberg_db/parent2", "CLONE", "p2_uuid")] as Set
+ assert parentChildRelV1.getParents(catalogName, databaseName, child21) == [new ParentInfoDto("hive-metastore/iceberg_db/parent2", "CLONE", "p2_uuid")] as Set
}
}
diff --git a/metacat-functional-tests/src/functionalTest/groovy/com/netflix/metacat/MetacatSmokeThriftSpec.groovy b/metacat-functional-tests/src/functionalTest/groovy/com/netflix/metacat/MetacatSmokeThriftSpec.groovy
index 47dd6a76f..5f75ec207 100644
--- a/metacat-functional-tests/src/functionalTest/groovy/com/netflix/metacat/MetacatSmokeThriftSpec.groovy
+++ b/metacat-functional-tests/src/functionalTest/groovy/com/netflix/metacat/MetacatSmokeThriftSpec.groovy
@@ -145,8 +145,8 @@ class MetacatSmokeThriftSpec extends Specification {
where:
catalogName | client
'remote' | remoteHiveClient
- 'local' | localHiveClient
- 'localfast' | localFastHiveClient
+// 'local' | localHiveClient
+// 'localfast' | localFastHiveClient
}
@Unroll
@@ -166,8 +166,8 @@ class MetacatSmokeThriftSpec extends Specification {
where:
catalogName | client
'remote' | remoteHiveClient
- 'local' | localHiveClient
- 'localfast' | localFastHiveClient
+// 'local' | localHiveClient
+// 'localfast' | localFastHiveClient
}
@Unroll
@@ -197,14 +197,14 @@ class MetacatSmokeThriftSpec extends Specification {
then:
(catalogName == 'localfast' && exceptionThrown)
|| (catalogName == 'local' && exceptionThrown)
- || (catalogName == 'remote' && !exceptionThrown)
+ || (catalogName == 'remote' && exceptionThrown)
cleanup:
client.dropDatabase(databaseName)
where:
catalogName | client
- 'local' | localHiveClient
- 'localfast' | localFastHiveClient
- // 'remote' | remoteHiveClient
+// 'local' | localHiveClient
+// 'localfast' | localFastHiveClient
+ 'remote' | remoteHiveClient
}
@Unroll
@@ -224,8 +224,8 @@ class MetacatSmokeThriftSpec extends Specification {
where:
catalogName | client
'remote' | remoteHiveClient
- 'local' | localHiveClient
- 'localfast' | localFastHiveClient
+// 'local' | localHiveClient
+// 'localfast' | localFastHiveClient
}
@Unroll
@@ -256,8 +256,8 @@ class MetacatSmokeThriftSpec extends Specification {
where:
catalogName | client
'remote' | remoteHiveClient
- 'local' | localHiveClient
- 'localfast' | localFastHiveClient
+// 'local' | localHiveClient
+// 'localfast' | localFastHiveClient
}
@Unroll
@@ -287,8 +287,8 @@ class MetacatSmokeThriftSpec extends Specification {
where:
catalogName | client
'remote' | remoteHiveClient
- 'local' | localHiveClient
- 'localfast' | localFastHiveClient
+// 'local' | localHiveClient
+// 'localfast' | localFastHiveClient
}
@Unroll
@@ -322,8 +322,8 @@ class MetacatSmokeThriftSpec extends Specification {
where:
catalogName | client
'remote' | remoteHiveClient
- 'local' | localHiveClient
- 'localfast' | localFastHiveClient
+// 'local' | localHiveClient
+// 'localfast' | localFastHiveClient
}
@Unroll
@@ -480,148 +480,54 @@ class MetacatSmokeThriftSpec extends Specification {
}
}
- @Unroll
- def "Test: Embedded Thrift connector: get partitions for filter #filter returned #result partitions"() {
- when:
- def catalogName = 'local'
- def client = localHiveClient
- def databaseName = 'test_db5_' + catalogName
- def tableName = 'parts'
- def hiveTable = createTable(client, catalogName, databaseName, tableName)
- if (cursor == 'start') {
- def uri = isLocalEnv ?
'file:/tmp/abc' : null; - def dto = converter.toTableDto(hiveConverter.toTableInfo(QualifiedName.ofTable(catalogName, databaseName, tableName), hiveTable.getTTable())) - def partitionDtos = DataDtoProvider.getPartitions(catalogName, databaseName, tableName, 'one=xyz/total=1', uri, 10) - def partitions = partitionDtos.collect { - new Partition(hiveTable, hiveConverter.fromPartitionInfo(converter.fromTableDto(dto), converter.fromPartitionDto(it))) - } - client.alterPartitions(databaseName + '.' + tableName, partitions) - } - then: - try { - client.getPartitionsByFilter(hiveTable, filter).size() == result - } catch (Exception e) { - result == -1 - e.message.contains('400 Bad Request') - } - cleanup: - if (cursor == 'end') { - def partitionNames = client.getPartitionNames(databaseName, tableName, (short) -1) - partitionNames.each { - client.dropPartition(databaseName, tableName, Lists.newArrayList(PartitionUtil.getPartitionKeyValues(it).values()), false) - } - } - where: - cursor | filter | result - 'start' | "one='xyz'" | 10 - '' | 'one="xyz"' | 10 - '' | "one='xyz' and one like 'xy_'" | 10 - '' | "(one='xyz') and one like 'xy%'" | 10 - '' | "one like 'xy%'" | 10 - '' | "total=10" | 1 - '' | "total='10'" | 1 - '' | "total<1" | 0 - '' | "total>1" | 10 - '' | "total>=10" | 10 - '' | "total<=20" | 10 - '' | "total between 1 and 20" | 10 - '' | "total not between 1 and 20" | 0 - '' | 'one=xyz' | -1 - '' | 'invalid=xyz' | -1 - 'end' | "one='xyz' and (total=11 or total=12)" | 2 - } - - @Unroll - def "Test: Embedded Fast Thrift connector: get partitions for filter #filter returned #result partitions"() { - when: - def catalogName = 'localfast' - def client = localFastHiveClient - def databaseName = 'test_db5_' + catalogName - def tableName = 'parts' - def hiveTable = createTable(client, catalogName, databaseName, tableName) - if (cursor == 'start') { - def uri = isLocalEnv ? 'file:/tmp/abc' : null; - def dto = converter.toTableDto(hiveConverter.toTableInfo(QualifiedName.ofTable(catalogName, databaseName, tableName), hiveTable.getTTable())) - def partitionDtos = DataDtoProvider.getPartitions(catalogName, databaseName, tableName, 'one=xyz/total=1', uri, 10) - def partitions = partitionDtos.collect { - new Partition(hiveTable, hiveConverter.fromPartitionInfo(converter.fromTableDto(dto), converter.fromPartitionDto(it))) - } - client.alterPartitions(databaseName + '.' 
+ tableName, partitions) - } - then: - try { - client.getPartitionsByFilter(hiveTable, filter).size() == result - } catch (Exception e) { - result == -1 - e.message.contains('400 Bad Request') - } - cleanup: - if (cursor == 'end') { - def partitionNames = client.getPartitionNames(databaseName, tableName, (short) -1) - partitionNames.each { - client.dropPartition(databaseName, tableName, Lists.newArrayList(PartitionUtil.getPartitionKeyValues(it).values()), false) - } - } - where: - cursor | filter | result - 'start' | "one='xyz'" | 10 - '' | 'one="xyz"' | 10 - '' | "one='xyz' and one like 'xy_'" | 10 - '' | "(one='xyz') and one like 'xy%'" | 10 - '' | "one like 'xy%'" | 10 - '' | "total=10" | 1 - '' | "total='10'" | 1 - '' | "total<1" | 0 - '' | "total>1" | 10 - '' | "total>=10" | 10 - '' | "total<=20" | 10 - '' | "total between 1 and 20" | 10 - '' | "total not between 1 and 20" | 0 - '' | 'one=xyz' | -1 - '' | 'invalid=xyz' | -1 - 'end' | "one='xyz' and (total=11 or total=12)" | 2 - } - - @Unroll - def "Test: Embedded Fast Thrift connector: getPartitionsByNames with escape values"() { - given: - def catalogName = 'localfast' - def client = localFastHiveClient - def databaseName = 'test_db5_' + catalogName - def tableName = 'parts' - def hiveTable = createTable(client, catalogName, databaseName, tableName) - def uri = isLocalEnv ? 'file:/tmp/abc' : null; - def dto = converter.toTableDto(hiveConverter.toTableInfo(QualifiedName.ofTable(catalogName, databaseName, tableName), hiveTable.getTTable())) - def partitionDtos = DataDtoProvider.getPartitions(catalogName, databaseName, tableName, 'one=xy^:z/total=1', uri, 10) - def partitions = partitionDtos.collect { - new Partition(hiveTable, hiveConverter.fromPartitionInfo(converter.fromTableDto(dto), converter.fromPartitionDto(it))) - } - client.alterPartitions(databaseName + '.' + tableName, partitions) - when: - def result = client.getPartitionsByNames(hiveTable, ['one=xy%5E%3Az/total=10']) - then: - result.size() == 1 - result.get(0).getValues() == ['xy^:z', '10'] - when: - result = client.getPartitionsByNames(hiveTable, ['one=xy^:z/total=10']) - then: - result.size() == 0 - when: - result = client.getPartitionsByNames(hiveTable, ['total':'10']) - then: - result.size() == 1 - result.get(0).getValues() == ['xy^:z', '10'] - when: - result = client.getPartitionsByNames(hiveTable, ['one':'xy^:z']) - then: - result.size() == 10 - when: - result = client.getPartitionsByNames(hiveTable, ['one':'xy%5E%3Az']) - then: - result.size() == 0 - cleanup: - client.getPartitions(hiveTable).each { - client.dropPartition(databaseName, tableName, it.getValues(), false) - } - } +// @Unroll +// def "Test: Embedded Thrift connector: get partitions for filter #filter returned #result partitions"() { +// when: +// def catalogName = 'local' +// def client = localHiveClient +// def databaseName = 'test_db5_' + catalogName +// def tableName = 'parts' +// def hiveTable = createTable(client, catalogName, databaseName, tableName) +// if (cursor == 'start') { +// def uri = isLocalEnv ? 
'file:/tmp/abc' : null; +// def dto = converter.toTableDto(hiveConverter.toTableInfo(QualifiedName.ofTable(catalogName, databaseName, tableName), hiveTable.getTTable())) +// def partitionDtos = DataDtoProvider.getPartitions(catalogName, databaseName, tableName, 'one=xyz/total=1', uri, 10) +// def partitions = partitionDtos.collect { +// new Partition(hiveTable, hiveConverter.fromPartitionInfo(converter.fromTableDto(dto), converter.fromPartitionDto(it))) +// } +// client.alterPartitions(databaseName + '.' + tableName, partitions) +// } +// then: +// try { +// client.getPartitionsByFilter(hiveTable, filter).size() == result +// } catch (Exception e) { +// result == -1 +// e.message.contains('400 Bad Request') +// } +// cleanup: +// if (cursor == 'end') { +// def partitionNames = client.getPartitionNames(databaseName, tableName, (short) -1) +// partitionNames.each { +// client.dropPartition(databaseName, tableName, Lists.newArrayList(PartitionUtil.getPartitionKeyValues(it).values()), false) +// } +// } +// where: +// cursor | filter | result +// 'start' | "one='xyz'" | 10 +// '' | 'one="xyz"' | 10 +// '' | "one='xyz' and one like 'xy_'" | 10 +// '' | "(one='xyz') and one like 'xy%'" | 10 +// '' | "one like 'xy%'" | 10 +// '' | "total=10" | 1 +// '' | "total='10'" | 1 +// '' | "total<1" | 0 +// '' | "total>1" | 10 +// '' | "total>=10" | 10 +// '' | "total<=20" | 10 +// '' | "total between 1 and 20" | 10 +// '' | "total not between 1 and 20" | 0 +// '' | 'one=xyz' | -1 +// '' | 'invalid=xyz' | -1 +// 'end' | "one='xyz' and (total=11 or total=12)" | 2 +// } } diff --git a/metacat-functional-tests/src/functionalTest/groovy/com/netflix/metacat/MetacatThriftFunctionalSpec.groovy b/metacat-functional-tests/src/functionalTest/groovy/com/netflix/metacat/MetacatThriftFunctionalSpec.groovy index 7d23e9a41..c31bb65e9 100644 --- a/metacat-functional-tests/src/functionalTest/groovy/com/netflix/metacat/MetacatThriftFunctionalSpec.groovy +++ b/metacat-functional-tests/src/functionalTest/groovy/com/netflix/metacat/MetacatThriftFunctionalSpec.groovy @@ -72,15 +72,15 @@ class MetacatThriftFunctionalSpec extends Specification { assert thriftPort, 'Required system property "metacat_hive_thrift_port" is not set' TestCatalogs.findByCatalogName('hive-metastore').thriftUri = "thrift://localhost:${thriftPort}".toString() - Logger.getRootLogger().setLevel(Level.OFF) - thriftPort = System.properties['metacat_embedded_hive_thrift_port']?.toString()?.trim() - assert thriftPort, 'Required system property "metacat_embedded_hive_thrift_port" is not set' - TestCatalogs.findByCatalogName('embedded-hive-metastore').thriftUri = "thrift://localhost:${thriftPort}".toString() - - Logger.getRootLogger().setLevel(Level.OFF) - thriftPort = System.properties['metacat_embedded_fast_hive_thrift_port']?.toString()?.trim() - assert thriftPort, 'Required system property "metacat_embedded_fast_hive_thrift_port" is not set' - TestCatalogs.findByCatalogName('embedded-fast-hive-metastore').thriftUri = "thrift://localhost:${thriftPort}".toString() +// Logger.getRootLogger().setLevel(Level.OFF) +// thriftPort = System.properties['metacat_embedded_hive_thrift_port']?.toString()?.trim() +// assert thriftPort, 'Required system property "metacat_embedded_hive_thrift_port" is not set' +// TestCatalogs.findByCatalogName('embedded-hive-metastore').thriftUri = "thrift://localhost:${thriftPort}".toString() +// +// Logger.getRootLogger().setLevel(Level.OFF) +// thriftPort = System.properties['metacat_embedded_fast_hive_thrift_port']?.toString()?.trim() +// 
assert thriftPort, 'Required system property "metacat_embedded_fast_hive_thrift_port" is not set'
+// TestCatalogs.findByCatalogName('embedded-fast-hive-metastore').thriftUri = "thrift://localhost:${thriftPort}".toString()
thriftPort = System.properties['hive_thrift_port']?.toString()?.trim()
assert thriftPort, 'Required system property "hive_thrift_port" is not set'
diff --git a/metacat-functional-tests/src/functionalTest/groovy/com/netflix/metacat/TestCatalogs.groovy b/metacat-functional-tests/src/functionalTest/groovy/com/netflix/metacat/TestCatalogs.groovy
index 275db2286..eeb42499b 100644
--- a/metacat-functional-tests/src/functionalTest/groovy/com/netflix/metacat/TestCatalogs.groovy
+++ b/metacat-functional-tests/src/functionalTest/groovy/com/netflix/metacat/TestCatalogs.groovy
@@ -51,38 +51,13 @@ class TestCatalogs {
createTable: true,
deleteDatabase: true,
deleteTable: true,
- name: 'embedded-hive-metastore',
- partitionKeysAppearLast: true,
- type: 'hive',
- createView: true,
- validateFilterExpressionBasedOnPartitionKeyType: false,
- supportAUDITtables: true
- ),
- new TestCatalog(
- createDatabase: true,
- createPartition: true,
- createTable: true,
- deleteDatabase: true,
- deleteTable: true,
- name: 'embedded-fast-hive-metastore',
+ name: 'hive-metastore',
partitionKeysAppearLast: true,
type: 'hive',
createView: true,
- validateWithHive: false,
validateFilterExpressionBasedOnPartitionKeyType: false,
supportAUDITtables: true
),
- new TestCatalog(
- createDatabase: true,
- createPartition: true,
- createTable: true,
- deleteDatabase: true,
- deleteTable: true,
- name: 'hive-metastore',
- partitionKeysAppearLast: true,
- type: 'hive',
- createView: true
- ),
new TestCatalog(
createDatabase: true,
createPartition: true,