diff --git a/common/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java b/common/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java index d3c85832b7ab..cd62ad1688c3 100644 --- a/common/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java +++ b/common/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java @@ -493,6 +493,8 @@ public enum ErrorMsg { ICEBERG_COMPACTION_WITH_PART_SPEC_AND_FILTER_NOT_SUPPORTED(10441, "Compaction command with both partition spec and filter is not supported on Iceberg table {0}.{1}", true), COMPACTION_THREAD_INITIALIZATION(10442, "Compaction thread failed during initialization", false), ALTER_TABLE_COMPACTION_NON_PARTITIONED_COLUMN_NOT_ALLOWED(10443, "Filter expression can contain only partition columns."), + CATALOG_ALREADY_EXISTS(10444, "Catalog {0} already exists", true), + CATALOG_NOT_EXISTS(10445, "Catalog {0} does not exist", true), //========================== 20000 range starts here ========================// diff --git a/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java b/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java index 5cc12e6cec92..15720d03fe11 100644 --- a/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java +++ b/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java @@ -104,6 +104,7 @@ public class PerfLogger { public static final String STATS_TASK = "StatsTask"; public static final String HIVE_GET_TABLE = "getTablesByType"; + public static final String HIVE_GET_CATALOG = "getCatalog"; public static final String HIVE_GET_DATABASE = "getDatabase"; public static final String HIVE_GET_DATABASE_2 = "getDatabase2"; public static final String HIVE_GET_PARTITIONS = "getPartitions"; diff --git a/parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g b/parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g index 3e528476f51e..d75e6cecab21 100644 --- a/parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g +++ 
b/parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g @@ -53,6 +53,7 @@ alterStatement | KW_ALTER (KW_DATABASE|KW_SCHEMA) alterDatabaseStatementSuffix -> alterDatabaseStatementSuffix | KW_ALTER KW_DATACONNECTOR alterDataConnectorStatementSuffix -> alterDataConnectorStatementSuffix | KW_OPTIMIZE KW_TABLE tableName optimizeTableStatementSuffix -> ^(TOK_ALTERTABLE tableName optimizeTableStatementSuffix) + | KW_ALTER KW_CATALOG alterCatalogStatementSuffix -> alterCatalogStatementSuffix ; alterTableStatementSuffix @@ -155,6 +156,19 @@ alterMaterializedViewSuffixRebuild[CommonTree tableNameTree] : KW_REBUILD -> ^(TOK_ALTER_MATERIALIZED_VIEW_REBUILD {$tableNameTree}) ; +alterCatalogStatementSuffix +@init { gParent.pushMsg("alter catalog statement", state); } +@after { gParent.popMsg(state); } + : alterCatalogSuffixSetLocation + ; + +alterCatalogSuffixSetLocation +@init { gParent.pushMsg("alter catalog set location", state); } +@after { gParent.popMsg(state); } + : catName=identifier KW_SET KW_LOCATION newLocation=StringLiteral + -> ^(TOK_ALTERCATALOG_LOCATION $catName $newLocation) + ; + alterDatabaseStatementSuffix @init { gParent.pushMsg("alter database statement", state); } @after { gParent.popMsg(state); } diff --git a/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveLexerParent.g b/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveLexerParent.g index 3d8b4ab7741a..bf13e8c4ea98 100644 --- a/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveLexerParent.g +++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveLexerParent.g @@ -216,6 +216,8 @@ KW_INTERSECT: 'INTERSECT'; KW_VIEW: 'VIEW'; KW_VIEWS: 'VIEWS'; KW_IN: 'IN'; +KW_CATALOG: 'CATALOG'; +KW_CATALOGS: 'CATALOGS'; KW_DATABASE: 'DATABASE'; KW_DATABASES: 'DATABASES'; KW_MATERIALIZED: 'MATERIALIZED'; diff --git a/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g b/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g index 497d2928a3bf..1f7acf2f7dd6 100644 --- 
a/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g +++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g @@ -157,6 +157,7 @@ TOK_STRUCT; TOK_MAP; TOK_UNIONTYPE; TOK_COLTYPELIST; +TOK_CREATECATALOG; TOK_CREATEDATABASE; TOK_CREATEDATACONNECTOR; TOK_CREATETABLE; @@ -229,6 +230,7 @@ TOK_RETAIN; TOK_WITH_SNAPSHOT_RETENTION; TOK_ALTERTABLE_CONVERT; TOK_MSCK; +TOK_SHOWCATALOGS; TOK_SHOWDATABASES; TOK_SHOWDATACONNECTORS; TOK_SHOWTABLES; @@ -246,6 +248,7 @@ TOK_UNLOCKTABLE; TOK_LOCKDB; TOK_UNLOCKDB; TOK_SWITCHDATABASE; +TOK_DROPCATALOG; TOK_DROPDATABASE; TOK_DROPTABLE; TOK_DATABASECOMMENT; @@ -369,6 +372,10 @@ TOK_SHOW_ROLES; TOK_SHOW_CURRENT_ROLE; TOK_SHOW_ROLE_PRINCIPALS; TOK_SHOWDBLOCKS; +TOK_DESCCATALOG; +TOK_CATALOGLOCATION; +TOK_CATALOGCOMMENT; +TOK_ALTERCATALOG_LOCATION; TOK_DESCDATABASE; TOK_DATABASEPROPERTIES; TOK_DATABASELOCATION; @@ -1000,7 +1007,9 @@ importStatement ddlStatement @init { pushMsg("ddl statement", state); } @after { popMsg(state); } - : createDatabaseStatement + : createCatalogStatement + | dropCatalogStatement + | createDatabaseStatement | switchDatabaseStatement | dropDatabaseStatement | createTableStatement @@ -1102,6 +1111,38 @@ orReplace -> ^(TOK_ORREPLACE) ; +createCatalogStatement +@init { pushMsg("create catalog statement", state); } +@after { popMsg(state); } + : KW_CREATE KW_CATALOG + ifNotExists? + name=identifier + catLocation + catalogComment? + -> ^(TOK_CREATECATALOG $name catLocation ifNotExists? catalogComment?) + ; + +catLocation +@init { pushMsg("catalog location specification", state); } +@after { popMsg(state); } + : + KW_LOCATION locn=StringLiteral -> ^(TOK_CATALOGLOCATION $locn) + ; + +catalogComment +@init { pushMsg("catalog's comment", state); } +@after { popMsg(state); } + : KW_COMMENT comment=StringLiteral + -> ^(TOK_CATALOGCOMMENT $comment) + ; + +dropCatalogStatement +@init { pushMsg("drop catalog statement", state); } +@after { popMsg(state); } + : KW_DROP KW_CATALOG ifExists? 
identifier + -> ^(TOK_DROPCATALOG identifier ifExists?) + ; + createDatabaseStatement @init { pushMsg("create database statement", state); } @after { popMsg(state); } @@ -1233,6 +1274,8 @@ descStatement : (KW_DESCRIBE|KW_DESC) ( + (KW_CATALOG) => (KW_CATALOG) KW_EXTENDED? (catName=identifier) -> ^(TOK_DESCCATALOG $catName KW_EXTENDED?) + | (KW_DATABASE|KW_SCHEMA) => (KW_DATABASE|KW_SCHEMA) KW_EXTENDED? (dbName=identifier) -> ^(TOK_DESCDATABASE $dbName KW_EXTENDED?) | (KW_DATACONNECTOR) => (KW_DATACONNECTOR) KW_EXTENDED? (dcName=identifier) -> ^(TOK_DESCDATACONNECTOR $dcName KW_EXTENDED?) @@ -1261,7 +1304,8 @@ analyzeStatement showStatement @init { pushMsg("show statement", state); } @after { popMsg(state); } - : KW_SHOW (KW_DATABASES|KW_SCHEMAS) (KW_LIKE showStmtIdentifier)? -> ^(TOK_SHOWDATABASES showStmtIdentifier?) + : KW_SHOW KW_CATALOGS (KW_LIKE showStmtIdentifier)? -> ^(TOK_SHOWCATALOGS showStmtIdentifier?) + | KW_SHOW (KW_DATABASES|KW_SCHEMAS) (KW_LIKE showStmtIdentifier)? -> ^(TOK_SHOWDATABASES showStmtIdentifier?) | KW_SHOW (isExtended=KW_EXTENDED)? KW_TABLES ((KW_FROM|KW_IN) db_name=identifier)? (filter=showTablesFilterExpr)? -> ^(TOK_SHOWTABLES (TOK_FROM $db_name)? $filter? $isExtended?) | KW_SHOW KW_VIEWS ((KW_FROM|KW_IN) db_name=identifier)? (KW_LIKE showStmtIdentifier|showStmtIdentifier)? -> ^(TOK_SHOWVIEWS (TOK_FROM $db_name)? showStmtIdentifier?) 
diff --git a/parser/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g b/parser/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g index 3d219dbe294a..7f08cb8828ea 100644 --- a/parser/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g +++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g @@ -973,7 +973,7 @@ nonReserved : KW_ABORT | KW_ADD | KW_ADMIN | KW_AFTER | KW_ANALYZE | KW_ARCHIVE | KW_ASC | KW_BEFORE | KW_BUCKET | KW_BUCKETS | KW_CASCADE | KW_CBO | KW_CHANGE | KW_CHECK | KW_CLUSTER | KW_CLUSTERED | KW_CLUSTERSTATUS | KW_COLLECTION | KW_COLUMNS - | KW_COMMENT | KW_COMPACT | KW_COMPACTIONS | KW_COMPUTE | KW_CONCATENATE | KW_CONTINUE | KW_COST | KW_DATA | KW_DAY + | KW_COMMENT | KW_COMPACT | KW_COMPACTIONS | KW_COMPUTE | KW_CONCATENATE | KW_CONTINUE | KW_COST | KW_DATA | KW_DAY | KW_CATALOG | KW_CATALOGS | KW_DATABASES | KW_DATETIME | KW_DBPROPERTIES | KW_DCPROPERTIES | KW_DEFERRED | KW_DEFINED | KW_DELIMITED | KW_DEPENDENCY | KW_DESC | KW_DIRECTORIES | KW_DIRECTORY | KW_DISABLE | KW_DISTRIBUTE | KW_DISTRIBUTED | KW_DOW | KW_ELEM_TYPE | KW_ENABLE | KW_ENFORCED | KW_ESCAPED | KW_EXCLUSIVE | KW_EXPLAIN | KW_EXPORT | KW_FIELDS | KW_FILE | KW_FILEFORMAT diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/AbstractAlterCatalogAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/AbstractAlterCatalogAnalyzer.java new file mode 100644 index 000000000000..414e9b572312 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/AbstractAlterCatalogAnalyzer.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.catalog.alter; + +import org.apache.hadoop.hive.metastore.api.Catalog; +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLWork; +import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.hooks.WriteEntity; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Analyzer for catalog alteration commands. + */ +public abstract class AbstractAlterCatalogAnalyzer extends BaseSemanticAnalyzer { + public AbstractAlterCatalogAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + public void analyzeInternal(ASTNode root) throws SemanticException { + AbstractAlterCatalogDesc alterDesc = buildAlterCatalogDesc(root); + Catalog catalog = getCatalog(alterDesc.getCatalogName()); + outputs.add(new WriteEntity(catalog, WriteEntity.WriteType.DDL_NO_LOCK)); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterDesc))); + } + + protected abstract AbstractAlterCatalogDesc buildAlterCatalogDesc(ASTNode root) + throws SemanticException; +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/AbstractAlterCatalogDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/AbstractAlterCatalogDesc.java new file mode 100644 index 000000000000..d144b2103f4d --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/AbstractAlterCatalogDesc.java 
@@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.catalog.alter; + +import org.apache.hadoop.hive.ql.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.plan.Explain; + +import java.io.Serializable; + +/** + * DDL task description for ALTER CATALOG commands. 
+ */ +public abstract class AbstractAlterCatalogDesc implements DDLDesc, Serializable { + private static final long serialVersionUID = 1L; + + private final String catalogName; + + public AbstractAlterCatalogDesc(String catalogName) { + this.catalogName = catalogName; + } + + @Explain(displayName="name", explainLevels = { Explain.Level.USER, Explain.Level.DEFAULT, Explain.Level.EXTENDED }) + public String getCatalogName() { + return catalogName; + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/AbstractAlterCatalogOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/AbstractAlterCatalogOperation.java new file mode 100644 index 000000000000..a78693e292e2 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/AbstractAlterCatalogOperation.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.ddl.catalog.alter; + +import org.apache.hadoop.hive.metastore.api.Catalog; +import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.ddl.DDLOperation; +import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; +import org.apache.hadoop.hive.ql.metadata.HiveException; + +/** + * Operation process of altering a catalog. + */ +public abstract class AbstractAlterCatalogOperation extends DDLOperation { + public AbstractAlterCatalogOperation(DDLOperationContext context, T desc) { + super(context, desc); + } + + @Override + public int execute() throws Exception { + String catalogName = desc.getCatalogName(); + Catalog catalog = context.getDb().getMSC().getCatalog(catalogName); + if (catalog == null) { + throw new HiveException(ErrorMsg.CATALOG_NOT_EXISTS, catalogName); + } + doAlteration(catalog); + + context.getDb().alterCatalog(catalogName, catalog); + return 0; + } + + protected abstract void doAlteration(Catalog catalog) throws HiveException; +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/location/AlterCatalogSetLocationAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/location/AlterCatalogSetLocationAnalyzer.java new file mode 100644 index 000000000000..ac380ae44ac9 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/location/AlterCatalogSetLocationAnalyzer.java @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.catalog.alter.location; + +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; +import org.apache.hadoop.hive.ql.ddl.catalog.alter.AbstractAlterCatalogAnalyzer; +import org.apache.hadoop.hive.ql.ddl.catalog.alter.AbstractAlterCatalogDesc; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Analyzer for catalog set location commands. + */ +@DDLType(types = HiveParser.TOK_ALTERCATALOG_LOCATION) +public class AlterCatalogSetLocationAnalyzer extends AbstractAlterCatalogAnalyzer { + public AlterCatalogSetLocationAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + protected AbstractAlterCatalogDesc buildAlterCatalogDesc(ASTNode root) throws SemanticException { + String catalogName = getUnescapedName((ASTNode) root.getChild(0)); + String newLocation = unescapeSQLString(root.getChild(1).getText()); + + outputs.add(toWriteEntity(newLocation)); + + return new AlterCatalogSetLocationDesc(catalogName, newLocation); + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/location/AlterCatalogSetLocationDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/location/AlterCatalogSetLocationDesc.java new file mode 100644 index 000000000000..22a4034ea48b --- /dev/null +++ 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/location/AlterCatalogSetLocationDesc.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.catalog.alter.location; + +import org.apache.hadoop.hive.ql.ddl.catalog.alter.AbstractAlterCatalogDesc; +import org.apache.hadoop.hive.ql.plan.Explain; + +/** + * DDL task description for ALTER CATALOG ... SET LOCATION ... commands. 
+ */ +@Explain(displayName = "Set Catalog Location", explainLevels = { Explain.Level.USER, Explain.Level.DEFAULT, Explain.Level.EXTENDED }) +public class AlterCatalogSetLocationDesc extends AbstractAlterCatalogDesc { + private final String location; + + public AlterCatalogSetLocationDesc(String catalogName, String location) { + super(catalogName); + this.location = location; + } + + @Explain(displayName="location") + public String getLocation() { + return location; + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/location/AlterCatalogSetLocationOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/location/AlterCatalogSetLocationOperation.java new file mode 100644 index 000000000000..d2974f58bb50 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/location/AlterCatalogSetLocationOperation.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.ddl.catalog.alter.location; + +import org.apache.commons.lang3.StringUtils; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.metastore.api.Catalog; +import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; +import org.apache.hadoop.hive.ql.ddl.catalog.alter.AbstractAlterCatalogOperation; +import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.metadata.HiveException; + +import java.net.URI; +import java.net.URISyntaxException; + +/** + * Operation process of altering a catalog's location. + */ +public class AlterCatalogSetLocationOperation extends AbstractAlterCatalogOperation { + public AlterCatalogSetLocationOperation(DDLOperationContext context, AlterCatalogSetLocationDesc desc) { + super(context, desc); + } + + @Override + protected void doAlteration(Catalog catalog) throws HiveException { + try { + String newLocation = Utilities.getQualifiedPath(context.getConf(), new Path(desc.getLocation())); + + URI locationURI = new URI(newLocation); + if (!locationURI.isAbsolute()) { + throw new HiveException(ErrorMsg.BAD_LOCATION_VALUE, newLocation); + } + + if (newLocation.equals(catalog.getLocationUri())) { + LOG.info("AlterCatalog skipped. 
No change in location."); + } else { + LOG.info("Catalog location changed from {} to {}", catalog.getLocationUri(), newLocation); + catalog.setLocationUri(newLocation); + } + } catch (URISyntaxException e) { + throw new HiveException(e); + } + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/create/CreateCatalogAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/create/CreateCatalogAnalyzer.java new file mode 100644 index 000000000000..4357f7cf465d --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/create/CreateCatalogAnalyzer.java @@ -0,0 +1,70 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.ddl.catalog.create; + +import org.apache.hadoop.hive.metastore.api.Catalog; +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory; +import org.apache.hadoop.hive.ql.ddl.DDLWork; +import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.hooks.WriteEntity; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Analyzer for catalog creation commands. + */ +@DDLSemanticAnalyzerFactory.DDLType(types = HiveParser.TOK_CREATECATALOG) +public class CreateCatalogAnalyzer extends BaseSemanticAnalyzer { + public CreateCatalogAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + public void analyzeInternal(ASTNode root) throws SemanticException { + String catalogName = unescapeIdentifier(root.getChild(0).getText()); + String locationUrl = unescapeSQLString(root.getChild(1).getChild(0).getText()); + outputs.add(toWriteEntity(locationUrl)); + + boolean ifNotExists = false; + String comment = null; + + for (int i = 2; i < root.getChildCount(); i++) { + ASTNode childNode = (ASTNode) root.getChild(i); + switch (childNode.getToken().getType()) { + case HiveParser.TOK_IFNOTEXISTS: + ifNotExists = true; + break; + case HiveParser.TOK_CATALOGCOMMENT: + comment = unescapeSQLString(childNode.getChild(0).getText()); + break; + default: + throw new SemanticException("Unrecognized token in CREATE CATALOG statement"); + } + } + + CreateCatalogDesc desc = new CreateCatalogDesc(catalogName, comment, locationUrl, ifNotExists); + Catalog catalog = new Catalog(catalogName, locationUrl); + + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc))); + outputs.add(new WriteEntity(catalog, WriteEntity.WriteType.DDL_NO_LOCK)); + } +} diff 
--git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/create/CreateCatalogDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/create/CreateCatalogDesc.java new file mode 100644 index 000000000000..030c4a8a6f71 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/create/CreateCatalogDesc.java @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.catalog.create; + +import org.apache.hadoop.hive.ql.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.plan.Explain; + +import java.io.Serializable; + +/** + * DDL task description for CREATE CATALOG commands. 
+ */ +@Explain(displayName = "Create CATALOG", explainLevels = { Explain.Level.USER, Explain.Level.DEFAULT, Explain.Level.EXTENDED }) +public class CreateCatalogDesc implements DDLDesc, Serializable { + private static final long serialVersionUID = 1L; + + private final String catalogName; + private final String comment; + private final String locationUri; + private final boolean ifNotExists; + + public CreateCatalogDesc(String catalogName, String comment, String locationUri, boolean ifNotExists) { + this.catalogName = catalogName; + this.comment = comment; + this.locationUri = locationUri; + this.ifNotExists = ifNotExists; + } + + @Explain(displayName="name", explainLevels = { Explain.Level.USER, Explain.Level.DEFAULT, Explain.Level.EXTENDED }) + public String getName() { + return catalogName; + } + + @Explain(displayName="comment") + public String getComment() { + return comment; + } + + @Explain(displayName="locationUri") + public String getLocationUri() { + return locationUri; + } + + @Explain(displayName="if not exists", displayOnlyOnTrue = true) + public boolean isIfNotExists() { + return ifNotExists; + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/create/CreateCatalogOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/create/CreateCatalogOperation.java new file mode 100644 index 000000000000..8b3db7cc5371 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/create/CreateCatalogOperation.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.catalog.create; + +import org.apache.hadoop.hive.metastore.api.AlreadyExistsException; +import org.apache.hadoop.hive.metastore.api.Catalog; +import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.ddl.DDLOperation; +import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; +import org.apache.hadoop.hive.ql.metadata.HiveException; + +/** + * Operation process of creating a catalog. + */ +public class CreateCatalogOperation extends DDLOperation { + public CreateCatalogOperation(DDLOperationContext context, CreateCatalogDesc desc) { + super(context, desc); + } + + @Override + public int execute() throws Exception { + Catalog catalog = new Catalog(desc.getName(), desc.getLocationUri()); + catalog.setDescription(desc.getComment()); + + try { + context.getDb().createCatalog(catalog, desc.isIfNotExists()); + } catch (AlreadyExistsException e) { + throw new HiveException(e, ErrorMsg.CATALOG_ALREADY_EXISTS, desc.getName()); + } + return 0; + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/desc/DescCatalogAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/desc/DescCatalogAnalyzer.java new file mode 100644 index 000000000000..c00d7bf105b0 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/desc/DescCatalogAnalyzer.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.catalog.desc; + +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; +import org.apache.hadoop.hive.ql.ddl.DDLWork; +import org.apache.hadoop.hive.ql.exec.Task; +import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.hooks.ReadEntity; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Analyzer for catalog description commands. 
+ */ +@DDLType(types = HiveParser.TOK_DESCCATALOG) +public class DescCatalogAnalyzer extends BaseSemanticAnalyzer { + public DescCatalogAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + public void analyzeInternal(ASTNode root) throws SemanticException { + if (root.getChildCount() == 0 || root.getChildCount() > 2) { + throw new SemanticException("Unexpected Tokens at DESCRIBE CATALOG"); + } + + ctx.setResFile(ctx.getLocalTmpPath()); + + String catalogName = root.getChild(0).getText(); + boolean isExtended = root.getChildCount() == 2; + + inputs.add(new ReadEntity(getCatalog(catalogName))); + + DescCatalogDesc desc = new DescCatalogDesc(ctx.getResFile(), catalogName, isExtended); + Task task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)); + rootTasks.add(task); + + task.setFetchSource(true); + setFetchTask(createFetchTask(desc.getSchema())); + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/desc/DescCatalogDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/desc/DescCatalogDesc.java new file mode 100644 index 000000000000..0f5e9eaf9b26 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/desc/DescCatalogDesc.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.catalog.desc; + +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.ql.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.plan.Explain; + +import java.io.Serializable; + +/** + * DDL task description for DESC CATALOG commands. + */ +@Explain(displayName = "Describe Catalog", explainLevels = { Explain.Level.USER, Explain.Level.DEFAULT, Explain.Level.EXTENDED }) +public class DescCatalogDesc implements DDLDesc, Serializable { + private static final long serialVersionUID = 1L; + + public static final String DESC_CATALOG_SCHEMA = "cat_name,comment,location#string:string:string"; + + public static final String DESC_CATALOG_SCHEMA_EXTENDED = "cat_name,comment,location,create_time#string:string:string:string"; + + private final String resFile; + private final String catName; + private final boolean isExtended; + + public DescCatalogDesc(Path resFile, String catName, boolean isExtended) { + this.resFile = resFile.toString(); + this.catName = catName; + this.isExtended = isExtended; + } + + @Explain(displayName = "result file", explainLevels = { Explain.Level.EXTENDED }) + public String getResFile() { + return resFile; + } + + @Explain(displayName = "catalog", explainLevels = { Explain.Level.USER, Explain.Level.DEFAULT, Explain.Level.EXTENDED }) + public String getCatName() { + return catName; + } + + @Explain(displayName = "extended", displayOnlyOnTrue=true, + explainLevels = { Explain.Level.USER, Explain.Level.DEFAULT, Explain.Level.EXTENDED }) + public boolean isExtended() { + return isExtended; + } + + public String getSchema() { + return isExtended ? 
DESC_CATALOG_SCHEMA_EXTENDED : DESC_CATALOG_SCHEMA; + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/desc/DescCatalogFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/desc/DescCatalogFormatter.java new file mode 100644 index 000000000000..c89e23b5c34f --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/desc/DescCatalogFormatter.java @@ -0,0 +1,92 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.catalog.desc; + +import org.apache.hadoop.hive.common.type.CalendarUtils; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.ddl.ShowUtils; +import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.metadata.formatting.MapBuilder; +import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatUtils; +import org.apache.hive.common.util.HiveStringUtils; + +import java.io.DataOutputStream; +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +/** + * Formats DESC CATALOG results. 
+ */ +abstract class DescCatalogFormatter { + static DescCatalogFormatter getFormatter(HiveConf hiveConf) { + if (MetaDataFormatUtils.isJson(hiveConf)) { + return new JsonDescCatalogFormatter(); + } + return new TextDescCatalogFormatter(); + } + + abstract void showCatalogDescription(DataOutputStream out, String catalog, String comment, String location, + int createTime) throws HiveException; + + // ------ Implementations ------ + static class JsonDescCatalogFormatter extends DescCatalogFormatter { + @Override + void showCatalogDescription(DataOutputStream out, String catalog, String comment, String location, + int createTime) throws HiveException { + MapBuilder builder = MapBuilder.create() + .put("catalog", catalog) + .put("comment", comment) + .put("location", location); + if (createTime != 0) { + builder.put("createTime", CalendarUtils.formatTimestamp((long) createTime * 1000, true)); + } + ShowUtils.asJson(out, builder.build()); + } + } + + static class TextDescCatalogFormatter extends DescCatalogFormatter { + @Override + void showCatalogDescription(DataOutputStream out, String catalog, String comment, String location, + int createTime) throws HiveException { + try { + writeLine(out, "Catalog Name", catalog); + if (comment != null) { + writeLine(out, "Comment", HiveStringUtils.escapeJava(comment)); + } + if (location != null) { + writeLine(out, "Location", location); + } + if (createTime != 0) { + String createTimeStr = CalendarUtils.formatTimestamp((long) createTime * 1000, true); + writeLine(out, "CreateTime", createTimeStr); + } + } catch (IOException e) { + throw new HiveException(e); + } + } + + private void writeLine(DataOutputStream out, String label, String value) throws IOException { + out.write(label.getBytes(StandardCharsets.UTF_8)); + out.write(Utilities.tabCode); + out.write(value.getBytes(StandardCharsets.UTF_8)); + out.write(Utilities.newLineCode); + } + } +} diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/desc/DescCatalogOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/desc/DescCatalogOperation.java new file mode 100644 index 000000000000..6509830991bd --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/desc/DescCatalogOperation.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.catalog.desc; + +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.metastore.api.Catalog; +import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.ddl.DDLOperation; +import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; +import org.apache.hadoop.hive.ql.ddl.ShowUtils; +import org.apache.hadoop.hive.ql.metadata.HiveException; + +import java.io.DataOutputStream; + +/** + * Operation process of describing a catalog. 
+ */ +public class DescCatalogOperation extends DDLOperation { + public DescCatalogOperation(DDLOperationContext context, DescCatalogDesc desc) { + super(context, desc); + } + + @Override + public int execute() throws Exception { + try (DataOutputStream outStream = ShowUtils.getOutputStream(new Path(desc.getResFile()), context)) { + Catalog catalog = context.getDb().getMSC().getCatalog(desc.getCatName()); + if (catalog == null) { + throw new HiveException(ErrorMsg.CATALOG_NOT_EXISTS, desc.getCatName()); + } + int createTime = 0; + if (desc.isExtended()) { + createTime = catalog.getCreateTime(); + } + DescCatalogFormatter formatter = DescCatalogFormatter.getFormatter(context.getConf()); + formatter.showCatalogDescription(outStream, catalog.getName(), catalog.getDescription(), + catalog.getLocationUri(), createTime); + } catch (Exception e) { + throw new HiveException(e, ErrorMsg.GENERIC_ERROR); + } + return 0; + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/drop/DropCatalogAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/drop/DropCatalogAnalyzer.java new file mode 100644 index 000000000000..e4e5d6d7da9c --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/drop/DropCatalogAnalyzer.java @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.catalog.drop; + +import org.apache.hadoop.hive.metastore.api.Catalog; +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; +import org.apache.hadoop.hive.ql.ddl.DDLWork; +import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.hooks.ReadEntity; +import org.apache.hadoop.hive.ql.hooks.WriteEntity; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Analyzer for catalog dropping commands. + */ +@DDLType (types = HiveParser.TOK_DROPCATALOG) +public class DropCatalogAnalyzer extends BaseSemanticAnalyzer { + public DropCatalogAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + public void analyzeInternal(ASTNode root) throws SemanticException { + String catalogName = unescapeIdentifier(root.getChild(0).getText()); + boolean ifExists = root.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null; + + Catalog catalog = getCatalog(catalogName, !ifExists); + if (catalog == null) { + return; + } + + // Drop catalog is non-cascaded, so we do not need to add databases to outputs. 
+ inputs.add(new ReadEntity(catalog)); + outputs.add(new WriteEntity(catalog, WriteEntity.WriteType.DDL_NO_LOCK)); + + DropCatalogDesc desc = new DropCatalogDesc(catalogName, ifExists); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc))); + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/drop/DropCatalogDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/drop/DropCatalogDesc.java new file mode 100644 index 000000000000..348ae8f84a4d --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/drop/DropCatalogDesc.java @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.catalog.drop; + +import org.apache.hadoop.hive.ql.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.plan.Explain; + +import java.io.Serializable; + +/** + * DDL task description for DROP CATALOG commands. 
+ */ +@Explain(displayName = "Drop Catalog", explainLevels = { Explain.Level.USER, Explain.Level.DEFAULT, Explain.Level.EXTENDED }) +public class DropCatalogDesc implements DDLDesc, Serializable { + private static final long serialVersionUID = 1L; + + private final String catalogName; + private final boolean ifExists; + + public DropCatalogDesc(String catalogName, boolean ifExists) { + this.catalogName = catalogName; + this.ifExists = ifExists; + } + + @Explain(displayName = "catalog", explainLevels = { Explain.Level.USER, Explain.Level.DEFAULT, Explain.Level.EXTENDED }) + public String getCatalogName() { + return catalogName; + } + + @Explain(displayName = "if exists") + public boolean getIfExists() { + return ifExists; + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/drop/DropCatalogOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/drop/DropCatalogOperation.java new file mode 100644 index 000000000000..f6574c0995a3 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/drop/DropCatalogOperation.java @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.ddl.catalog.drop; + +import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; +import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.ddl.DDLOperation; +import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; +import org.apache.hadoop.hive.ql.metadata.HiveException; + +/** + * Operation process of dropping a catalog. + */ +public class DropCatalogOperation extends DDLOperation { + public DropCatalogOperation(DDLOperationContext context, DropCatalogDesc desc) { + super(context, desc); + } + + @Override + public int execute() throws Exception { + String catName = desc.getCatalogName(); + try { + context.getDb().dropCatalog(catName, desc.getIfExists()); + } catch (NoSuchObjectException e) { + throw new HiveException(e, ErrorMsg.CATALOG_NOT_EXISTS, desc.getCatalogName()); + } + return 0; + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/show/ShowCatalogsAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/show/ShowCatalogsAnalyzer.java new file mode 100644 index 000000000000..0a1c9ff07a0d --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/show/ShowCatalogsAnalyzer.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.catalog.show; + +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory; +import org.apache.hadoop.hive.ql.ddl.DDLWork; +import org.apache.hadoop.hive.ql.exec.Task; +import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Analyzer for show catalogs commands. + */ +@DDLSemanticAnalyzerFactory.DDLType(types = HiveParser.TOK_SHOWCATALOGS) +public class ShowCatalogsAnalyzer extends BaseSemanticAnalyzer { + public ShowCatalogsAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + public void analyzeInternal(ASTNode root) throws SemanticException { + if (root.getChildCount() > 1) { + throw new SemanticException("Unexpected Tokens at SHOW CATALOGS"); + } + + ctx.setResFile(ctx.getLocalTmpPath()); + + String catalogPattern = root.getChildCount() == 1 ? 
unescapeSQLString(root.getChild(0).getText()) : null; + ShowCatalogsDesc desc = new ShowCatalogsDesc(ctx.getResFile(), catalogPattern); + + Task task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)); + rootTasks.add(task); + + task.setFetchSource(true); + setFetchTask(createFetchTask(ShowCatalogsDesc.SHOW_CATALOGS_SCHEMA)); + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/show/ShowCatalogsDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/show/ShowCatalogsDesc.java new file mode 100644 index 000000000000..8f4ab47c174b --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/show/ShowCatalogsDesc.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.catalog.show; + +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.ql.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.plan.Explain; + +import java.io.Serializable; + +/** + * DDL task description for SHOW CATALOGS commands. 
+ */ +@Explain(displayName = "Show Catalogs", explainLevels = { Explain.Level.USER, Explain.Level.DEFAULT, Explain.Level.EXTENDED }) +public class ShowCatalogsDesc implements DDLDesc, Serializable { + private static final long serialVersionUID = 1L; + + public static final String SHOW_CATALOGS_SCHEMA = "catalog_name#string"; + + private final String resFile; + private final String pattern; + + public ShowCatalogsDesc(Path resFile, String pattern) { + this.resFile = resFile.toString(); + this.pattern = pattern; + } + + @Explain(displayName = "pattern") + public String getPattern() { + return pattern; + } + + @Explain(displayName = "result file", explainLevels = { Explain.Level.EXTENDED }) + public String getResFile() { + return resFile; + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/show/ShowCatalogsFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/show/ShowCatalogsFormatter.java new file mode 100644 index 000000000000..9625c6cc6358 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/show/ShowCatalogsFormatter.java @@ -0,0 +1,71 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.ddl.catalog.show; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.ddl.ShowUtils; +import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.metadata.formatting.MapBuilder; +import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatUtils; + +import java.io.DataOutputStream; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.List; + +/** + * Formats SHOW CATALOGS results. + */ +abstract class ShowCatalogsFormatter { + public static ShowCatalogsFormatter getFormatter(HiveConf conf) { + if (MetaDataFormatUtils.isJson(conf)) { + return new JsonShowCatalogsFormatter(); + } else { + return new TextShowCatalogsFormatter(); + } + } + + abstract void showCatalogs(DataOutputStream out, List catalogs) throws HiveException; + + + // ------ Implementations ------ + + static class JsonShowCatalogsFormatter extends ShowCatalogsFormatter { + @Override + void showCatalogs(DataOutputStream out, List catalogs) throws HiveException { + ShowUtils.asJson(out, MapBuilder.create().put("catalogs", catalogs).build()); + } + } + + static class TextShowCatalogsFormatter extends ShowCatalogsFormatter { + @Override + void showCatalogs(DataOutputStream out, List catalogs) throws HiveException { + try { + for (String catalog : catalogs) { + out.write(catalog.getBytes(StandardCharsets.UTF_8)); + out.write(Utilities.newLineCode); + } + } catch (IOException e) { + throw new HiveException(e); + } + } + } + +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/show/ShowCatalogsOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/show/ShowCatalogsOperation.java new file mode 100644 index 000000000000..c3011fab3324 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/show/ShowCatalogsOperation.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software 
Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.catalog.show; + +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.ddl.DDLOperation; +import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; +import org.apache.hadoop.hive.ql.ddl.ShowUtils; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.udf.UDFLike; +import org.apache.hadoop.io.IOUtils; + +import java.io.DataOutputStream; +import java.util.List; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +/** + * Operation process of showing catalogs.
+ */ +public class ShowCatalogsOperation extends DDLOperation { + + public ShowCatalogsOperation(DDLOperationContext context, ShowCatalogsDesc desc) { + super(context, desc); + } + + @Override public int execute() throws Exception { + List catalogs = context.getDb().getMSC().getCatalogs(); + if (desc.getPattern() != null) { + LOG.debug("pattern: {}", desc.getPattern()); + Pattern pattern = Pattern.compile(UDFLike.likePatternToRegExp(desc.getPattern()), Pattern.CASE_INSENSITIVE); + catalogs = catalogs.stream().filter(name -> pattern.matcher(name).matches()).collect(Collectors.toList()); + } + + LOG.info("Found {} catalog(s) matching the SHOW CATALOGS statement.", catalogs.size()); + + // write the results in the file + DataOutputStream outStream = ShowUtils.getOutputStream(new Path(desc.getResFile()), context); + try { + ShowCatalogsFormatter formatter = ShowCatalogsFormatter.getFormatter(context.getConf()); + formatter.showCatalogs(outStream, catalogs); + } catch (Exception e) { + throw new HiveException(e, ErrorMsg.GENERIC_ERROR, "show catalogs"); + } finally { + IOUtils.closeStream(outStream); + } + + return 0; + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java index 72714e6297f9..b85988e55a8c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java @@ -23,6 +23,7 @@ import java.util.Map; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.metastore.api.Catalog; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.DataConnector; import org.apache.hadoop.hive.metastore.api.Function; @@ -43,9 +44,14 @@ public class Entity implements Serializable { * The type of the entity. 
*/ public static enum Type { - DATABASE, TABLE, PARTITION, DUMMYPARTITION, DFS_DIR, LOCAL_DIR, FUNCTION, SERVICE_NAME, DATACONNECTOR + DATABASE, TABLE, PARTITION, DUMMYPARTITION, DFS_DIR, LOCAL_DIR, FUNCTION, SERVICE_NAME, DATACONNECTOR, CATALOG } + /** + * The catalog if this is a catalog. + */ + private Catalog catalog; + /** * The database if this is a database. */ @@ -116,6 +122,14 @@ public String getName() { return name; } + public Catalog getCatalog() { + return catalog; + } + + public void setCatalog(Catalog catalog) { + this.catalog = catalog; + } + public Database getDatabase() { return database; } @@ -201,6 +215,13 @@ public Entity() { name = null; } + public Entity(Catalog catalog, boolean complete) { + this.catalog = catalog; + this.typ = Type.CATALOG; + this.name = computeName(); + this.complete = complete; + } + /** * Constructor for a database. * @@ -443,6 +464,8 @@ private String doComputeName() { return stringObject; case DATACONNECTOR: return "connector:" + connector.getName(); + case CATALOG: + return "catalog:" + catalog.getName(); default: return d.toString(); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/HiveProtoLoggingHook.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/HiveProtoLoggingHook.java index c0e3bb30054c..38c0a1419de7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/HiveProtoLoggingHook.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/HiveProtoLoggingHook.java @@ -20,6 +20,7 @@ import static org.apache.hadoop.hive.ql.hooks.Entity.Type.PARTITION; import static org.apache.hadoop.hive.ql.hooks.Entity.Type.TABLE; +import static org.apache.hadoop.hive.ql.plan.HiveOperation.ALTERCATALOG_LOCATION; import static org.apache.hadoop.hive.ql.plan.HiveOperation.ALTERDATABASE; import static org.apache.hadoop.hive.ql.plan.HiveOperation.ALTERDATABASE_OWNER; import static org.apache.hadoop.hive.ql.plan.HiveOperation.ALTERPARTITION_BUCKETNUM; @@ -151,7 +152,7 @@ public class HiveProtoLoggingHook implements 
ExecuteWithHookContext { RELOADFUNCTION, CREATEMACRO, DROPMACRO, CREATEVIEW, DROPVIEW, ALTERVIEW_PROPERTIES, LOCKTABLE, UNLOCKTABLE, CREATEROLE, DROPROLE, ALTERTABLE_FILEFORMAT, ALTERPARTITION_FILEFORMAT, ALTERTABLE_LOCATION, ALTERPARTITION_LOCATION, CREATETABLE, - TRUNCATETABLE, CREATETABLE_AS_SELECT, QUERY, ALTERDATABASE, ALTERDATABASE_OWNER, + TRUNCATETABLE, CREATETABLE_AS_SELECT, QUERY, ALTERCATALOG_LOCATION, ALTERDATABASE, ALTERDATABASE_OWNER, ALTERTABLE_MERGEFILES, ALTERPARTITION_MERGEFILES, ALTERTABLE_SKEWED, ALTERTBLPART_SKEWED_LOCATION, ALTERTABLE_PARTCOLTYPE, ALTERTABLE_EXCHANGEPARTITION, ALTERTABLE_DROPCONSTRAINT, ALTERTABLE_ADDCONSTRAINT, ALTERVIEW_RENAME, ALTERVIEW_AS, diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java index 68c139fd4716..4c94a5f58ab4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java @@ -25,6 +25,7 @@ import java.util.Set; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.metastore.api.Catalog; import org.apache.hadoop.hive.metastore.api.DataConnector; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.Function; @@ -76,6 +77,13 @@ public ReadEntity() { super(); } + /** + * Constructor for a catalog. + */ + public ReadEntity(Catalog catalog) { + super(catalog, true); + } + /** * Constructor for a database. 
*/ diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java index d3dd2c23504f..6563e4609706 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java @@ -20,6 +20,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.api.Catalog; import org.apache.hadoop.hive.metastore.api.DataConnector; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.Function; @@ -65,6 +66,11 @@ public WriteEntity() { super(); } + public WriteEntity(Catalog catalog, WriteType type) { + super(catalog, true); + setWriteTypeInternal(type); + } + public WriteEntity(Database database, WriteType type) { super(database, true); setWriteTypeInternal(type); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java index 818a1c7af5ba..38db9452057b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java @@ -29,7 +29,6 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder; import static org.apache.hadoop.hive.common.AcidConstants.SOFT_DELETE_TABLE; - import static org.apache.hadoop.hive.conf.Constants.MATERIALIZED_VIEW_REWRITING_TIME_WINDOW; import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVE_LOAD_DYNAMIC_PARTITIONS_SCAN_SPECIFIC_PARTITIONS; import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVE_WRITE_NOTIFICATION_MAX_BATCH_SIZE; @@ -144,6 +143,7 @@ import org.apache.hadoop.hive.metastore.api.AggrStats; import org.apache.hadoop.hive.metastore.api.AllTableConstraintsRequest; import org.apache.hadoop.hive.metastore.api.AlreadyExistsException; +import org.apache.hadoop.hive.metastore.api.Catalog; import 
org.apache.hadoop.hive.metastore.api.CheckConstraintsRequest; import org.apache.hadoop.hive.metastore.api.CmRecycleRequest; import org.apache.hadoop.hive.metastore.api.ColumnStatistics; @@ -151,8 +151,8 @@ import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj; import org.apache.hadoop.hive.metastore.api.CompactionResponse; import org.apache.hadoop.hive.metastore.api.CompactionType; -import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.DataConnector; +import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.DefaultConstraintsRequest; import org.apache.hadoop.hive.metastore.api.DropDatabaseRequest; import org.apache.hadoop.hive.metastore.api.EnvironmentContext; @@ -211,8 +211,8 @@ import org.apache.hadoop.hive.metastore.api.WMResourcePlan; import org.apache.hadoop.hive.metastore.api.WMTrigger; import org.apache.hadoop.hive.metastore.api.WMValidateResourcePlanResponse; -import org.apache.hadoop.hive.metastore.api.WriteNotificationLogRequest; import org.apache.hadoop.hive.metastore.api.WriteNotificationLogBatchRequest; +import org.apache.hadoop.hive.metastore.api.WriteNotificationLogRequest; import org.apache.hadoop.hive.metastore.api.AbortCompactionRequest; import org.apache.hadoop.hive.metastore.api.AbortCompactResponse; import org.apache.hadoop.hive.metastore.ReplChangeManager; @@ -624,6 +624,46 @@ public void close(boolean forceClose) { } } + /** + * Create a catalog + * @param catalog + * @param ifNotExist if true, will ignore AlreadyExistsException exception + * @throws AlreadyExistsException + * @throws HiveException + */ + public void createCatalog(Catalog catalog, boolean ifNotExist) + throws AlreadyExistsException, HiveException { + try { + getMSC().createCatalog(catalog); + } catch (AlreadyExistsException e) { + if (!ifNotExist) { + throw e; + } + } catch (Exception e) { + throw new HiveException(e); + } + } + + /** + * Drop a catalog. 
+ * @param catName + * @param ignoreUnknownCat if true, will ignore NoSuchObjectException. + * @throws HiveException + * @throws NoSuchObjectException + */ + public void dropCatalog(String catName, boolean ignoreUnknownCat) + throws HiveException, NoSuchObjectException { + try { + getMSC().dropCatalog(catName); + } catch (NoSuchObjectException e) { + if (!ignoreUnknownCat) { + throw e; + } + } catch (Exception e) { + throw new HiveException(e); + } + } + /** * Create a database * @param db @@ -2479,6 +2519,30 @@ public void validateDatabaseExists(String databaseName) throws SemanticException } } + public Catalog getCatalog(String catName) throws HiveException { + PerfLogger perfLogger = SessionState.getPerfLogger(); + perfLogger.perfLogBegin(CLASS_NAME, PerfLogger.HIVE_GET_CATALOG); + try { + return getMSC().getCatalog(catName); + } catch (NoSuchObjectException e) { + return null; + } catch (Exception e) { + throw new HiveException(e); + } finally { + perfLogger.perfLogEnd(CLASS_NAME, PerfLogger.HIVE_GET_CATALOG, "HS2-cache"); + } + } + + public void alterCatalog(String catName, Catalog catalog) throws HiveException { + try { + getMSC().alterCatalog(catName, catalog); + } catch (NoSuchObjectException e) { + throw new HiveException("Catalog " + catName + " does not exist.", e); + } catch (TException e) { + throw new HiveException("Unable to alter catalog " + catName + ". " + e.getMessage(), e); + } + } + /** * Query metadata to see if a database with the given name already exists. 
* diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java index 238b5aa9668e..99a00ffd5fd0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java @@ -47,6 +47,7 @@ import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.conf.Constants; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.api.Catalog; import org.apache.hadoop.hive.metastore.api.DataConnector; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.FieldSchema; @@ -1869,6 +1870,22 @@ public static Path tryQualifyPath(Path path, HiveConf conf) { } } + protected Catalog getCatalog(String catName) throws SemanticException { + return getCatalog(catName, true); + } + + protected Catalog getCatalog(String catName, boolean throwException) throws SemanticException { + try { + Catalog catalog = db.getCatalog(catName); + if (catalog == null && throwException) { + throw new SemanticException(ErrorMsg.CATALOG_NOT_EXISTS.getMsg(catName)); + } + return catalog; + } catch (Exception e) { + throw new SemanticException("Failed to retrieve catalog " + catName + ": " + e.getMessage(), e); + } + } + protected Database getDatabase(String dbName) throws SemanticException { return getDatabase(dbName, true); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java index 9c982a365509..a045114720b8 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java @@ -32,6 +32,8 @@ public enum HiveOperation { REPLDUMP("REPLDUMP", HiveParser.TOK_REPL_DUMP, new Privilege[]{Privilege.ALL}, null), REPLLOAD("REPLLOAD", HiveParser.TOK_REPL_LOAD, null, new 
Privilege[]{Privilege.ALL}), REPLSTATUS("REPLSTATUS", HiveParser.TOK_REPL_STATUS, new Privilege[]{Privilege.SELECT}, null), + CREATECATALOG("CREATECATALOG", HiveParser.TOK_CREATECATALOG, null, new Privilege[]{Privilege.CREATE}), + DROPCATALOG("DROPCATALOG", HiveParser.TOK_DROPCATALOG, null, new Privilege[]{Privilege.DROP}), CREATEDATABASE("CREATEDATABASE", HiveParser.TOK_CREATEDATABASE, null, new Privilege[]{Privilege.CREATE}), CREATEDATACONNECTOR("CREATEDATACONNECTOR", HiveParser.TOK_CREATEDATACONNECTOR, null, new Privilege[]{Privilege.CREATE}), DROPDATABASE("DROPDATABASE", HiveParser.TOK_DROPDATABASE, null, new Privilege[]{Privilege.DROP}), @@ -102,6 +104,8 @@ public enum HiveOperation { new Privilege[]{Privilege.ALTER_METADATA}, null), ALTERPARTITION_BUCKETNUM("ALTERPARTITION_BUCKETNUM", HiveParser.TOK_ALTERPARTITION_BUCKETS, new Privilege[]{Privilege.ALTER_METADATA}, null), + SHOWCATALOGS("SHOWCATALOGS", HiveParser.TOK_SHOWCATALOGS, new Privilege[]{Privilege.SHOW_CATALOG}, null, true, + false), SHOWDATABASES("SHOWDATABASES", HiveParser.TOK_SHOWDATABASES, new Privilege[]{Privilege.SHOW_DATABASE}, null, true, false), SHOWDATACONNECTORS("SHOWDATACONNECTORS", HiveParser.TOK_SHOWDATACONNECTORS, new Privilege[]{Privilege.SHOW_DATABASE}, null, true, @@ -165,11 +169,13 @@ public enum HiveOperation { new Privilege[]{Privilege.CREATE}), QUERY("QUERY", HiveParser.TOK_QUERY, new Privilege[]{Privilege.SELECT}, new Privilege[]{Privilege.ALTER_DATA, Privilege.CREATE}, true, false), + ALTERCATALOG_LOCATION("ALTERCATALOG_LOCATION", HiveParser.TOK_ALTERCATALOG_LOCATION, new Privilege[]{Privilege.ALTER_METADATA}, null), ALTERDATABASE("ALTERDATABASE", HiveParser.TOK_ALTERDATABASE_PROPERTIES, null, null), ALTERDATABASE_OWNER("ALTERDATABASE_OWNER", HiveParser.TOK_ALTERDATABASE_OWNER, null, null), ALTERDATABASE_LOCATION("ALTERDATABASE_LOCATION", new int[] {HiveParser.TOK_ALTERDATABASE_LOCATION, HiveParser.TOK_ALTERDATABASE_MANAGEDLOCATION}, new Privilege[]{Privilege.ALTER_DATA}, 
null), + DESCCATALOG("DESCCATALOG", HiveParser.TOK_DESCCATALOG, null, null), DESCDATABASE("DESCDATABASE", HiveParser.TOK_DESCDATABASE, null, null), ALTERDATACONNECTOR("ALTERDATACONNECTOR", HiveParser.TOK_ALTERDATACONNECTOR_PROPERTIES, null, null), ALTERDATACONNECTOR_OWNER("ALTERDATABASE_OWNER", HiveParser.TOK_ALTERDATACONNECTOR_OWNER, null, null), diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/Privilege.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/Privilege.java index 9ce8b0754bd2..4b88fab806e1 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/Privilege.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/Privilege.java @@ -117,6 +117,9 @@ public Privilege() { public static Privilege DELETE = new Privilege(PrivilegeType.DELETE, PrivilegeScope.ALLSCOPE_EXCEPT_COLUMN); + public static Privilege SHOW_CATALOG = new Privilege(PrivilegeType.SELECT, + EnumSet.of(PrivilegeScope.USER_LEVEL_SCOPE)); + public static Privilege SHOW_DATABASE = new Privilege(PrivilegeType.SHOW_DATABASE, EnumSet.of(PrivilegeScope.USER_LEVEL_SCOPE)); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java index 2578c570787e..0fbcf6d22aa0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java @@ -33,8 +33,10 @@ public enum HiveOperationType { REPLDUMP, REPLLOAD, REPLSTATUS, + CREATECATALOG, CREATEDATABASE, CREATEDATACONNECTOR, + DROPCATALOG, DROPDATABASE, DROPDATACONNECTOR, SWITCHDATABASE, @@ -75,6 +77,7 @@ public enum HiveOperationType { ALTERPARTITION_BUCKETNUM, ALTERTABLE_UPDATETABLESTATS, ALTERTABLE_UPDATEPARTSTATS, + SHOWCATALOGS, SHOWDATABASES, SHOWDATACONNECTORS, SHOWTABLES, @@ -121,12 +124,14 @@ public enum 
HiveOperationType { TRUNCATETABLE, CREATETABLE_AS_SELECT, QUERY, + ALTERCATALOG_LOCATION, ALTERDATABASE, ALTERDATABASE_OWNER, ALTERDATABASE_LOCATION, ALTERDATACONNECTOR, ALTERDATACONNECTOR_OWNER, ALTERDATACONNECTOR_URL, + DESCCATALOG, DESCDATABASE, DESCDATACONNECTOR, ALTERTABLE_MERGEFILES, diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java index 483be4f73d12..b727a3a6d25b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java @@ -470,6 +470,15 @@ public HivePrivilegeObjectType getObjectType() { op2Priv.put(HiveOperationType.ALTERDATACONNECTOR_URL, PrivRequirement.newIOPrivRequirement(null, ADMIN_PRIV_AR)); op2Priv.put(HiveOperationType.DESCDATACONNECTOR, PrivRequirement.newIOPrivRequirement(null, null)); op2Priv.put(HiveOperationType.SHOWDATACONNECTORS, PrivRequirement.newIOPrivRequirement(null, null)); + + op2Priv.put(HiveOperationType.CREATECATALOG, PrivRequirement.newPrivRequirementList( + new PrivRequirement(INS_SEL_DEL_NOGRANT_AR, HivePrivilegeObjectType.DFS_URI), + new PrivRequirement(INS_SEL_DEL_NOGRANT_AR, HivePrivilegeObjectType.LOCAL_URI), + new PrivRequirement(ADMIN_PRIV_AR, IOType.OUTPUT))); + op2Priv.put(HiveOperationType.DROPCATALOG, PrivRequirement.newIOPrivRequirement(null, ADMIN_PRIV_AR)); + op2Priv.put(HiveOperationType.ALTERCATALOG_LOCATION, PrivRequirement.newIOPrivRequirement(null, ADMIN_PRIV_AR)); + op2Priv.put(HiveOperationType.DESCCATALOG, PrivRequirement.newIOPrivRequirement(null, null)); + op2Priv.put(HiveOperationType.SHOWCATALOGS, PrivRequirement.newIOPrivRequirement(null, null)); } /** diff --git a/ql/src/test/queries/clientpositive/catalog.q b/ql/src/test/queries/clientpositive/catalog.q new file mode 100644 index 
000000000000..173e9a065da0 --- /dev/null +++ b/ql/src/test/queries/clientpositive/catalog.q @@ -0,0 +1,47 @@ +set hive.mapred.mode=nonstrict; +set hive.support.concurrency = true; + +-- SORT_QUERY_RESULTS +SHOW CATALOGS; + +-- CREATE with comment +CREATE CATALOG test_cat LOCATION '/tmp/test_cat' COMMENT 'Hive test catalog'; + +-- DESCRIBE +DESC CATALOG test_cat; + +-- CREATE INE already exists +CREATE CATALOG IF NOT EXISTS test_cat LOCATION '/tmp/test_cat'; +SHOW CATALOGS; + +-- DROP +DROP CATALOG test_cat; +SHOW CATALOGS; + +-- CREATE INE doesn't exist +CREATE CATALOG IF NOT EXISTS test_cat LOCATION '/tmp/test_cat' COMMENT 'Hive test catalog'; +SHOW CATALOGS; + +-- DROP IE exists +DROP CATALOG IF EXISTS test_cat; +SHOW CATALOGS; + +-- DROP IE doesn't exist +DROP CATALOG IF EXISTS test_cat; + +-- SHOW +CREATE CATALOG test_cat LOCATION '/tmp/test_cat' COMMENT 'Hive test catalog'; +SHOW CATALOGS; + +-- SHOW pattern +SHOW CATALOGS LIKE 'test%'; + +-- SHOW pattern +SHOW CATALOGS LIKE 'test_'; + +-- SHOW pattern +SHOW CATALOGS LIKE 'test__'; + +-- ALTER LOCATION +ALTER CATALOG test_cat SET LOCATION '/tmp/test_cat_new'; +DESC CATALOG EXTENDED test_cat; diff --git a/ql/src/test/results/clientpositive/llap/catalog.q.out b/ql/src/test/results/clientpositive/llap/catalog.q.out new file mode 100644 index 000000000000..6f9ef138dcd0 --- /dev/null +++ b/ql/src/test/results/clientpositive/llap/catalog.q.out @@ -0,0 +1,118 @@ +PREHOOK: query: SHOW CATALOGS +PREHOOK: type: SHOWCATALOGS +POSTHOOK: query: SHOW CATALOGS +POSTHOOK: type: SHOWCATALOGS +hive +#### A masked pattern was here #### +PREHOOK: type: CREATECATALOG +PREHOOK: Output: catalog:test_cat +#### A masked pattern was here #### +POSTHOOK: type: CREATECATALOG +POSTHOOK: Output: catalog:test_cat +#### A masked pattern was here #### +PREHOOK: query: DESC CATALOG test_cat +PREHOOK: type: DESCCATALOG +PREHOOK: Input: catalog:test_cat +POSTHOOK: query: DESC CATALOG test_cat +POSTHOOK: type: DESCCATALOG +POSTHOOK: Input: 
catalog:test_cat +Catalog Name test_cat +Comment Hive test catalog +#### A masked pattern was here #### +PREHOOK: type: CREATECATALOG +PREHOOK: Output: catalog:test_cat +#### A masked pattern was here #### +POSTHOOK: type: CREATECATALOG +POSTHOOK: Output: catalog:test_cat +#### A masked pattern was here #### +PREHOOK: query: SHOW CATALOGS +PREHOOK: type: SHOWCATALOGS +POSTHOOK: query: SHOW CATALOGS +POSTHOOK: type: SHOWCATALOGS +hive +test_cat +PREHOOK: query: DROP CATALOG test_cat +PREHOOK: type: DROPCATALOG +PREHOOK: Input: catalog:test_cat +PREHOOK: Output: catalog:test_cat +POSTHOOK: query: DROP CATALOG test_cat +POSTHOOK: type: DROPCATALOG +POSTHOOK: Input: catalog:test_cat +POSTHOOK: Output: catalog:test_cat +PREHOOK: query: SHOW CATALOGS +PREHOOK: type: SHOWCATALOGS +POSTHOOK: query: SHOW CATALOGS +POSTHOOK: type: SHOWCATALOGS +hive +#### A masked pattern was here #### +PREHOOK: type: CREATECATALOG +PREHOOK: Output: catalog:test_cat +#### A masked pattern was here #### +POSTHOOK: type: CREATECATALOG +POSTHOOK: Output: catalog:test_cat +#### A masked pattern was here #### +PREHOOK: query: SHOW CATALOGS +PREHOOK: type: SHOWCATALOGS +POSTHOOK: query: SHOW CATALOGS +POSTHOOK: type: SHOWCATALOGS +hive +test_cat +PREHOOK: query: DROP CATALOG IF EXISTS test_cat +PREHOOK: type: DROPCATALOG +PREHOOK: Input: catalog:test_cat +PREHOOK: Output: catalog:test_cat +POSTHOOK: query: DROP CATALOG IF EXISTS test_cat +POSTHOOK: type: DROPCATALOG +POSTHOOK: Input: catalog:test_cat +POSTHOOK: Output: catalog:test_cat +PREHOOK: query: SHOW CATALOGS +PREHOOK: type: SHOWCATALOGS +POSTHOOK: query: SHOW CATALOGS +POSTHOOK: type: SHOWCATALOGS +hive +PREHOOK: query: DROP CATALOG IF EXISTS test_cat +PREHOOK: type: DROPCATALOG +POSTHOOK: query: DROP CATALOG IF EXISTS test_cat +POSTHOOK: type: DROPCATALOG +#### A masked pattern was here #### +PREHOOK: type: CREATECATALOG +PREHOOK: Output: catalog:test_cat +#### A masked pattern was here #### +POSTHOOK: type: CREATECATALOG +POSTHOOK: 
Output: catalog:test_cat +#### A masked pattern was here #### +PREHOOK: query: SHOW CATALOGS +PREHOOK: type: SHOWCATALOGS +POSTHOOK: query: SHOW CATALOGS +POSTHOOK: type: SHOWCATALOGS +hive +test_cat +PREHOOK: query: SHOW CATALOGS LIKE 'test%' +PREHOOK: type: SHOWCATALOGS +POSTHOOK: query: SHOW CATALOGS LIKE 'test%' +POSTHOOK: type: SHOWCATALOGS +test_cat +PREHOOK: query: SHOW CATALOGS LIKE 'test_' +PREHOOK: type: SHOWCATALOGS +POSTHOOK: query: SHOW CATALOGS LIKE 'test_' +POSTHOOK: type: SHOWCATALOGS +PREHOOK: query: SHOW CATALOGS LIKE 'test__' +PREHOOK: type: SHOWCATALOGS +POSTHOOK: query: SHOW CATALOGS LIKE 'test__' +POSTHOOK: type: SHOWCATALOGS +#### A masked pattern was here #### +PREHOOK: type: ALTERCATALOG_LOCATION +PREHOOK: Output: catalog:test_cat +#### A masked pattern was here #### +POSTHOOK: type: ALTERCATALOG_LOCATION +POSTHOOK: Output: catalog:test_cat +#### A masked pattern was here #### +PREHOOK: query: DESC CATALOG EXTENDED test_cat +PREHOOK: type: DESCCATALOG +PREHOOK: Input: catalog:test_cat +POSTHOOK: query: DESC CATALOG EXTENDED test_cat +POSTHOOK: type: DESCCATALOG +POSTHOOK: Input: catalog:test_cat +Catalog Name test_cat +Comment Hive test catalog +#### A masked pattern was here ####