From eafbecaab24691e1c15201671f27bd88b66a5f04 Mon Sep 17 00:00:00 2001
From: Dhruba Borthakur
Date: Tue, 21 Oct 2008 18:11:05 +0000
Subject: [PATCH] HADOOP-4230. Fix for serde2 interface, limit operator,
 select * operator, UDF trim functions and sampling. (Ashish Thusoo via dhruba)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/core/trunk/src/contrib/hive@706704 13f79535-47bb-0310-9956-ffa450edef68
---
 bin/hive | 38 +-
 build-common.xml | 5 +-
 build.xml | 2 +
 .../org/apache/hadoop/hive/cli/CliDriver.java | 79 +-
 .../hadoop/hive/cli/CliSessionState.java | 7 +
 .../hadoop/hive/cli/OptionsProcessor.java | 9 +
 .../org/apache/hadoop/hive/conf/HiveConf.java | 5 +-
 conf/hive-default.xml | 15 +
 data/files/kv1_cb.txt | 500 ++++
 data/files/kv1_cc.txt | 500 ++++
 data/files/kv4.txt | 1 +
 metastore/if/hive_metastore.thrift | 5 +-
 .../hadoop/hive/metastore/HiveMetaStore.java | 48 +-
 .../hive/metastore/HiveMetaStoreClient.java | 3 +-
 .../hive/metastore/IMetaStoreClient.java | 14 +
 .../hive/metastore/MetaStoreClient.java | 5 +
 .../hadoop/hive/metastore/MetaStoreUtils.java | 93 +-
 .../hadoop/hive/metastore/ObjectStore.java | 26 +-
 ql/build.xml | 7 +
 .../org/apache/hadoop/hive/ql/Context.java | 22 +
 .../org/apache/hadoop/hive/ql/Driver.java | 107 +-
 .../hadoop/hive/ql/exec/ColumnInfo.java | 15 +-
 .../apache/hadoop/hive/ql/exec/DDLTask.java | 277 +-
 .../hadoop/hive/ql/exec/ExecDriver.java | 128 +-
 .../hadoop/hive/ql/exec/ExecMapper.java | 27 +-
 .../hadoop/hive/ql/exec/ExecReducer.java | 14 +
 .../hadoop/hive/ql/exec/ExplainTask.java | 16 +-
 .../hive/ql/exec/ExprNodeFieldEvaluator.java | 44 +-
 .../hive/ql/exec/ExprNodeIndexEvaluator.java | 34 +-
 .../apache/hadoop/hive/ql/exec/FetchTask.java | 204 ++
 .../hadoop/hive/ql/exec/FileSinkOperator.java | 22 +-
 .../hadoop/hive/ql/exec/FunctionRegistry.java | 17 +-
 .../hadoop/hive/ql/exec/LimitOperator.java | 53 +
 .../hadoop/hive/ql/exec/MapRedTask.java | 11 +
 .../apache/hadoop/hive/ql/exec/Operator.java | 26 +-
 .../hadoop/hive/ql/exec/OperatorFactory.java | 1 +
 .../hive/ql/exec/ReduceSinkOperator.java | 4 +-
 .../org/apache/hadoop/hive/ql/exec/Task.java | 13 +
 .../hadoop/hive/ql/exec/TaskFactory.java | 1 +
 .../org/apache/hadoop/hive/ql/exec/UDF.java | 22 +-
 .../apache/hadoop/hive/ql/exec/Utilities.java | 6 +-
 .../hadoop/hive/ql/io/HiveInputFormat.java | 5 +-
 .../hadoop/hive/ql/io/HiveRecordReader.java | 61 +
 .../apache/hadoop/hive/ql/metadata/Hive.java | 16 +-
 .../hadoop/hive/ql/metadata/Partition.java | 8 +
 .../apache/hadoop/hive/ql/metadata/Table.java | 4 +-
 .../hive/ql/parse/BaseSemanticAnalyzer.java | 70 +-
 .../hive/ql/parse/DDLSemanticAnalyzer.java | 128 +-
 .../apache/hadoop/hive/ql/parse/ErrorMsg.java | 8 +-
 .../ql/parse/ExplainSemanticAnalyzer.java | 22 +-
 .../ql/parse/FunctionSemanticAnalyzer.java | 2 +-
 .../org/apache/hadoop/hive/ql/parse/Hive.g | 64 +-
 .../hive/ql/parse/LoadSemanticAnalyzer.java | 4 +-
 .../hadoop/hive/ql/parse/PartitionPruner.java | 90 +-
 .../org/apache/hadoop/hive/ql/parse/QB.java | 34 +-
 .../hadoop/hive/ql/parse/QBMetaData.java | 20 +-
 .../hadoop/hive/ql/parse/QBParseInfo.java | 99 +-
 .../hadoop/hive/ql/parse/RowResolver.java | 8 +-
 .../hive/ql/parse/SemanticAnalyzer.java | 899 ++++---
 .../ql/parse/SemanticAnalyzerFactory.java | 5 +-
 .../apache/hadoop/hive/ql/plan/DDLWork.java | 34 +-
 .../apache/hadoop/hive/ql/plan/PlanUtils.java | 21 +-
 .../hadoop/hive/ql/plan/alterTableDesc.java | 10 +-
 .../hadoop/hive/ql/plan/createTableDesc.java | 40 +-
 .../hadoop/hive/ql/plan/descTableDesc.java | 47 +-
 .../hadoop/hive/ql/plan/dropTableDesc.java | 23 +
 .../hive/ql/plan/exprNodeColumnDesc.java | 3 +
 .../hive/ql/plan/exprNodeFieldDesc.java | 20 +-
 .../apache/hadoop/hive/ql/plan/fetchWork.java | 131 +
 .../apache/hadoop/hive/ql/plan/limitDesc.java | 39 +
 .../hadoop/hive/ql/plan/loadFileDesc.java | 22 +-
 .../hadoop/hive/ql/plan/mapredWork.java | 10 +
 .../hadoop/hive/ql/plan/reduceSinkDesc.java | 36 +-
 .../hive/ql/plan/showPartitionsDesc.java | 72 +
 .../apache/hadoop/hive/ql/plan/tableDesc.java | 2 +-
 .../hive/ql/typeinfo/StructTypeInfo.java | 6 +-
 .../apache/hadoop/hive/ql/udf/UDAFAvg.java | 22 +-
 .../apache/hadoop/hive/ql/udf/UDAFCount.java | 5 +-
 .../apache/hadoop/hive/ql/udf/UDAFMax.java | 18 +-
 .../apache/hadoop/hive/ql/udf/UDAFMin.java | 18 +-
 .../apache/hadoop/hive/ql/udf/UDAFSum.java | 18 +-
 .../hadoop/hive/ql/udf/UDFBaseCompare.java | 28 +-
 .../UDFCeil.java} | 33 +-
 .../apache/hadoop/hive/ql/udf/UDFConcat.java | 2 +-
 .../hive/ql/udf/UDFDefaultSampleHashFn.java | 2 +-
 .../apache/hadoop/hive/ql/udf/UDFFloor.java | 41 +
 .../apache/hadoop/hive/ql/udf/UDFLTrim.java | 2 +-
 .../apache/hadoop/hive/ql/udf/UDFLike.java | 2 +-
 .../apache/hadoop/hive/ql/udf/UDFLower.java | 2 +-
 .../apache/hadoop/hive/ql/udf/UDFOPAnd.java | 2 +-
 .../hadoop/hive/ql/udf/UDFOPBitAnd.java | 2 +-
 .../hadoop/hive/ql/udf/UDFOPBitNot.java | 2 +-
 .../apache/hadoop/hive/ql/udf/UDFOPBitOr.java | 2 +-
 .../hadoop/hive/ql/udf/UDFOPBitXor.java | 2 +-
 .../hadoop/hive/ql/udf/UDFOPDivide.java | 2 +-
 .../apache/hadoop/hive/ql/udf/UDFOPMinus.java | 2 +-
 .../apache/hadoop/hive/ql/udf/UDFOPMod.java | 2 +-
 .../hadoop/hive/ql/udf/UDFOPMultiply.java | 2 +-
 .../hadoop/hive/ql/udf/UDFOPNegative.java | 73 +
 .../apache/hadoop/hive/ql/udf/UDFOPNot.java | 2 +-
 .../hadoop/hive/ql/udf/UDFOPNotNull.java | 14 +-
 .../apache/hadoop/hive/ql/udf/UDFOPNull.java | 14 +-
 .../apache/hadoop/hive/ql/udf/UDFOPOr.java | 2 +-
 .../apache/hadoop/hive/ql/udf/UDFOPPlus.java | 2 +-
 .../hadoop/hive/ql/udf/UDFOPPositive.java | 53 +
 .../apache/hadoop/hive/ql/udf/UDFRTrim.java | 4 +-
 .../apache/hadoop/hive/ql/udf/UDFRand.java | 49 +
 .../apache/hadoop/hive/ql/udf/UDFRegExp.java | 2 +-
 .../hadoop/hive/ql/udf/UDFRegExpReplace.java | 2 +-
 .../apache/hadoop/hive/ql/udf/UDFRound.java | 41 +
 .../apache/hadoop/hive/ql/udf/UDFSize.java | 35 +-
 .../apache/hadoop/hive/ql/udf/UDFStrEq.java | 2 +-
 .../apache/hadoop/hive/ql/udf/UDFStrGe.java | 2 +-
 .../apache/hadoop/hive/ql/udf/UDFStrGt.java | 2 +-
 .../apache/hadoop/hive/ql/udf/UDFStrLe.java | 2 +-
 .../apache/hadoop/hive/ql/udf/UDFStrLt.java | 2 +-
 .../apache/hadoop/hive/ql/udf/UDFStrNe.java | 2 +-
 .../apache/hadoop/hive/ql/udf/UDFSubstr.java | 2 +-
 .../hadoop/hive/ql/udf/UDFToBoolean.java | 2 +-
 .../apache/hadoop/hive/ql/udf/UDFToByte.java | 2 +-
 .../apache/hadoop/hive/ql/udf/UDFToDate.java | 2 +-
 .../hadoop/hive/ql/udf/UDFToDouble.java | 2 +-
 .../apache/hadoop/hive/ql/udf/UDFToFloat.java | 2 +-
 .../hadoop/hive/ql/udf/UDFToInteger.java | 2 +-
 .../apache/hadoop/hive/ql/udf/UDFToLong.java | 2 +-
 .../hadoop/hive/ql/udf/UDFToString.java | 2 +-
 .../apache/hadoop/hive/ql/udf/UDFTrim.java | 4 +-
 .../apache/hadoop/hive/ql/udf/UDFUpper.java | 2 +-
 .../org/apache/hadoop/hive/ql/QTestUtil.java | 11 +-
 .../hadoop/hive/ql/exec/TestExecDriver.java | 20 +-
 .../hadoop/hive/ql/metadata/TestHive.java | 8 +-
 .../hadoop/hive/ql/udf/UDFTestLength.java | 2 +-
 .../apache/hadoop/hive/serde2/TestSerDe.java | 190 ++
 .../queries/clientnegative/strict_pruning.q | 6 +
 .../queries/clientpositive/case_sensitivity.q | 10 +
 .../queries/clientpositive/groupby1_limit.q | 10 +
.../queries/clientpositive/groupby2_limit.q | 7 + ql/src/test/queries/clientpositive/input.q | 4 + .../queries/clientpositive/input11_limit.q | 10 + .../queries/clientpositive/input14_limit.q | 20 + ql/src/test/queries/clientpositive/input16.q | 5 + .../test/queries/clientpositive/input16_cc.q | 6 + .../queries/clientpositive/input1_limit.q | 18 + .../queries/clientpositive/input2_limit.q | 4 + ql/src/test/queries/clientpositive/input3.q | 7 + .../queries/clientpositive/input4_cb_delim.q | 4 + .../test/queries/clientpositive/input_dfs.q | 4 + .../clientpositive/input_dynamicserde.q | 15 + .../test/queries/clientpositive/input_limit.q | 4 + .../test/queries/clientpositive/input_part1.q | 2 +- .../test/queries/clientpositive/input_part2.q | 16 + .../queries/clientpositive/input_testxpath.q | 6 +- .../queries/clientpositive/input_testxpath2.q | 10 + .../queries/clientpositive/input_testxpath3.q | 6 + .../test/queries/clientpositive/inputddl4.q | 2 + .../test/queries/clientpositive/inputddl6.q | 16 + ql/src/test/queries/clientpositive/join9.q | 10 + .../test/queries/clientpositive/nullinput.q | 4 + .../test/queries/clientpositive/show_tables.q | 15 + .../test/queries/clientpositive/showparts.q | 4 + ql/src/test/queries/clientpositive/subq2.q | 8 + ql/src/test/queries/clientpositive/udf2.q | 12 +- ql/src/test/queries/clientpositive/udf3.q | 10 + ql/src/test/queries/clientpositive/udf4.q | 8 + ql/src/test/queries/negative/invalid_dot.q | 2 + ql/src/test/queries/negative/invalid_index.q | 2 + .../queries/negative/invalid_list_index.q | 2 + .../queries/negative/invalid_list_index2.q | 2 + .../test/queries/negative/invalid_map_index.q | 2 + .../queries/negative/invalid_map_index2.q | 2 + .../test/queries/positive/case_sensitivity.q | 2 + .../test/queries/positive/input_testxpath.q | 2 +- .../test/queries/positive/input_testxpath2.q | 2 + ql/src/test/queries/positive/udf4.q | 1 + .../clientnegative/strict_pruning.q.out | 1 + .../clientpositive/case_sensitivity.q.out | 49 + .../test/results/clientpositive/cast1.q.out | 11 +- .../results/clientpositive/groupby1.q.out | 73 +- .../clientpositive/groupby1_limit.q.out | 86 + .../results/clientpositive/groupby2.q.out | 82 +- .../clientpositive/groupby2_limit.q.out | 78 + .../results/clientpositive/groupby3.q.out | 80 +- .../results/clientpositive/groupby4.q.out | 56 +- .../results/clientpositive/groupby5.q.out | 73 +- .../results/clientpositive/groupby6.q.out | 56 +- .../results/clientpositive/groupby7.q.out | 2 - .../results/clientpositive/groupby8.q.out | 2 - .../test/results/clientpositive/input.q.out | 512 ++++ .../test/results/clientpositive/input1.q.out | 2 +- .../test/results/clientpositive/input10.q.out | 2 +- .../test/results/clientpositive/input11.q.out | 11 +- .../clientpositive/input11_limit.q.out | 51 + .../test/results/clientpositive/input12.q.out | 33 +- .../test/results/clientpositive/input13.q.out | 39 +- .../test/results/clientpositive/input14.q.out | 47 +- .../clientpositive/input14_limit.q.out | 95 + .../test/results/clientpositive/input15.q.out | 8 +- .../test/results/clientpositive/input16.q.out | 500 ++++ .../results/clientpositive/input16_cc.q.out | 500 ++++ .../results/clientpositive/input1_limit.q.out | 79 + .../results/clientpositive/input2_limit.q.out | 38 + .../test/results/clientpositive/input3.q.out | 27 +- .../test/results/clientpositive/input4.q.out | 11 +- .../clientpositive/input4_cb_delim.q.out | 500 ++++ .../test/results/clientpositive/input5.q.out | 39 +- .../test/results/clientpositive/input6.q.out | 10 +- 
.../test/results/clientpositive/input7.q.out | 31 +- .../test/results/clientpositive/input8.q.out | 11 +- .../test/results/clientpositive/input9.q.out | 10 +- .../results/clientpositive/input_dfs.q.out | 500 ++++ .../clientpositive/input_dynamicserde.q.out | 52 + .../results/clientpositive/input_limit.q.out | 32 + .../results/clientpositive/input_part1.q.out | 60 +- .../results/clientpositive/input_part2.q.out | 333 +++ .../input_testsequencefile.q.out | 11 +- .../clientpositive/input_testxpath.q.out | 35 +- .../clientpositive/input_testxpath2.q.out | 52 + .../clientpositive/input_testxpath3.q.out | 38 + .../results/clientpositive/inputddl1.q.out | 6 +- .../results/clientpositive/inputddl2.q.out | 8 +- .../results/clientpositive/inputddl3.q.out | 8 +- .../results/clientpositive/inputddl4.q.out | 11 + .../results/clientpositive/inputddl5.q.out | 2 - .../results/clientpositive/inputddl6.q.out | 29 + .../test/results/clientpositive/join1.q.out | 53 +- .../test/results/clientpositive/join2.q.out | 81 +- .../test/results/clientpositive/join3.q.out | 61 +- .../test/results/clientpositive/join4.q.out | 81 +- .../test/results/clientpositive/join5.q.out | 81 +- .../test/results/clientpositive/join6.q.out | 81 +- .../test/results/clientpositive/join7.q.out | 105 +- .../test/results/clientpositive/join8.q.out | 89 +- .../test/results/clientpositive/join9.q.out | 1169 ++++++++ .../results/clientpositive/nullinput.q.out | 0 .../test/results/clientpositive/sample1.q.out | 82 +- .../test/results/clientpositive/sample2.q.out | 74 +- .../test/results/clientpositive/sample4.q.out | 1077 ++++---- .../test/results/clientpositive/sample5.q.out | 476 ++-- .../test/results/clientpositive/sample6.q.out | 565 ++-- .../test/results/clientpositive/sample7.q.out | 471 ++-- .../results/clientpositive/show_tables.q.out | 28 + .../results/clientpositive/showparts.q.out | 17 + ql/src/test/results/clientpositive/subq.q.out | 2 +- .../test/results/clientpositive/subq2.q.out | 340 +++ ql/src/test/results/clientpositive/udf1.q.out | 11 +- ql/src/test/results/clientpositive/udf2.q.out | 548 +--- ql/src/test/results/clientpositive/udf3.q.out | 91 + ql/src/test/results/clientpositive/udf4.q.out | 57 + .../test/results/clientpositive/union.q.out | 2 +- .../results/compiler/errors/invalid_dot.q.out | 2 + .../compiler/errors/invalid_index.q.out | 2 + .../compiler/errors/invalid_list_index.q.out | 2 + .../compiler/errors/invalid_list_index2.q.out | 2 + .../compiler/errors/invalid_map_index.q.out | 2 + .../compiler/errors/invalid_map_index2.q.out | 2 + .../compiler/parse/case_sensitivity.q.out | 1 + .../compiler/parse/input_testxpath.q.out | 2 +- .../compiler/parse/input_testxpath2.q.out | 1 + ql/src/test/results/compiler/parse/udf4.q.out | 1 + .../compiler/plan/case_sensitivity.q.xml | 521 ++++ ql/src/test/results/compiler/plan/cast1.q.xml | 124 +- .../test/results/compiler/plan/groupby1.q.xml | 273 +- .../test/results/compiler/plan/groupby2.q.xml | 402 ++- .../test/results/compiler/plan/groupby3.q.xml | 428 ++- .../test/results/compiler/plan/groupby4.q.xml | 292 +- .../test/results/compiler/plan/groupby5.q.xml | 273 +- .../test/results/compiler/plan/groupby6.q.xml | 292 +- .../test/results/compiler/plan/input1.q.xml | 36 +- .../test/results/compiler/plan/input2.q.xml | 72 +- .../test/results/compiler/plan/input3.q.xml | 79 +- .../test/results/compiler/plan/input4.q.xml | 39 +- .../test/results/compiler/plan/input5.q.xml | 41 +- .../test/results/compiler/plan/input6.q.xml | 38 +- .../test/results/compiler/plan/input7.q.xml | 46 +- 
.../test/results/compiler/plan/input8.q.xml | 90 +- .../test/results/compiler/plan/input9.q.xml | 46 +- .../results/compiler/plan/input_part1.q.xml | 48 +- .../plan/input_testsequencefile.q.xml | 36 +- .../compiler/plan/input_testxpath.q.xml | 122 +- .../compiler/plan/input_testxpath2.q.xml | 621 +++++ ql/src/test/results/compiler/plan/join1.q.xml | 42 +- ql/src/test/results/compiler/plan/join2.q.xml | 72 +- ql/src/test/results/compiler/plan/join3.q.xml | 45 +- ql/src/test/results/compiler/plan/join4.q.xml | 42 +- ql/src/test/results/compiler/plan/join5.q.xml | 42 +- ql/src/test/results/compiler/plan/join6.q.xml | 42 +- ql/src/test/results/compiler/plan/join7.q.xml | 45 +- ql/src/test/results/compiler/plan/join8.q.xml | 46 +- .../test/results/compiler/plan/sample1.q.xml | 48 +- .../test/results/compiler/plan/sample2.q.xml | 36 +- .../test/results/compiler/plan/sample3.q.xml | 38 +- .../test/results/compiler/plan/sample4.q.xml | 38 +- .../test/results/compiler/plan/sample5.q.xml | 38 +- .../test/results/compiler/plan/sample6.q.xml | 38 +- .../test/results/compiler/plan/sample7.q.xml | 38 +- ql/src/test/results/compiler/plan/subq.q.xml | 25 +- ql/src/test/results/compiler/plan/udf1.q.xml | 172 +- ql/src/test/results/compiler/plan/udf4.q.xml | 1038 ++++++++ ql/src/test/results/compiler/plan/union.q.xml | 25 +- .../test/templates/TestNegativeCliDriver.vm | 83 + ql/src/test/templates/TestParseNegative.vm | 10 +- serde/build.xml | 17 +- serde/if/serde.thrift | 1 + serde/if/test/complex.thrift | 15 + serde/if/{ => test}/testthrift.thrift | 0 .../apache/hadoop/hive/serde/Constants.java | 2 + .../dynamic_type/DynamicSerDeAsync.java | 24 +- .../DynamicSerDeCommaOrSemicolon.java | 32 + .../dynamic_type/DynamicSerDeConst.java | 32 + .../dynamic_type/DynamicSerDeConstList.java | 32 + .../DynamicSerDeConstListContents.java | 32 + .../dynamic_type/DynamicSerDeConstMap.java | 32 + .../DynamicSerDeConstMapContents.java | 32 + .../dynamic_type/DynamicSerDeConstValue.java | 32 + .../dynamic_type/DynamicSerDeDefinition.java | 32 + .../DynamicSerDeDefinitionType.java | 32 + .../serde2/dynamic_type/DynamicSerDeEnum.java | 32 + .../dynamic_type/DynamicSerDeEnumDef.java | 32 + .../dynamic_type/DynamicSerDeEnumDefList.java | 32 + .../dynamic_type/DynamicSerDeExtends.java | 32 + .../DynamicSerDeFieldRequiredness.java | 32 + .../dynamic_type/DynamicSerDeFieldValue.java | 32 + .../dynamic_type/DynamicSerDeFlagArgs.java | 32 + .../DynamicSerDeFunctionType.java | 32 + .../dynamic_type/DynamicSerDeHeader.java | 32 + .../dynamic_type/DynamicSerDeHeaderList.java | 32 + .../dynamic_type/DynamicSerDeInclude.java | 32 + .../dynamic_type/DynamicSerDeNamespace.java | 32 + .../dynamic_type/DynamicSerDeSenum.java | 32 + .../dynamic_type/DynamicSerDeSenumDef.java | 32 + .../DynamicSerDeSenumDefList.java | 32 + .../dynamic_type/DynamicSerDeService.java | 32 + .../dynamic_type/DynamicSerDeStart.java | 32 + .../dynamic_type/DynamicSerDeThrows.java | 32 + .../DynamicSerDeTypeDefinition.java | 32 + .../dynamic_type/DynamicSerDeUnflagArgs.java | 32 + .../dynamic_type/DynamicSerDeXception.java | 32 + .../dynamic_type/JJTthrift_grammarState.java | 123 + .../hadoop/hive/serde2/dynamic_type/Node.java | 34 + .../serde2/dynamic_type/ParseException.java | 192 ++ .../serde2/dynamic_type/SimpleCharStream.java | 439 +++ .../hive/serde2/dynamic_type/SimpleNode.java | 72 + .../hive/serde2/dynamic_type/Token.java | 81 + .../serde2/dynamic_type/TokenMgrError.java | 133 + .../serde2/dynamic_type/thrift_grammar.java | 2290 ++++++++++++++++ 
.../serde2/dynamic_type/thrift_grammar.jj | 2345 +++++++++++++++++ .../dynamic_type/thrift_grammarConstants.java | 133 + .../thrift_grammarTokenManager.java | 1455 ++++++++++ .../thrift_grammarTreeConstants.java | 105 + .../hive/serde2/thrift/test}/Complex.java | 170 +- .../hive/serde2/thrift/test/Constants.java | 18 + .../hive/serde2/thrift/test}/IntString.java | 54 +- .../__init__.py | 0 .../org_apache_hadoop_hive_serde/constants.py | 64 + .../ttypes.py | 0 serde/src/gen-py/serde/constants.py | 17 - .../hive/serde/ByteStreamTypedSerDe.java | 1 + .../hive/serde/ReflectionSerDeField.java | 26 +- .../org/apache/hadoop/hive/serde/SerDe.java | 2 - .../hive/serde/dynamic_type/DynamicSerDe.java | 19 - .../hadoop/hive/serde/jute/JuteSerDe.java | 18 - .../MetadataTypedColumnsetSerDe.java | 21 +- .../thrift/ThriftByteStreamTypedSerDe.java | 20 +- .../hadoop/hive/serde/thrift/ThriftSerDe.java | 18 - .../hive/serde/thrift/ThriftSerDeField.java | 23 +- .../hive/serde/thrift/columnsetSerDe.java | 41 +- .../hadoop/hive/serde2/Deserializer.java | 2 - .../serde2/MetadataTypedColumnsetSerDe.java | 60 +- .../apache/hadoop/hive/serde2/SerDeUtils.java | 70 +- .../apache/hadoop/hive/serde2/Serializer.java | 1 - .../serde2/ThriftByteStreamTypedSerDe.java | 11 - .../hive/serde2/ThriftDeserializer.java | 20 - .../serde2/dynamic_type/DynamicSerDe.java | 186 ++ .../dynamic_type/DynamicSerDeField.java | 47 + .../dynamic_type/DynamicSerDeFieldList.java | 214 ++ .../dynamic_type/DynamicSerDeFieldType.java | 42 + .../dynamic_type/DynamicSerDeFunction.java | 54 + .../dynamic_type/DynamicSerDeSimpleNode.java | 31 + .../dynamic_type/DynamicSerDeStruct.java | 51 + .../dynamic_type/DynamicSerDeStructBase.java | 89 + .../dynamic_type/DynamicSerDeTypeBase.java | 63 + .../dynamic_type/DynamicSerDeTypeBool.java | 64 + .../dynamic_type/DynamicSerDeTypeByte.java | 64 + .../dynamic_type/DynamicSerDeTypeDouble.java | 61 + .../dynamic_type/DynamicSerDeTypeList.java | 113 + .../dynamic_type/DynamicSerDeTypeMap.java | 129 + .../dynamic_type/DynamicSerDeTypeSet.java | 115 + .../dynamic_type/DynamicSerDeTypeString.java | 71 + .../dynamic_type/DynamicSerDeTypedef.java | 81 + .../dynamic_type/DynamicSerDeTypei16.java | 65 + .../dynamic_type/DynamicSerDeTypei32.java | 67 + .../dynamic_type/DynamicSerDeTypei64.java | 65 + .../serde2/dynamic_type/thrift_grammar.jjt | 866 ++++++ .../ReflectionStructObjectInspector.java | 11 +- .../serde2/thrift/ConfigurableTProtocol.java | 40 + .../serde2/thrift/TCTLSeparatedProtocol.java | 576 ++++ .../hadoop/hive/serde/TestSerDeUtils.java | 2 +- .../hive/serde/TestTCTLSeparatedProtocol.java | 83 - .../serde2/TestTCTLSeparatedProtocol.java | 245 ++ .../serde2/dynamic_type/TestDynamicSerDe.java | 204 ++ .../TestObjectInspectorUtils.java | 12 +- .../TestThriftObjectInspectors.java | 12 +- .../hive/serde2/thrift_test/Complex.java | 363 --- .../hive/serde2/thrift_test/IntString.java | 167 -- testutils/run_tests | 8 +- 405 files changed, 28154 insertions(+), 6709 deletions(-) create mode 100644 data/files/kv1_cb.txt create mode 100644 data/files/kv1_cc.txt create mode 100644 data/files/kv4.txt create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/exec/LimitOperator.java create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/io/HiveRecordReader.java create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/plan/fetchWork.java create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/plan/limitDesc.java create mode 100644 
ql/src/java/org/apache/hadoop/hive/ql/plan/showPartitionsDesc.java rename ql/src/java/org/apache/hadoop/hive/ql/{parse/OperatorInfoList.java => udf/UDFCeil.java} (63%) create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFloor.java create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNegative.java create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPositive.java create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRand.java create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRound.java rename serde/src/test/org/apache/hadoop/hive/serde2/thrift_test/Constants.java => ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSize.java (69%) create mode 100644 ql/src/test/org/apache/hadoop/hive/serde2/TestSerDe.java create mode 100644 ql/src/test/queries/clientnegative/strict_pruning.q create mode 100644 ql/src/test/queries/clientpositive/case_sensitivity.q create mode 100644 ql/src/test/queries/clientpositive/groupby1_limit.q create mode 100644 ql/src/test/queries/clientpositive/groupby2_limit.q create mode 100644 ql/src/test/queries/clientpositive/input.q create mode 100644 ql/src/test/queries/clientpositive/input11_limit.q create mode 100644 ql/src/test/queries/clientpositive/input14_limit.q create mode 100644 ql/src/test/queries/clientpositive/input16.q create mode 100644 ql/src/test/queries/clientpositive/input16_cc.q create mode 100644 ql/src/test/queries/clientpositive/input1_limit.q create mode 100644 ql/src/test/queries/clientpositive/input2_limit.q create mode 100644 ql/src/test/queries/clientpositive/input4_cb_delim.q create mode 100644 ql/src/test/queries/clientpositive/input_dfs.q create mode 100644 ql/src/test/queries/clientpositive/input_dynamicserde.q create mode 100644 ql/src/test/queries/clientpositive/input_limit.q create mode 100644 ql/src/test/queries/clientpositive/input_part2.q create mode 100644 ql/src/test/queries/clientpositive/input_testxpath2.q create mode 100644 ql/src/test/queries/clientpositive/input_testxpath3.q create mode 100644 ql/src/test/queries/clientpositive/inputddl6.q create mode 100644 ql/src/test/queries/clientpositive/join9.q create mode 100644 ql/src/test/queries/clientpositive/nullinput.q create mode 100644 ql/src/test/queries/clientpositive/show_tables.q create mode 100644 ql/src/test/queries/clientpositive/showparts.q create mode 100644 ql/src/test/queries/clientpositive/subq2.q create mode 100644 ql/src/test/queries/clientpositive/udf3.q create mode 100644 ql/src/test/queries/clientpositive/udf4.q create mode 100644 ql/src/test/queries/negative/invalid_dot.q create mode 100644 ql/src/test/queries/negative/invalid_index.q create mode 100644 ql/src/test/queries/negative/invalid_list_index.q create mode 100644 ql/src/test/queries/negative/invalid_list_index2.q create mode 100644 ql/src/test/queries/negative/invalid_map_index.q create mode 100644 ql/src/test/queries/negative/invalid_map_index2.q create mode 100644 ql/src/test/queries/positive/case_sensitivity.q create mode 100644 ql/src/test/queries/positive/input_testxpath2.q create mode 100644 ql/src/test/queries/positive/udf4.q create mode 100644 ql/src/test/results/clientnegative/strict_pruning.q.out create mode 100644 ql/src/test/results/clientpositive/case_sensitivity.q.out create mode 100644 ql/src/test/results/clientpositive/groupby1_limit.q.out create mode 100644 ql/src/test/results/clientpositive/groupby2_limit.q.out create mode 100644 ql/src/test/results/clientpositive/input.q.out create mode 100644 
ql/src/test/results/clientpositive/input11_limit.q.out create mode 100644 ql/src/test/results/clientpositive/input14_limit.q.out create mode 100644 ql/src/test/results/clientpositive/input16.q.out create mode 100644 ql/src/test/results/clientpositive/input16_cc.q.out create mode 100644 ql/src/test/results/clientpositive/input1_limit.q.out create mode 100644 ql/src/test/results/clientpositive/input2_limit.q.out create mode 100644 ql/src/test/results/clientpositive/input4_cb_delim.q.out create mode 100644 ql/src/test/results/clientpositive/input_dfs.q.out create mode 100644 ql/src/test/results/clientpositive/input_dynamicserde.q.out create mode 100644 ql/src/test/results/clientpositive/input_limit.q.out create mode 100644 ql/src/test/results/clientpositive/input_part2.q.out create mode 100644 ql/src/test/results/clientpositive/input_testxpath2.q.out create mode 100644 ql/src/test/results/clientpositive/input_testxpath3.q.out create mode 100644 ql/src/test/results/clientpositive/inputddl6.q.out create mode 100644 ql/src/test/results/clientpositive/join9.q.out create mode 100644 ql/src/test/results/clientpositive/nullinput.q.out create mode 100644 ql/src/test/results/clientpositive/show_tables.q.out create mode 100644 ql/src/test/results/clientpositive/showparts.q.out create mode 100644 ql/src/test/results/clientpositive/subq2.q.out create mode 100644 ql/src/test/results/clientpositive/udf3.q.out create mode 100644 ql/src/test/results/clientpositive/udf4.q.out create mode 100644 ql/src/test/results/compiler/errors/invalid_dot.q.out create mode 100644 ql/src/test/results/compiler/errors/invalid_index.q.out create mode 100644 ql/src/test/results/compiler/errors/invalid_list_index.q.out create mode 100644 ql/src/test/results/compiler/errors/invalid_list_index2.q.out create mode 100644 ql/src/test/results/compiler/errors/invalid_map_index.q.out create mode 100644 ql/src/test/results/compiler/errors/invalid_map_index2.q.out create mode 100644 ql/src/test/results/compiler/parse/case_sensitivity.q.out create mode 100644 ql/src/test/results/compiler/parse/input_testxpath2.q.out create mode 100644 ql/src/test/results/compiler/parse/udf4.q.out create mode 100644 ql/src/test/results/compiler/plan/case_sensitivity.q.xml create mode 100644 ql/src/test/results/compiler/plan/input_testxpath2.q.xml create mode 100644 ql/src/test/results/compiler/plan/udf4.q.xml create mode 100644 ql/src/test/templates/TestNegativeCliDriver.vm create mode 100644 serde/if/test/complex.thrift rename serde/if/{ => test}/testthrift.thrift (100%) rename ql/src/java/org/apache/hadoop/hive/ql/thrift/Constants.java => serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeAsync.java (69%) create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeCommaOrSemicolon.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConst.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstList.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstListContents.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstMap.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstMapContents.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstValue.java create mode 100644 
serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeDefinition.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeDefinitionType.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeEnum.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeEnumDef.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeEnumDefList.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeExtends.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldRequiredness.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldValue.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFlagArgs.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFunctionType.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeHeader.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeHeaderList.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeInclude.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeNamespace.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeSenum.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeSenumDef.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeSenumDefList.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeService.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeStart.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeThrows.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeDefinition.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeUnflagArgs.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeXception.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/JJTthrift_grammarState.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/Node.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/ParseException.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/SimpleCharStream.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/SimpleNode.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/Token.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/TokenMgrError.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammar.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammar.jj create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammarConstants.java create mode 100644 
serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammarTokenManager.java create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammarTreeConstants.java rename {ql/src/java/org/apache/hadoop/hive/ql/thrift => serde/src/gen-java/org/apache/hadoop/hive/serde2/thrift/test}/Complex.java (61%) create mode 100644 serde/src/gen-java/org/apache/hadoop/hive/serde2/thrift/test/Constants.java rename {ql/src/java/org/apache/hadoop/hive/ql/thrift => serde/src/gen-java/org/apache/hadoop/hive/serde2/thrift/test}/IntString.java (65%) rename serde/src/gen-py/{serde => org_apache_hadoop_hive_serde}/__init__.py (100%) create mode 100644 serde/src/gen-py/org_apache_hadoop_hive_serde/constants.py rename serde/src/gen-py/{serde => org_apache_hadoop_hive_serde}/ttypes.py (100%) delete mode 100644 serde/src/gen-py/serde/constants.py create mode 100644 serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java create mode 100644 serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeField.java create mode 100644 serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldList.java create mode 100644 serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldType.java create mode 100644 serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFunction.java create mode 100644 serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeSimpleNode.java create mode 100644 serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeStruct.java create mode 100644 serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeStructBase.java create mode 100644 serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeBase.java create mode 100644 serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeBool.java create mode 100644 serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeByte.java create mode 100644 serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeDouble.java create mode 100644 serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeList.java create mode 100644 serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeMap.java create mode 100644 serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeSet.java create mode 100644 serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeString.java create mode 100644 serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypedef.java create mode 100644 serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypei16.java create mode 100644 serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypei32.java create mode 100644 serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypei64.java create mode 100644 serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammar.jjt create mode 100644 serde/src/java/org/apache/hadoop/hive/serde2/thrift/ConfigurableTProtocol.java create mode 100644 serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java delete mode 100644 serde/src/test/org/apache/hadoop/hive/serde/TestTCTLSeparatedProtocol.java create mode 100644 serde/src/test/org/apache/hadoop/hive/serde2/TestTCTLSeparatedProtocol.java create mode 100644 serde/src/test/org/apache/hadoop/hive/serde2/dynamic_type/TestDynamicSerDe.java delete mode 100644 
serde/src/test/org/apache/hadoop/hive/serde2/thrift_test/Complex.java delete mode 100644 serde/src/test/org/apache/hadoop/hive/serde2/thrift_test/IntString.java diff --git a/bin/hive b/bin/hive index ff00645aa..947a89463 100755 --- a/bin/hive +++ b/bin/hive @@ -48,19 +48,36 @@ done # add the auxillary jars such as serdes if [ -d "${HIVE_AUX_JARS_PATH}" ]; then for f in ${HIVE_AUX_JARS_PATH}/*.jar; do - AUX_CLASSPATH=${AUX_CLASSPATH}:$f; + if [[ ! -f $f ]]; then + continue; + fi + AUX_CLASSPATH=${AUX_CLASSPATH}:$f if [ "${AUX_PARAM}" == "" ]; then - AUX_PARAM=$f + AUX_PARAM=file://$f else - AUX_PARAM=${AUX_PARAM},$f; + AUX_PARAM=${AUX_PARAM},file://$f; fi done -else +elif [ "${HIVE_AUX_JARS_PATH}" != "" ]; then AUX_CLASSPATH=${HIVE_AUX_JARS_PATH} - AUX_PARAM=${HIVE_AUX_JARS_PATH} + AUX_PARAM=file://${HIVE_AUX_JARS_PATH} + AUX_PARAM=`echo $AUX_PARAM | sed 's/,/,file:\/\//g'` fi -CLASSPATH=${CLASSPATH}:${AUX_CLASSPATH} +# adding jars from auxlib directory +for f in ${HIVE_HOME}/auxlib/*.jar; do + if [[ ! -f $f ]]; then + continue; + fi + + AUX_CLASSPATH=${AUX_CLASSPATH}:$f + if [ "${AUX_PARAM}" == "" ]; then + AUX_PARAM=file://$f + else + AUX_PARAM=${AUX_PARAM},file://$f; + fi +done +CLASSPATH=${CLASSPATH}:${AUX_CLASSPATH} # pass classpath to hadoop export HADOOP_CLASSPATH=${CLASSPATH} @@ -79,23 +96,24 @@ fi HADOOP=$HADOOP_HOME/bin/hadoop if [ ! -f ${HADOOP} ]; then - echo "Cannot find hadoop installation: \"$HADOOP\" does not exist"; + echo "Cannot find hadoop installation: \$HADOOP_HOME must be set or hadoop must be in the path"; exit 4; fi # override default value of hadoop.bin.path to point to what we are running off # if the user specified a -D override - this will be further overriden -export HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.bin.path=$HADOOP" +export HIVE_OPTS="-hiveconf hadoop.bin.path=$HADOOP" CLASS=org.apache.hadoop.hive.cli.CliDriver if [ "${AUX_PARAM}" != "" ]; then - HADOOP_OPTS="-Dhive.aux.jars.path=${AUX_PARAM}" + HIVE_OPTS="$HIVE_OPTS -hiveconf hive.aux.jars.path=${AUX_PARAM}" AUX_JARS_CMD_LINE="-libjars ${AUX_PARAM}" fi #echo "CLASSPATH="$CLASSPATH #echo "AUX_LIB_PATH"=$AUX_PARAM #echo "HADOOP_OPTS="$HADOOP_OPTS # note: $@ only works in "$@" -exec $HADOOP jar $AUX_JARS_CMD_LINE ${HIVE_LIB}/hive_cli.jar $CLASS "$@" +#echo $HADOOP jar $AUX_JARS_CMD_LINE ${HIVE_LIB}/hive_cli.jar $CLASS $HIVE_OPTS "$@" +exec $HADOOP jar $AUX_JARS_CMD_LINE ${HIVE_LIB}/hive_cli.jar $CLASS $HIVE_OPTS "$@" diff --git a/build-common.xml b/build-common.xml index 991659eaa..f5c26eec8 100644 --- a/build-common.xml +++ b/build-common.xml @@ -53,6 +53,7 @@ + @@ -64,6 +65,7 @@ + @@ -95,6 +97,7 @@ + @@ -215,7 +218,7 @@ - + diff --git a/build.xml b/build.xml index a29c75d75..b3ccb5f91 100644 --- a/build.xml +++ b/build.xml @@ -140,9 +140,11 @@ + + diff --git a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java index c65d6dacc..7b9239397 100644 --- a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java +++ b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java @@ -23,6 +23,7 @@ import java.io.*; import java.util.*; +import org.apache.hadoop.fs.FsShell; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.exec.Utilities.StreamPrinter; @@ -35,9 +36,11 @@ public class CliDriver { public final static String prompt = "hive"; + public final static String prompt2 = " "; // when ';' is not yet seen public static SetProcessor sp; public static Driver qp; + public static FsShell 
dfs; public CliDriver(CliSessionState ss) { SessionState.start(ss); @@ -81,30 +84,46 @@ public static int processCmd(String cmd) { catch (Exception e) { e.printStackTrace(); } - - } - else { + } else if (cmd.startsWith("dfs")) { + // dfs shell commands + SessionState ss = SessionState.get(); + if(dfs == null) + dfs = new FsShell(ss.getConf()); + String hadoopCmd = cmd.replaceFirst("dfs\\s+", ""); + hadoopCmd = hadoopCmd.trim(); + if (hadoopCmd.endsWith(";")) { + hadoopCmd = hadoopCmd.substring(0, hadoopCmd.length()-1); + } + String[] args = hadoopCmd.split("\\s+"); + try { + PrintStream oldOut = System.out; + System.setOut(ss.out); + int val = dfs.run(args); + System.setOut(oldOut); + if (val != 0) { + ss.err.write((new String("Command failed with exit code = " + val)).getBytes()); + } + } catch (Exception e) { + ss.err.println("Exception raised from DFSShell.run " + e.getLocalizedMessage()); + } + } else { ret = qp.run(cmd); - Vector> res = new Vector>(); + Vector res = new Vector(); while (qp.getResults(res)) { - SessionState ss = SessionState.get(); - PrintStream out = ss.out; - - for (Vector row:res) - { - boolean firstCol = true; - for (String col:row) - { - if (!firstCol) - out.write(Utilities.tabCode); - out.print(col == null ? Utilities.nullStringOutput : col); - firstCol = false; - } - out.write(Utilities.newLineCode); - } + for (String r:res) { + SessionState ss = SessionState.get(); + PrintStream out = ss.out; + out.println(r); + } res.clear(); } + + int cret = qp.close(); + if (ret == 0) { + ret = cret; + } } + return ret; } @@ -162,8 +181,15 @@ public static void main(String[] args) throws IOException { System.exit(2); } + // set all properties specified via command line + HiveConf conf = ss.getConf(); + for(Map.Entry item: ss.cmdProperties.entrySet()) { + conf.set((String) item.getKey(), (String) item.getValue()); + } + sp = new SetProcessor(); qp = new Driver(); + dfs = new FsShell(ss.getConf()); if(ss.execString != null) { System.exit(processLine(ss.execString)); @@ -199,9 +225,20 @@ public static void main(String[] args) throws IOException { int ret = 0; Log LOG = LogFactory.getLog("CliDriver"); LogHelper console = new LogHelper(LOG); - while ((line = reader.readLine(prompt+"> ")) != null) { + String prefix = ""; + String curPrompt = prompt; + while ((line = reader.readLine(curPrompt+"> ")) != null) { long start = System.currentTimeMillis(); - ret = processLine(line); + if(line.trim().endsWith(";")) { + line = prefix + " " + line; + ret = processLine(line); + prefix = ""; + curPrompt = prompt; + } else { + prefix = prefix + line; + curPrompt = prompt2; + continue; + } long end = System.currentTimeMillis(); if (end > start) { double timeTaken = (double)(end-start)/1000.0; diff --git a/cli/src/java/org/apache/hadoop/hive/cli/CliSessionState.java b/cli/src/java/org/apache/hadoop/hive/cli/CliSessionState.java index 012a67420..6f173de99 100644 --- a/cli/src/java/org/apache/hadoop/hive/cli/CliSessionState.java +++ b/cli/src/java/org/apache/hadoop/hive/cli/CliSessionState.java @@ -18,6 +18,8 @@ package org.apache.hadoop.hive.cli; +import java.util.Properties; + import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.metadata.Hive; @@ -33,6 +35,11 @@ public class CliSessionState extends SessionState { */ public String fileName; + /** + * properties set from -hiveconf via cmdline + */ + public Properties cmdProperties = new Properties(); + public CliSessionState() { super(); diff --git 
a/cli/src/java/org/apache/hadoop/hive/cli/OptionsProcessor.java b/cli/src/java/org/apache/hadoop/hive/cli/OptionsProcessor.java index e466e353f..38af7fca9 100644 --- a/cli/src/java/org/apache/hadoop/hive/cli/OptionsProcessor.java +++ b/cli/src/java/org/apache/hadoop/hive/cli/OptionsProcessor.java @@ -200,6 +200,15 @@ public boolean process_stage2(CliSessionState ss) { printUsage("-e and -f option cannot be specified simultaneously"); return false; } + + List hiveConfArgs = (List)cmdLine.getValue(confOptions); + if (null != hiveConfArgs){ + for(String s : hiveConfArgs){ + String []parts = s.split("=", 2); + ss.cmdProperties.setProperty(parts[0], parts[1]); + } + } + return true; } diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index ecf86de42..3ffd24cd9 100644 --- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -80,6 +80,7 @@ public static enum ConfVars { // for hive script operator HIVETABLENAME("hive.table.name", ""), HIVEPARTITIONNAME("hive.partition.name", ""), + HIVEPARTITIONPRUNER("hive.partition.pruning", "nonstrict"), HIVEALIAS("hive.alias", ""); public final String varname; @@ -170,13 +171,13 @@ private void initialize(Class cls) { // let's add the hive configuration URL hconfurl = getClassLoader().getResource("hive-default.xml"); if(hconfurl == null) { - l4j.warn("Unable to locate default hive configuration"); + l4j.debug("hive-default.xml not found."); } else { addResource(hconfurl); } URL hsiteurl = getClassLoader().getResource("hive-site.xml"); if(hsiteurl == null) { - l4j.warn("Unable to locate hive site configuration"); + l4j.debug("hive-site.xml not found."); } else { addResource(hsiteurl); } diff --git a/conf/hive-default.xml b/conf/hive-default.xml index dc3e1eb84..8eb0941cb 100644 --- a/conf/hive-default.xml +++ b/conf/hive-default.xml @@ -9,6 +9,21 @@ + + + hadoop.bin.path + ${user.dir}/../../../bin/hadoop + + Path to hadoop binary. Assumes that by default we are executing from hive + + + + hadoop.config.dir + ${user.dir}/../../../conf + + Path to hadoop configuration. 
Again assumes that by default we are executing from hive/ + + hive.exec.scratchdir diff --git a/data/files/kv1_cb.txt b/data/files/kv1_cb.txt new file mode 100644 index 000000000..12b51587c --- /dev/null +++ b/data/files/kv1_cb.txt @@ -0,0 +1,500 @@ +238val_238 +86val_86 +311val_311 +27val_27 +165val_165 +409val_409 +255val_255 +278val_278 +98val_98 +484val_484 +265val_265 +193val_193 +401val_401 +150val_150 +273val_273 +224val_224 +369val_369 +66val_66 +128val_128 +213val_213 +146val_146 +406val_406 +429val_429 +374val_374 +152val_152 +469val_469 +145val_145 +495val_495 +37val_37 +327val_327 +281val_281 +277val_277 +209val_209 +15val_15 +82val_82 +403val_403 +166val_166 +417val_417 +430val_430 +252val_252 +292val_292 +219val_219 +287val_287 +153val_153 +193val_193 +338val_338 +446val_446 +459val_459 +394val_394 +237val_237 +482val_482 +174val_174 +413val_413 +494val_494 +207val_207 +199val_199 +466val_466 +208val_208 +174val_174 +399val_399 +396val_396 +247val_247 +417val_417 +489val_489 +162val_162 +377val_377 +397val_397 +309val_309 +365val_365 +266val_266 +439val_439 +342val_342 +367val_367 +325val_325 +167val_167 +195val_195 +475val_475 +17val_17 +113val_113 +155val_155 +203val_203 +339val_339 +0val_0 +455val_455 +128val_128 +311val_311 +316val_316 +57val_57 +302val_302 +205val_205 +149val_149 +438val_438 +345val_345 +129val_129 +170val_170 +20val_20 +489val_489 +157val_157 +378val_378 +221val_221 +92val_92 +111val_111 +47val_47 +72val_72 +4val_4 +280val_280 +35val_35 +427val_427 +277val_277 +208val_208 +356val_356 +399val_399 +169val_169 +382val_382 +498val_498 +125val_125 +386val_386 +437val_437 +469val_469 +192val_192 +286val_286 +187val_187 +176val_176 +54val_54 +459val_459 +51val_51 +138val_138 +103val_103 +239val_239 +213val_213 +216val_216 +430val_430 +278val_278 +176val_176 +289val_289 +221val_221 +65val_65 +318val_318 +332val_332 +311val_311 +275val_275 +137val_137 +241val_241 +83val_83 +333val_333 +180val_180 +284val_284 +12val_12 +230val_230 +181val_181 +67val_67 +260val_260 +404val_404 +384val_384 +489val_489 +353val_353 +373val_373 +272val_272 +138val_138 +217val_217 +84val_84 +348val_348 +466val_466 +58val_58 +8val_8 +411val_411 +230val_230 +208val_208 +348val_348 +24val_24 +463val_463 +431val_431 +179val_179 +172val_172 +42val_42 +129val_129 +158val_158 +119val_119 +496val_496 +0val_0 +322val_322 +197val_197 +468val_468 +393val_393 +454val_454 +100val_100 +298val_298 +199val_199 +191val_191 +418val_418 +96val_96 +26val_26 +165val_165 +327val_327 +230val_230 +205val_205 +120val_120 +131val_131 +51val_51 +404val_404 +43val_43 +436val_436 +156val_156 +469val_469 +468val_468 +308val_308 +95val_95 +196val_196 +288val_288 +481val_481 +457val_457 +98val_98 +282val_282 +197val_197 +187val_187 +318val_318 +318val_318 +409val_409 +470val_470 +137val_137 +369val_369 +316val_316 +169val_169 +413val_413 +85val_85 +77val_77 +0val_0 +490val_490 +87val_87 +364val_364 +179val_179 +118val_118 +134val_134 +395val_395 +282val_282 +138val_138 +238val_238 +419val_419 +15val_15 +118val_118 +72val_72 +90val_90 +307val_307 +19val_19 +435val_435 +10val_10 +277val_277 +273val_273 +306val_306 +224val_224 +309val_309 +389val_389 +327val_327 +242val_242 +369val_369 +392val_392 +272val_272 +331val_331 +401val_401 +242val_242 +452val_452 +177val_177 +226val_226 +5val_5 +497val_497 +402val_402 +396val_396 +317val_317 +395val_395 +58val_58 +35val_35 +336val_336 +95val_95 +11val_11 +168val_168 +34val_34 +229val_229 +233val_233 +143val_143 +472val_472 +322val_322 +498val_498 +160val_160 +195val_195 
+42val_42 +321val_321 +430val_430 +119val_119 +489val_489 +458val_458 +78val_78 +76val_76 +41val_41 +223val_223 +492val_492 +149val_149 +449val_449 +218val_218 +228val_228 +138val_138 +453val_453 +30val_30 +209val_209 +64val_64 +468val_468 +76val_76 +74val_74 +342val_342 +69val_69 +230val_230 +33val_33 +368val_368 +103val_103 +296val_296 +113val_113 +216val_216 +367val_367 +344val_344 +167val_167 +274val_274 +219val_219 +239val_239 +485val_485 +116val_116 +223val_223 +256val_256 +263val_263 +70val_70 +487val_487 +480val_480 +401val_401 +288val_288 +191val_191 +5val_5 +244val_244 +438val_438 +128val_128 +467val_467 +432val_432 +202val_202 +316val_316 +229val_229 +469val_469 +463val_463 +280val_280 +2val_2 +35val_35 +283val_283 +331val_331 +235val_235 +80val_80 +44val_44 +193val_193 +321val_321 +335val_335 +104val_104 +466val_466 +366val_366 +175val_175 +403val_403 +483val_483 +53val_53 +105val_105 +257val_257 +406val_406 +409val_409 +190val_190 +406val_406 +401val_401 +114val_114 +258val_258 +90val_90 +203val_203 +262val_262 +348val_348 +424val_424 +12val_12 +396val_396 +201val_201 +217val_217 +164val_164 +431val_431 +454val_454 +478val_478 +298val_298 +125val_125 +431val_431 +164val_164 +424val_424 +187val_187 +382val_382 +5val_5 +70val_70 +397val_397 +480val_480 +291val_291 +24val_24 +351val_351 +255val_255 +104val_104 +70val_70 +163val_163 +438val_438 +119val_119 +414val_414 +200val_200 +491val_491 +237val_237 +439val_439 +360val_360 +248val_248 +479val_479 +305val_305 +417val_417 +199val_199 +444val_444 +120val_120 +429val_429 +169val_169 +443val_443 +323val_323 +325val_325 +277val_277 +230val_230 +478val_478 +178val_178 +468val_468 +310val_310 +317val_317 +333val_333 +493val_493 +460val_460 +207val_207 +249val_249 +265val_265 +480val_480 +83val_83 +136val_136 +353val_353 +172val_172 +214val_214 +462val_462 +233val_233 +406val_406 +133val_133 +175val_175 +189val_189 +454val_454 +375val_375 +401val_401 +421val_421 +407val_407 +384val_384 +256val_256 +26val_26 +134val_134 +67val_67 +384val_384 +379val_379 +18val_18 +462val_462 +492val_492 +100val_100 +298val_298 +9val_9 +341val_341 +498val_498 +146val_146 +458val_458 +362val_362 +186val_186 +285val_285 +348val_348 +167val_167 +18val_18 +273val_273 +183val_183 +281val_281 +344val_344 +97val_97 +469val_469 +315val_315 +84val_84 +28val_28 +37val_37 +448val_448 +152val_152 +348val_348 +307val_307 +194val_194 +414val_414 +477val_477 +222val_222 +126val_126 +90val_90 +169val_169 +403val_403 +400val_400 +200val_200 +97val_97 diff --git a/data/files/kv1_cc.txt b/data/files/kv1_cc.txt new file mode 100644 index 000000000..f30077052 --- /dev/null +++ b/data/files/kv1_cc.txt @@ -0,0 +1,500 @@ +238val_238 +86val_86 +311val_311 +27val_27 +165val_165 +409val_409 +255val_255 +278val_278 +98val_98 +484val_484 +265val_265 +193val_193 +401val_401 +150val_150 +273val_273 +224val_224 +369val_369 +66val_66 +128val_128 +213val_213 +146val_146 +406val_406 +429val_429 +374val_374 +152val_152 +469val_469 +145val_145 +495val_495 +37val_37 +327val_327 +281val_281 +277val_277 +209val_209 +15val_15 +82val_82 +403val_403 +166val_166 +417val_417 +430val_430 +252val_252 +292val_292 +219val_219 +287val_287 +153val_153 +193val_193 +338val_338 +446val_446 +459val_459 +394val_394 +237val_237 +482val_482 +174val_174 +413val_413 +494val_494 +207val_207 +199val_199 +466val_466 +208val_208 +174val_174 +399val_399 +396val_396 +247val_247 +417val_417 +489val_489 +162val_162 +377val_377 +397val_397 +309val_309 +365val_365 +266val_266 +439val_439 +342val_342 +367val_367 +325val_325 
+167val_167 +195val_195 +475val_475 +17val_17 +113val_113 +155val_155 +203val_203 +339val_339 +0val_0 +455val_455 +128val_128 +311val_311 +316val_316 +57val_57 +302val_302 +205val_205 +149val_149 +438val_438 +345val_345 +129val_129 +170val_170 +20val_20 +489val_489 +157val_157 +378val_378 +221val_221 +92val_92 +111val_111 +47val_47 +72val_72 +4val_4 +280val_280 +35val_35 +427val_427 +277val_277 +208val_208 +356val_356 +399val_399 +169val_169 +382val_382 +498val_498 +125val_125 +386val_386 +437val_437 +469val_469 +192val_192 +286val_286 +187val_187 +176val_176 +54val_54 +459val_459 +51val_51 +138val_138 +103val_103 +239val_239 +213val_213 +216val_216 +430val_430 +278val_278 +176val_176 +289val_289 +221val_221 +65val_65 +318val_318 +332val_332 +311val_311 +275val_275 +137val_137 +241val_241 +83val_83 +333val_333 +180val_180 +284val_284 +12val_12 +230val_230 +181val_181 +67val_67 +260val_260 +404val_404 +384val_384 +489val_489 +353val_353 +373val_373 +272val_272 +138val_138 +217val_217 +84val_84 +348val_348 +466val_466 +58val_58 +8val_8 +411val_411 +230val_230 +208val_208 +348val_348 +24val_24 +463val_463 +431val_431 +179val_179 +172val_172 +42val_42 +129val_129 +158val_158 +119val_119 +496val_496 +0val_0 +322val_322 +197val_197 +468val_468 +393val_393 +454val_454 +100val_100 +298val_298 +199val_199 +191val_191 +418val_418 +96val_96 +26val_26 +165val_165 +327val_327 +230val_230 +205val_205 +120val_120 +131val_131 +51val_51 +404val_404 +43val_43 +436val_436 +156val_156 +469val_469 +468val_468 +308val_308 +95val_95 +196val_196 +288val_288 +481val_481 +457val_457 +98val_98 +282val_282 +197val_197 +187val_187 +318val_318 +318val_318 +409val_409 +470val_470 +137val_137 +369val_369 +316val_316 +169val_169 +413val_413 +85val_85 +77val_77 +0val_0 +490val_490 +87val_87 +364val_364 +179val_179 +118val_118 +134val_134 +395val_395 +282val_282 +138val_138 +238val_238 +419val_419 +15val_15 +118val_118 +72val_72 +90val_90 +307val_307 +19val_19 +435val_435 +10val_10 +277val_277 +273val_273 +306val_306 +224val_224 +309val_309 +389val_389 +327val_327 +242val_242 +369val_369 +392val_392 +272val_272 +331val_331 +401val_401 +242val_242 +452val_452 +177val_177 +226val_226 +5val_5 +497val_497 +402val_402 +396val_396 +317val_317 +395val_395 +58val_58 +35val_35 +336val_336 +95val_95 +11val_11 +168val_168 +34val_34 +229val_229 +233val_233 +143val_143 +472val_472 +322val_322 +498val_498 +160val_160 +195val_195 +42val_42 +321val_321 +430val_430 +119val_119 +489val_489 +458val_458 +78val_78 +76val_76 +41val_41 +223val_223 +492val_492 +149val_149 +449val_449 +218val_218 +228val_228 +138val_138 +453val_453 +30val_30 +209val_209 +64val_64 +468val_468 +76val_76 +74val_74 +342val_342 +69val_69 +230val_230 +33val_33 +368val_368 +103val_103 +296val_296 +113val_113 +216val_216 +367val_367 +344val_344 +167val_167 +274val_274 +219val_219 +239val_239 +485val_485 +116val_116 +223val_223 +256val_256 +263val_263 +70val_70 +487val_487 +480val_480 +401val_401 +288val_288 +191val_191 +5val_5 +244val_244 +438val_438 +128val_128 +467val_467 +432val_432 +202val_202 +316val_316 +229val_229 +469val_469 +463val_463 +280val_280 +2val_2 +35val_35 +283val_283 +331val_331 +235val_235 +80val_80 +44val_44 +193val_193 +321val_321 +335val_335 +104val_104 +466val_466 +366val_366 +175val_175 +403val_403 +483val_483 +53val_53 +105val_105 +257val_257 +406val_406 +409val_409 +190val_190 +406val_406 +401val_401 +114val_114 +258val_258 +90val_90 +203val_203 +262val_262 +348val_348 +424val_424 +12val_12 +396val_396 +201val_201 +217val_217 +164val_164 
+431val_431 +454val_454 +478val_478 +298val_298 +125val_125 +431val_431 +164val_164 +424val_424 +187val_187 +382val_382 +5val_5 +70val_70 +397val_397 +480val_480 +291val_291 +24val_24 +351val_351 +255val_255 +104val_104 +70val_70 +163val_163 +438val_438 +119val_119 +414val_414 +200val_200 +491val_491 +237val_237 +439val_439 +360val_360 +248val_248 +479val_479 +305val_305 +417val_417 +199val_199 +444val_444 +120val_120 +429val_429 +169val_169 +443val_443 +323val_323 +325val_325 +277val_277 +230val_230 +478val_478 +178val_178 +468val_468 +310val_310 +317val_317 +333val_333 +493val_493 +460val_460 +207val_207 +249val_249 +265val_265 +480val_480 +83val_83 +136val_136 +353val_353 +172val_172 +214val_214 +462val_462 +233val_233 +406val_406 +133val_133 +175val_175 +189val_189 +454val_454 +375val_375 +401val_401 +421val_421 +407val_407 +384val_384 +256val_256 +26val_26 +134val_134 +67val_67 +384val_384 +379val_379 +18val_18 +462val_462 +492val_492 +100val_100 +298val_298 +9val_9 +341val_341 +498val_498 +146val_146 +458val_458 +362val_362 +186val_186 +285val_285 +348val_348 +167val_167 +18val_18 +273val_273 +183val_183 +281val_281 +344val_344 +97val_97 +469val_469 +315val_315 +84val_84 +28val_28 +37val_37 +448val_448 +152val_152 +348val_348 +307val_307 +194val_194 +414val_414 +477val_477 +222val_222 +126val_126 +90val_90 +169val_169 +403val_403 +400val_400 +200val_200 +97val_97 diff --git a/data/files/kv4.txt b/data/files/kv4.txt new file mode 100644 index 000000000..f969221c1 --- /dev/null +++ b/data/files/kv4.txt @@ -0,0 +1 @@ +邵铮 \ No newline at end of file diff --git a/metastore/if/hive_metastore.thrift b/metastore/if/hive_metastore.thrift index af26fafe5..730e90498 100755 --- a/metastore/if/hive_metastore.thrift +++ b/metastore/if/hive_metastore.thrift @@ -161,8 +161,9 @@ service ThriftHiveMetastore extends fb303.FacebookService // delete data (including partitions) if deleteData is set to true void drop_table(1:string dbname, 2:string name, 3:bool deleteData) throws(1:NoSuchObjectException o1, 2:MetaException o3) - list get_tables(string db_name, string pattern) - throws (MetaException o1, UnknownTableException o2, UnknownDBException o3) + list get_tables(string db_name, string pattern) + throws (MetaException o1) + Table get_table(1:string dbname, 2:string tbl_name) throws (1:MetaException o1, 2:NoSuchObjectException o2) // alter table applies to only future partitions not for existing partitions diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java index c6ea2ad1e..a0e086491 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java @@ -77,6 +77,7 @@ protected synchronized Object initialValue() { }; // The next serial number to be assigned + private boolean checkForDefaultDb; private static int nextSerialNum = 0; private static ThreadLocal threadLocalId = new ThreadLocal() { protected synchronized Object initialValue() { @@ -109,6 +110,7 @@ public HMSHandler(String name, HiveConf conf) throws MetaException { private boolean init() throws MetaException { rawStoreClassName = hiveConf.get("hive.metastore.rawstore.impl"); + checkForDefaultDb = hiveConf.getBoolean("hive.metastore.checkForDefaultDb", true); wh = new Warehouse(hiveConf); createDefaultDB(); return true; @@ -134,7 +136,7 @@ private RawStore getMS() throws MetaException { * @throws MetaException */ private void createDefaultDB() 
throws MetaException { - if(HMSHandler.createDefaultDB) { + if(HMSHandler.createDefaultDB || !checkForDefaultDb) { return; } try { @@ -358,6 +360,11 @@ public Partition append_partition(String dbName, String tableName, List throws InvalidObjectException, AlreadyExistsException, MetaException { this.incrementCounter("append_partition"); logStartFunction("append_partition", dbName, tableName); + if(LOG.isDebugEnabled()) { + for (String part : part_vals) { + LOG.debug(part); + } + } Partition part = new Partition(); boolean success = false; try { @@ -367,19 +374,20 @@ public Partition append_partition(String dbName, String tableName, List part.setTableName(tableName); part.setValues(part_vals); - Partition old_part = this.get_partition(part.getDbName(), part.getTableName(), part.getValues()); - if( old_part != null) { - throw new AlreadyExistsException("Partition already exists:" + part); - } - Table tbl = getMS().getTable(part.getDbName(), part.getTableName()); if(tbl == null) { throw new InvalidObjectException("Unable to add partition because table or database do not exist"); } + part.setSd(tbl.getSd()); Path partLocation = new Path(tbl.getSd().getLocation(), Warehouse.makePartName(tbl.getPartitionKeys(), part_vals)); part.getSd().setLocation(partLocation.toString()); + Partition old_part = this.get_partition(part.getDbName(), part.getTableName(), part.getValues()); + if( old_part != null) { + throw new AlreadyExistsException("Partition already exists:" + part); + } + success = getMS().addPartition(part); if(success) { success = getMS().commitTransaction(); @@ -454,8 +462,32 @@ public boolean drop_partition(String db_name, String tbl_name, List part TException { this.incrementCounter("drop_partition"); logStartFunction("drop_partition", db_name, tbl_name); - // TODO:pc drop the data as needed - return getMS().dropPartition(db_name, tbl_name, part_vals); + LOG.info("Partition values:" + part_vals); + boolean success = false; + Path partPath = null; + try { + getMS().openTransaction(); + Partition part = this.get_partition(db_name, tbl_name, part_vals); + if(part == null) { + throw new NoSuchObjectException("Partition doesn't exist. 
" + part_vals); + } + if(part.getSd() == null || part.getSd().getLocation() == null) { + throw new MetaException("Partition metadata is corrupted"); + } + if(!getMS().dropPartition(db_name, tbl_name, part_vals)) { + throw new MetaException("Unable to drop partition"); + } + success = getMS().commitTransaction(); + partPath = new Path(part.getSd().getLocation()); + } finally { + if(!success) { + getMS().rollbackTransaction(); + } else if(deleteData && (partPath != null)) { + wh.deleteDir(partPath, true); + // ok even if the data is not deleted + } + } + return true; } public Partition get_partition(String db_name, String tbl_name, List part_vals) diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java index 77e58fa80..58e920b67 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java @@ -142,12 +142,13 @@ private void open() throws MetaException { if(!open) { throw new MetaException("Could not connect to meta store using any of the URIs provided"); } + LOG.info("Connected to metastore."); } private void openStore(URI store) throws MetaException { open = false; transport = new TSocket(store.getHost(), store.getPort()); - ((TSocket)transport).setTimeout(2000); + ((TSocket)transport).setTimeout(20000); TProtocol protocol = new TBinaryProtocol(transport); client = new ThriftHiveMetastore.Client(protocol); diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java b/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java index 27d0c051e..72983f9c2 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java @@ -108,4 +108,18 @@ public void createTable(Table tbl) throws AlreadyExistsException, InvalidObjectE public void alter_table(String defaultDatabaseName, String tblName, Table table) throws InvalidOperationException, MetaException, TException; public boolean createDatabase(String name, String location_uri) throws AlreadyExistsException, MetaException, TException; public boolean dropDatabase(String name) throws MetaException, TException; + + /** + * @param db_name + * @param tbl_name + * @param part_vals + * @param deleteData delete the underlying data or just delete the table in metadata + * @return + * @throws NoSuchObjectException + * @throws MetaException + * @throws TException + * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#drop_partition(java.lang.String, java.lang.String, java.util.List) + */ + public boolean dropPartition(String db_name, String tbl_name, List part_vals, boolean deleteData) + throws NoSuchObjectException, MetaException, TException; } \ No newline at end of file diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreClient.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreClient.java index f95a9faa6..6250b1d5a 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreClient.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreClient.java @@ -439,4 +439,9 @@ public List listPartitionNames(String db_name, String tbl_name, short ma return new ArrayList(); } + public boolean dropPartition(String db_name, String tbl_name, List part_vals, boolean deleteData) + throws NoSuchObjectException, MetaException, 
TException { + return false; + } + } diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java index 701e5d4f4..d396b9f1a 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java @@ -102,7 +102,7 @@ public static Table createColumnsetSchema(String name, List columns, Lis tTable.getPartitionKeys().add(part); } // not sure why these are needed - serdeInfo.setSerializationLib(MetadataTypedColumnsetSerDe.shortName()); + serdeInfo.setSerializationLib(MetadataTypedColumnsetSerDe.class.getName()); sd.setNumBuckets(-1); return tTable; } @@ -266,14 +266,22 @@ public static Properties hive1Tohive3ClassNames(Properties p) { oldName = oldName.replace("com.facebook.thrift.hive.MetadataTypedColumnsetSerDe",org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe.class.getName()); // columnset serde - oldName = oldName.replace("com.facebook.thrift.hive.columnsetSerDe",org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe.class.getName()); + oldName = oldName.replace("com.facebook.thrift.hive.columnsetSerDe",org.apache.hadoop.hive.serde.thrift.columnsetSerDe.class.getName()); oldName = oldName.replace("org.apache.hadoop.hive.serde.simple_meta.MetadataTypedColumnsetSerDe", - org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe.class.getName()); - + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe.class.getName()); + oldName = oldName.replace("com.facebook.thrift.hive.MetadataTypedColumnsetSerDe", org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe.class.getName()); // thrift serde - oldName = oldName.replace("com.facebook.thrift.hive.ThriftHiveSerDe",org.apache.hadoop.hive.serde2.ThriftDeserializer.class.getName()); - oldName = oldName.replace("org.apache.hadoop.hive.serde.thrift.ThriftSerDe", - org.apache.hadoop.hive.serde2.ThriftDeserializer.class.getName()); + oldName = oldName.replace("com.facebook.thrift.hive.ThriftHiveSerDe", org.apache.hadoop.hive.serde2.ThriftDeserializer.class.getName()); + oldName = oldName.replace("org.apache.hadoop.hive.serde.thrift.ThriftSerDe", org.apache.hadoop.hive.serde2.ThriftDeserializer.class.getName()); + + // replace any old short names in filebased metadata + if(oldName.equals("columnset")) + oldName = org.apache.hadoop.hive.serde.thrift.columnsetSerDe.class.getName(); + if(oldName.equals("simple_meta")) + oldName = org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe.class.getName(); + if(oldName.equals("thrift")) + oldName = org.apache.hadoop.hive.serde2.ThriftDeserializer.class.getName(); + p.setProperty(key,oldName); } return p; @@ -328,6 +336,8 @@ public static Table getTable(Configuration conf, Properties schema) throws MetaE } // needed for MetadataTypedColumnSetSerDe setSerdeParam(t.getSd().getSerdeInfo(), schema, org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_COLUMNS); + // needed for DynamicSerDe + setSerdeParam(t.getSd().getSerdeInfo(), schema, org.apache.hadoop.hive.serde.Constants.SERIALIZATION_DDL); String colstr = schema.getProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_COLUMNS); List fields = new ArrayList(); @@ -350,6 +360,8 @@ public static Table getTable(Configuration conf, Properties schema) throws MetaE } t.getSd().setCols(fields); + t.setOwner(schema.getProperty("creator")); + // remove all the used up parameters to find out the remaining parameters 
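Editor's note: the MetaStoreUtils hunk above teaches hive1Tohive3ClassNames() to map the legacy short SerDe names found in file-based metadata ("columnset", "simple_meta", "thrift") onto fully qualified class names. A minimal standalone sketch of that normalization idea follows; the helper class and method names are invented for illustration, and the class-name strings are simply the ones visible in the hunk.

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class SerDeNameNormalizer {
  // Hypothetical mapping of legacy short names to fully qualified SerDe classes,
  // using the same targets as the patch above.
  private static final Map<String, String> SHORT_NAMES = new HashMap<String, String>();
  static {
    SHORT_NAMES.put("columnset", "org.apache.hadoop.hive.serde.thrift.columnsetSerDe");
    SHORT_NAMES.put("simple_meta", "org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe");
    SHORT_NAMES.put("thrift", "org.apache.hadoop.hive.serde2.ThriftDeserializer");
  }

  // Rewrites any short SerDe names found among the property values to class names.
  public static Properties normalize(Properties p) {
    for (String key : p.stringPropertyNames()) {
      String replacement = SHORT_NAMES.get(p.getProperty(key));
      if (replacement != null) {
        p.setProperty(key, replacement);
      }
    }
    return p;
  }

  public static void main(String[] args) {
    Properties p = new Properties();
    p.setProperty("serialization.lib", "simple_meta");
    // prints org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
    System.out.println(normalize(p).getProperty("serialization.lib"));
  }
}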
schema.remove(Constants.META_TABLE_NAME); schema.remove(Constants.META_TABLE_LOCATION); @@ -373,13 +385,70 @@ public static Table getTable(Configuration conf, Properties schema) throws MetaE return t; } - private static void setSerdeParam(SerDeInfo sdi, Properties schema, String param) { + public static void setSerdeParam(SerDeInfo sdi, Properties schema, String param) { String val = schema.getProperty(param); if(org.apache.commons.lang.StringUtils.isNotBlank(val)) { sdi.getParameters().put(param, val); } } + static HashMap typeToThriftTypeMap; + static { + typeToThriftTypeMap = new HashMap(); + typeToThriftTypeMap.put(org.apache.hadoop.hive.serde.Constants.TINYINT_TYPE_NAME, "byte"); + typeToThriftTypeMap.put(org.apache.hadoop.hive.serde.Constants.INT_TYPE_NAME, "i32"); + typeToThriftTypeMap.put(org.apache.hadoop.hive.serde.Constants.BIGINT_TYPE_NAME, "i64"); + typeToThriftTypeMap.put(org.apache.hadoop.hive.serde.Constants.DOUBLE_TYPE_NAME, "double"); + typeToThriftTypeMap.put(org.apache.hadoop.hive.serde.Constants.FLOAT_TYPE_NAME, "float"); + typeToThriftTypeMap.put(org.apache.hadoop.hive.serde.Constants.LIST_TYPE_NAME, "list"); + typeToThriftTypeMap.put(org.apache.hadoop.hive.serde.Constants.MAP_TYPE_NAME, "map"); + typeToThriftTypeMap.put(org.apache.hadoop.hive.serde.Constants.STRING_TYPE_NAME, "string"); + // These 3 types are not supported yet. + // We should define a complex type date in thrift that contains a single int member, and DynamicSerDe + // should convert it to date type at runtime. + typeToThriftTypeMap.put(org.apache.hadoop.hive.serde.Constants.DATE_TYPE_NAME, "date"); + typeToThriftTypeMap.put(org.apache.hadoop.hive.serde.Constants.DATETIME_TYPE_NAME, "datetime"); + typeToThriftTypeMap.put(org.apache.hadoop.hive.serde.Constants.TIMESTAMP_TYPE_NAME, "timestamp"); + } + /** Convert type to ThriftType. We do that by tokenizing the type and convert each token. + */ + public static String typeToThriftType(String type) { + StringBuilder thriftType = new StringBuilder(); + int last = 0; + boolean lastAlphaDigit = Character.isLetterOrDigit(type.charAt(last)); + for(int i=1; i<=type.length(); i++) { + if (i == type.length() || Character.isLetterOrDigit(type.charAt(i)) != lastAlphaDigit) { + String token = type.substring(last, i); + last = i; + String thriftToken = typeToThriftTypeMap.get(token); + thriftType.append(thriftToken == null? token : thriftToken); + lastAlphaDigit = !lastAlphaDigit; + } + } + return thriftType.toString(); + } + /** Convert FieldSchemas to Thrift DDL. 
+ */ + public static String getDDLFromFieldSchema(String structName, List fieldSchemas) { + StringBuilder ddl = new StringBuilder(); + ddl.append("struct "); + ddl.append(structName); + ddl.append(" { "); + boolean first = true; + for (FieldSchema col: fieldSchemas) { + if (first) { + first = false; + } else { + ddl.append(", "); + } + ddl.append(typeToThriftType(col.getType())); + ddl.append(' '); + ddl.append(col.getName()); + } + ddl.append("}"); + LOG.warn("DDL: " + ddl); + return ddl.toString(); + } public static Properties getSchema(org.apache.hadoop.hive.metastore.api.Table tbl) { Properties schema = new Properties(); String inputFormat = tbl.getSd().getInputFormat(); @@ -404,7 +473,7 @@ public static Properties getSchema(org.apache.hadoop.hive.metastore.api.Table tb if(tbl.getSd().getSerdeInfo().getSerializationLib() != null) { schema.setProperty(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_LIB, tbl.getSd().getSerdeInfo().getSerializationLib()); } - StringBuffer buf = new StringBuffer(); + StringBuilder buf = new StringBuilder(); boolean first = true; for (FieldSchema col: tbl.getSd().getCols()) { if (!first) { @@ -415,6 +484,8 @@ public static Properties getSchema(org.apache.hadoop.hive.metastore.api.Table tb } String cols = buf.toString(); schema.setProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_COLUMNS, cols); + schema.setProperty(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_DDL, + getDDLFromFieldSchema(tbl.getTableName(), tbl.getSd().getCols())); String partString = ""; String partStringSep = ""; @@ -429,10 +500,6 @@ public static Properties getSchema(org.apache.hadoop.hive.metastore.api.Table tb schema.setProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_PARTITION_COLUMNS, partString); } - //TODO:pc field_to_dimension doesn't seem to be used anywhere so skipping for now - schema.setProperty(org.apache.hadoop.hive.metastore.api.Constants.BUCKET_FIELD_NAME, ""); - schema.setProperty(org.apache.hadoop.hive.metastore.api.Constants.FIELD_TO_DIMENSION, ""); - for(Entry e: tbl.getParameters().entrySet()) { schema.setProperty(e.getKey(), e.getValue()); } diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java index 6a5d89b58..b25368d84 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java @@ -528,11 +528,24 @@ public List getTables(String dbName, String pattern) throws MetaExceptio try { openTransaction(); dbName = dbName.toLowerCase(); - pattern = "(?i)" + pattern; // add the case insensitivity - Query q = pm.newQuery("select tableName from org.apache.hadoop.hive.metastore.model.MTable where database.name == dbName && tableName.matches(pattern)"); - q.declareParameters("java.lang.String dbName, java.lang.String pattern"); + // Take the pattern and split it on the | to get all the composing patterns + String [] subpatterns = pattern.trim().split("\\|"); + String query = "select tableName from org.apache.hadoop.hive.metastore.model.MTable where database.name == dbName && ("; + boolean first = true; + for(String subpattern: subpatterns) { + subpattern = "(?i)" + subpattern.replaceAll("\\*", ".*"); + if (!first) { + query = query + " || "; + } + query = query + " tableName.matches(\"" + subpattern + "\")"; + first = false; + } + query = query + ")"; + + Query q = pm.newQuery(query); + q.declareParameters("java.lang.String dbName"); 
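Editor's note: the getTables() rewrite above splits the requested pattern on '|' and turns each '*' wildcard into a case-insensitive regex before assembling the JDOQL filter. A self-contained sketch of the same matching semantics, applied to an in-memory list of names instead of a JDO query (class and method names are illustrative only), could look like this:

import java.util.ArrayList;
import java.util.List;

public class TablePatternMatcher {
  // Returns the names matching any of the '|'-separated, '*'-wildcard subpatterns.
  public static List<String> filter(List<String> tableNames, String pattern) {
    List<String> result = new ArrayList<String>();
    String[] subpatterns = pattern.trim().split("\\|");
    for (String name : tableNames) {
      for (String sub : subpatterns) {
        // Same translation as the hunk above: '*' becomes '.*', matching is case-insensitive.
        String regex = "(?i)" + sub.replaceAll("\\*", ".*");
        if (name.matches(regex)) {
          result.add(name);
          break;
        }
      }
    }
    return result;
  }

  public static void main(String[] args) {
    List<String> tables = new ArrayList<String>();
    tables.add("srcpart");
    tables.add("SRC");
    tables.add("kv_data");
    // prints [srcpart, SRC]; kv_data matches neither subpattern.
    System.out.println(filter(tables, "src*|dest*"));
  }
}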
q.setResult("tableName"); - Collection names = (Collection) q.execute(dbName.trim(), pattern.trim()); + Collection names = (Collection) q.execute(dbName.trim()); tbls = new ArrayList(); for (Iterator i = names.iterator (); i.hasNext ();) { tbls.add((String) i.next ()); @@ -817,7 +830,7 @@ public List listPartitionNames(String dbName, String tableName, short ma LOG.debug("Executing getPartitionNames"); dbName = dbName.toLowerCase(); tableName = tableName.toLowerCase(); - Query q = pm.newQuery("select partitionName from org.apache.hadoop.hive.metastore.model.MPartition where table.database.name == t1 && table.tableName == t2"); + Query q = pm.newQuery("select partitionName from org.apache.hadoop.hive.metastore.model.MPartition where table.database.name == t1 && table.tableName == t2 order by partitionName asc"); q.declareParameters("java.lang.String t1, java.lang.String t2"); q.setResult("partitionName"); Collection names = (Collection) q.execute(dbName.trim(), tableName.trim()); @@ -847,9 +860,10 @@ private List listMPartitions(String dbName, String tableName, int ma Query query = pm.newQuery(MPartition.class, "table.tableName == t1 && table.database.name == t2"); query.declareParameters("java.lang.String t1, java.lang.String t2"); mparts = (List) query.execute(tableName.trim(), dbName.trim()); + LOG.debug("Done executing query for listMPartitions"); pm.retrieveAll(mparts); success = commitTransaction(); - LOG.debug("Done e xecuting listMPartitions"); + LOG.debug("Done retrieving all objects for listMPartitions"); } finally { if(!success) { rollbackTransaction(); diff --git a/ql/build.xml b/ql/build.xml index eac25ebc5..7b9a62fda 100644 --- a/ql/build.xml +++ b/ql/build.xml @@ -64,6 +64,13 @@ resultsDirectory="${ql.test.results.dir}/clientpositive" className="TestCliDriver" logFile="${test.log.dir}/testclidrivergen.log"/> + + diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Context.java b/ql/src/java/org/apache/hadoop/hive/ql/Context.java index ef7f6daa3..10fed3fe9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/Context.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/Context.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql; +import java.io.File; import java.io.DataInput; import java.io.IOException; import java.io.FileNotFoundException; @@ -29,6 +30,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.util.StringUtils; +import java.util.Random; public class Context { private Path resFile; @@ -38,9 +40,12 @@ public class Context { private Path[] resDirPaths; private int resDirFilesNum; boolean initialized; + private String scratchDir; + private HiveConf conf; public Context(HiveConf conf) { try { + this.conf = conf; fs = FileSystem.get(conf); initialized = false; resDir = null; @@ -50,6 +55,23 @@ public Context(HiveConf conf) { } } + public void makeScratchDir() throws Exception { + Random rand = new Random(); + int randomid = Math.abs(rand.nextInt()%rand.nextInt()); + scratchDir = conf.getVar(HiveConf.ConfVars.SCRATCHDIR) + File.separator + randomid; + Path tmpdir = new Path(scratchDir); + fs.mkdirs(tmpdir); + } + + public String getScratchDir() { + return scratchDir; + } + + public void removeScratchDir() throws Exception { + Path tmpdir = new Path(scratchDir); + fs.delete(tmpdir, true); + } + /** * @return the resFile */ diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java index c830da242..227cdda2d 100644 --- 
a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java @@ -34,10 +34,8 @@ import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; import org.apache.hadoop.hive.ql.exec.Task; -import org.apache.hadoop.hive.ql.exec.MapRedTask; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.exec.Utilities; -import org.apache.hadoop.hive.ql.exec.ExecDriver; import org.apache.hadoop.hive.serde.ByteStream; import org.apache.hadoop.hive.conf.HiveConf; @@ -55,13 +53,14 @@ public class Driver implements CommandProcessor { private DataInput resStream; private LogHelper console; private Context ctx; + private BaseSemanticAnalyzer sem; public int countJobs(List> tasks) { if (tasks == null) return 0; int jobs = 0; for (Task task: tasks) { - if ((task instanceof ExecDriver) || (task instanceof MapRedTask)) { + if (task.isMapRedTask()) { jobs++; } jobs += countJobs(task.getChildTasks()); @@ -69,6 +68,22 @@ public int countJobs(List> tasks) { return jobs; } + public boolean hasReduceTasks(List> tasks) { + if (tasks == null) + return false; + + boolean hasReduce = false; + for (Task task: tasks) { + if (task.hasReduce()) { + return true; + } + + hasReduce = (hasReduce || hasReduceTasks(task.getChildTasks())); + } + return hasReduce; + } + + /** * for backwards compatibility with current tests */ @@ -97,11 +112,10 @@ public int run(String command) { try { TaskFactory.resetId(); - - BaseSemanticAnalyzer sem; LOG.info("Starting command: " + command); ctx.clear(); + ctx.makeScratchDir(); resStream = null; pd = new ParseDriver(); @@ -122,12 +136,18 @@ public int run(String command) { console.printInfo("Total MapReduce jobs = " + jobs); } - + boolean hasReduce = hasReduceTasks(sem.getRootTasks()); + if (hasReduce) { + console.printInfo("Number of reducers = " + conf.getIntVar(HiveConf.ConfVars.HADOOPNUMREDUCERS)); + console.printInfo("In order to change numer of reducers use:"); + console.printInfo(" set mapred.reduce.tasks = "); + } + String jobname = Utilities.abbreviate(command, maxlen - 6); int curJob = 0; for(Task rootTask: sem.getRootTasks()) { // assumption that only top level tasks are map-reduce tasks - if ((rootTask instanceof ExecDriver) || (rootTask instanceof MapRedTask)) { + if (rootTask.isMapRedTask()) { curJob ++; if(noName) { conf.setVar(HiveConf.ConfVars.HADOOPJOBNAME, jobname + "(" + curJob + "/" + jobs + ")"); @@ -175,10 +195,10 @@ public int run(String command) { } } } catch (SemanticException e) { - console.printError("FAILED: Error in semantic analysis: " + e.getMessage()); + console.printError("FAILED: Error in semantic analysis: " + e.getMessage(), "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e)); return (10); } catch (ParseException e) { - console.printError("FAILED: Parse Error: " + e.getMessage()); + console.printError("FAILED: Parse Error: " + e.getMessage(), "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e)); return (11); } catch (Exception e) { // Has to use full name to make sure it does not conflict with org.apache.commons.lang.StringUtils @@ -196,14 +216,23 @@ public int run(String command) { } - public boolean getResults(Vector> res) + public boolean getResults(Vector res) { + if (sem.getFetchTask() != null) { + if (!sem.getFetchTaskInit()) { + sem.setFetchTaskInit(true); + sem.getFetchTask().initialize(conf); + } + boolean ret = sem.getFetchTask().fetch(res); + return ret; + } + if (resStream == null) resStream = 
ctx.getStream(); if (resStream == null) return false; int numRows = 0; - Vector row = new Vector(); + String row = null; while (numRows < MAX_ROWS) { @@ -215,47 +244,45 @@ public boolean getResults(Vector> res) return false; } - String col = null; bos.reset(); - Utilities.streamStatus ss = Utilities.streamStatus.NORMAL; + Utilities.streamStatus ss; try { ss = Utilities.readColumn(resStream, bos); if (bos.getCount() > 0) - col = new String(bos.getData(), 0, bos.getCount(), "UTF-8"); - else if (ss == Utilities.streamStatus.NORMAL) - col = Utilities.NSTR; + row = new String(bos.getData(), 0, bos.getCount(), "UTF-8"); + else if (ss == Utilities.streamStatus.TERMINATED) + row = new String(); + + if (row != null) { + numRows++; + res.add(row); + } } catch (IOException e) { console.printError("FAILED: Unexpected IO exception : " + e.getMessage()); res = null; return false; } - - if ((ss == Utilities.streamStatus.EOF) || - (ss == Utilities.streamStatus.TERMINATED)) - { - if (col != null) - row.add(col.equals(Utilities.nullStringStorage) ? null : col); - else if (row.size() != 0) - row.add(null); - - numRows++; - res.add(row); - row = new Vector(); - col = null; - - if (ss == Utilities.streamStatus.EOF) - resStream = ctx.getStream(); - } - else if (ss == Utilities.streamStatus.NORMAL) - { - row.add(col.equals(Utilities.nullStringStorage) ? null : col); - col = null; - } - else - assert false; + + if (ss == Utilities.streamStatus.EOF) + resStream = ctx.getStream(); } return true; } + + public int close() { + try { + // Delete the scratch directory from the context + ctx.removeScratchDir(); + ctx.clear(); + } + catch (Exception e) { + console.printError("FAILED: Unknown exception : " + e.getMessage(), + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e)); + return(13); + } + + return(0); + } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java index a632f3245..ae2c56a00 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java @@ -48,16 +48,14 @@ public class ColumnInfo implements Serializable { public ColumnInfo() { } - public ColumnInfo(String internalName, TypeInfo type, boolean isVirtual) { + public ColumnInfo(String internalName, TypeInfo type) { this.internalName = internalName; this.type = type; - this.isVirtual = isVirtual; } - public ColumnInfo(String internalName, Class type, boolean isVirtual) { + public ColumnInfo(String internalName, Class type) { this.internalName = internalName; this.type = TypeInfoFactory.getPrimitiveTypeInfo(type); - this.isVirtual = isVirtual; } public TypeInfo getType() { @@ -67,10 +65,6 @@ public TypeInfo getType() { public String getInternalName() { return internalName; } - - public boolean getIsVirtual() { - return isVirtual; - } public void setType(TypeInfo type) { this.type = type; @@ -79,9 +73,4 @@ public void setType(TypeInfo type) { public void setInternalName(String internalName) { this.internalName = internalName; } - - public void setIsVirtual(boolean isVirtual) { - this.isVirtual = isVirtual; - } - } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java index 040dd84b1..4d3c57a38 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java @@ -22,39 +22,46 @@ import java.io.FileNotFoundException; import java.io.IOException; import 
java.io.Serializable; +import java.util.ArrayList; +import java.util.HashMap; import java.util.Iterator; import java.util.List; +import java.util.Map; import java.util.SortedSet; import java.util.TreeSet; -import org.apache.hadoop.fs.FileSystem; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.serde.Constants; +import org.apache.hadoop.hive.metastore.MetaStoreUtils; +import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.InvalidOperationException; import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hadoop.hive.metastore.api.Order; import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.metadata.InvalidTableException; +import org.apache.hadoop.hive.ql.metadata.Partition; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.plan.DDLWork; import org.apache.hadoop.hive.ql.plan.alterTableDesc; import org.apache.hadoop.hive.ql.plan.createTableDesc; import org.apache.hadoop.hive.ql.plan.descTableDesc; import org.apache.hadoop.hive.ql.plan.dropTableDesc; +import org.apache.hadoop.hive.ql.plan.showPartitionsDesc; import org.apache.hadoop.hive.ql.plan.showTablesDesc; +import org.apache.hadoop.hive.serde.Constants; +import org.apache.hadoop.hive.serde.thrift.columnsetSerDe; +import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe; import org.apache.hadoop.mapred.SequenceFileInputFormat; import org.apache.hadoop.mapred.SequenceFileOutputFormat; -import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.mapred.TextInputFormat; import org.apache.hadoop.util.StringUtils; -import org.apache.hadoop.hive.ql.metadata.InvalidTableException; -import org.apache.hadoop.hive.metastore.api.Order; -import org.apache.hadoop.hive.serde.Constants; -import org.apache.hadoop.util.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import com.facebook.thrift.TException; /** @@ -66,7 +73,7 @@ public class DDLTask extends Task implements Serializable { static final private Log LOG = LogFactory.getLog("hive.ql.exec.DDLTask"); transient HiveConf conf; - static final private int separator = Utilities.ctrlaCode; + static final private int separator = Utilities.tabCode; static final private int terminator = Utilities.newLineCode; public void initialize(HiveConf conf) { @@ -98,14 +105,45 @@ public int execute() { tbl.setPartCols(crtTbl.getPartCols()); if (crtTbl.getNumBuckets() != -1) tblStorDesc.setNumBuckets(crtTbl.getNumBuckets()); - if (crtTbl.getFieldDelim() != null) - tbl.setSerdeParam(Constants.FIELD_DELIM, crtTbl.getFieldDelim()); - if (crtTbl.getCollItemDelim() != null) - tbl.setSerdeParam(Constants.COLLECTION_DELIM, crtTbl.getCollItemDelim()); - if (crtTbl.getMapKeyDelim() != null) - tbl.setSerdeParam(Constants.MAPKEY_DELIM, crtTbl.getMapKeyDelim()); - if (crtTbl.getLineDelim() != null) - tbl.setSerdeParam(Constants.LINE_DELIM, crtTbl.getLineDelim()); + + if (crtTbl.getSerName() != null) { + tbl.setSerializationLib(crtTbl.getSerName()); + if (crtTbl.getMapProp() != null) { + Iterator> iter = crtTbl.getMapProp().entrySet().iterator(); 
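Editor's note: the create-table path here either installs an explicitly named SerDe together with its property map, or falls back to recording the individual delimiters supplied in the statement. A simplified standalone sketch of that decision is below; the string keys are placeholders rather than Hive's real Constants values, and the class name is hypothetical.

import java.util.HashMap;
import java.util.Map;

public class SerDeParamBuilder {
  // Builds the SerDe parameter map for a new table: an explicitly named SerDe
  // brings its own properties, otherwise any delimiters given are recorded.
  public static Map<String, String> build(String serdeName,
                                          Map<String, String> serdeProps,
                                          String fieldDelim,
                                          String collectionDelim,
                                          String mapKeyDelim) {
    Map<String, String> params = new HashMap<String, String>();
    if (serdeName != null) {
      if (serdeProps != null) {
        params.putAll(serdeProps);
      }
    } else {
      if (fieldDelim != null) {
        params.put("FIELD_DELIM", fieldDelim);
        params.put("SERIALIZATION_FORMAT", fieldDelim);
      }
      if (collectionDelim != null) {
        params.put("COLLECTION_DELIM", collectionDelim);
      }
      if (mapKeyDelim != null) {
        params.put("MAPKEY_DELIM", mapKeyDelim);
      }
    }
    return params;
  }

  public static void main(String[] args) {
    // No SerDe named, only a field delimiter: both placeholder keys are set.
    System.out.println(build(null, null, "\t", null, null));
  }
}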
+ while (iter.hasNext()) { + Map.Entry m = (Map.Entry)iter.next(); + tbl.setSerdeParam(m.getKey(), m.getValue()); + } + } + } + else + { + if (crtTbl.getFieldDelim() != null) + { + tbl.setSerdeParam(Constants.FIELD_DELIM, crtTbl.getFieldDelim()); + tbl.setSerdeParam(Constants.SERIALIZATION_FORMAT, crtTbl.getFieldDelim()); + } + + if (crtTbl.getCollItemDelim() != null) + tbl.setSerdeParam(Constants.COLLECTION_DELIM, crtTbl.getCollItemDelim()); + if (crtTbl.getMapKeyDelim() != null) + tbl.setSerdeParam(Constants.MAPKEY_DELIM, crtTbl.getMapKeyDelim()); + if (crtTbl.getLineDelim() != null) + tbl.setSerdeParam(Constants.LINE_DELIM, crtTbl.getLineDelim()); + } + + /** + * For now, if the user specifies either the map or the collections delimiter, we infer the + * table to DynamicSerDe/TCTLSeparatedProtocol. + * In the future, we should infer this for any delimiters specified, but this will break older + * hive tables, so not for now. + */ + if (crtTbl.getCollItemDelim() != null || crtTbl.getMapKeyDelim() != null) { + tbl.setSerializationLib(org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe.class.getName()); + tbl.setSerdeParam(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol.class.getName()); + } + + if (crtTbl.getComment() != null) tbl.setProperty("comment", crtTbl.getComment()); if (crtTbl.getLocation() != null) @@ -157,6 +195,11 @@ public int execute() { tbl.setProperty("SORTBUCKETCOLSPREFIX", "TRUE"); } } + + // set owner, create_time etc + tbl.setOwner(System.getProperty("user.name")); + // set create time + tbl.getTTable().setCreateTime((int) (System.currentTimeMillis()/1000)); // create the table db.createTable(tbl); @@ -165,8 +208,30 @@ public int execute() { dropTableDesc dropTbl = work.getDropTblDesc(); if (dropTbl != null) { - // drop the table - db.dropTable(dropTbl.getTableName()); + if(dropTbl.getPartSpecs() == null) { + // drop the table + db.dropTable(dropTbl.getTableName()); + } else { + // drop partitions in the list + Table tbl = db.getTable(dropTbl.getTableName()); + List parts = new ArrayList(); + for(HashMap partSpec : dropTbl.getPartSpecs()) { + Partition part = db.getPartition(tbl, partSpec, false); + if(part == null) { + console.printInfo("Partition " + partSpec + " does not exist."); + } else { + parts.add(part); + } + } + // drop all existing partitions from the list + for (Partition partition : parts) { + console.printInfo("Dropping the partition " + partition.getName()); + db.dropPartition(MetaStoreUtils.DEFAULT_DATABASE_NAME, + dropTbl.getTableName(), + partition.getValues(), + true); //drop data for the partition + } + } return 0; } @@ -174,10 +239,55 @@ public int execute() { if (alterTbl != null) { // alter the table Table tbl = db.getTable(alterTbl.getOldName()); - if (alterTbl.getOp() == alterTableDesc.alterTableTypes.RENAME) - tbl.getTTable().setTableName(alterTbl.getNewName()); - else - tbl.getTTable().getSd().setCols(alterTbl.getNewCols()); + if (alterTbl.getOp() == alterTableDesc.alterTableTypes.RENAME) + tbl.getTTable().setTableName(alterTbl.getNewName()); + else if(alterTbl.getOp() == alterTableDesc.alterTableTypes.ADDCOLS) { + List newCols = alterTbl.getNewCols(); + List oldCols = tbl.getCols(); + if(tbl.getSerializationLib().equals(columnsetSerDe.class.getName())) { + console.printInfo("Replacing columns for columnsetSerDe and changing to typed SerDe"); + tbl.setSerializationLib(MetadataTypedColumnsetSerDe.class.getName()); + tbl.getTTable().getSd().setCols(newCols); + } + else 
{ + // make sure the columns does not already exist + Iterator iterNewCols = newCols.iterator(); + while (iterNewCols.hasNext()) { + FieldSchema newCol = iterNewCols.next(); + String newColName = newCol.getName(); + Iterator iterOldCols = oldCols.iterator(); + while (iterOldCols.hasNext()) { + String oldColName = iterOldCols.next().getName(); + if (oldColName.equalsIgnoreCase(newColName)) { + console.printError("Column '" + newColName + "' exists"); + return 1; + } + } + oldCols.add(newCol); + } + tbl.getTTable().getSd().setCols(oldCols); + } + } + else if(alterTbl.getOp() == alterTableDesc.alterTableTypes.REPLACECOLS) { + // change SerDe to MetadataTypedColumnsetSerDe if it is columnsetSerDe + if(tbl.getSerializationLib().equals(columnsetSerDe.class.getName())) { + console.printInfo("Replacing columns for columnsetSerDe and changing to typed SerDe"); + tbl.setSerializationLib(MetadataTypedColumnsetSerDe.class.getName()); + } + else if(!tbl.getSerializationLib().equals(MetadataTypedColumnsetSerDe.class.getName())) { + console.printError("Replace columns is not supported for this table. SerDe may be incompatible."); + return 1; + } + tbl.getTTable().getSd().setCols(alterTbl.getNewCols()); + } + else { + console.printError("Unsupported Alter commnad"); + return 1; + } + + // set last modified by properties + tbl.setProperty("last_modified_by", System.getProperty("user.name")); + tbl.setProperty("last_modified_time", Long.toString(System.currentTimeMillis()/1000)); try { db.alterTable(alterTbl.getOldName(), tbl); @@ -194,17 +304,46 @@ public int execute() { descTableDesc descTbl = work.getDescTblDesc(); if (descTbl != null) { - boolean found = true; - + // describe the table - populate the output stream + Table tbl = db.getTable(descTbl.getTableName(), false); + Partition part = null; try { - // describe the table - populate the output stream - Table tbl = db.getTable(descTbl.getTableName()); - + if(tbl == null) { + DataOutput outStream = (DataOutput)fs.open(descTbl.getResFile()); + String errMsg = "Table " + descTbl.getTableName() + " does not exist"; + outStream.write(errMsg.getBytes("UTF-8")); + ((FSDataOutputStream)outStream).close(); + return 0; + } + if(descTbl.getPartSpec() != null) { + part = db.getPartition(tbl, descTbl.getPartSpec(), false); + if(part == null) { + DataOutput outStream = (DataOutput)fs.open(descTbl.getResFile()); + String errMsg = "Partition " + descTbl.getPartSpec() + " for table " + descTbl.getTableName() + " does not exist"; + outStream.write(errMsg.getBytes("UTF-8")); + ((FSDataOutputStream)outStream).close(); + return 0; + } + } + } catch (FileNotFoundException e) { + LOG.info("describe table: " + StringUtils.stringifyException(e)); + return 1; + } + catch (IOException e) { + LOG.info("describe table: " + StringUtils.stringifyException(e)); + return 1; + } + + try { + LOG.info("DDLTask: got data for " + tbl.getName()); // write the results in the file DataOutput os = (DataOutput)fs.create(descTbl.getResFile()); List cols = tbl.getCols(); + if(part != null) { + cols = part.getTPartition().getSd().getCols(); + } Iterator iterCols = cols.iterator(); boolean firstCol = true; while (iterCols.hasNext()) @@ -239,6 +378,19 @@ public int execute() { os.write(col.getComment().getBytes("UTF-8")); } } + + // if extended desc table then show the complete details of the table + if(descTbl.isExt()) { + if(part != null) { + // show partition informatio + os.write("\n\nDetailed Partition Information:\n".getBytes("UTF-8")); + 
os.write(part.getTPartition().toString().getBytes("UTF-8")); + } else { + os.write("\nDetailed Table Information:\n".getBytes("UTF-8")); + os.write(tbl.getTTable().toString().getBytes("UTF-8")); + } + } + LOG.info("DDLTask: written data for " + tbl.getName()); ((FSDataOutputStream)os).close(); @@ -246,30 +398,10 @@ public int execute() { LOG.info("describe table: " + StringUtils.stringifyException(e)); return 1; } - catch (InvalidTableException e) { - found = false; - } catch (IOException e) { LOG.info("describe table: " + StringUtils.stringifyException(e)); return 1; } - - if (!found) - { - try { - DataOutput outStream = (DataOutput)fs.open(descTbl.getResFile()); - String errMsg = "Table " + descTbl.getTableName() + " does not exist"; - outStream.write(errMsg.getBytes("UTF-8")); - ((FSDataOutputStream)outStream).close(); - } catch (FileNotFoundException e) { - LOG.info("describe table: " + StringUtils.stringifyException(e)); - return 1; - } - catch (IOException e) { - LOG.info("describe table: " + StringUtils.stringifyException(e)); - return 1; - } - } return 0; } @@ -310,7 +442,52 @@ public int execute() { return 0; } - } catch (HiveException e) { + showPartitionsDesc showParts = work.getShowPartsDesc(); + if (showParts != null) { + // get the partitions for the table and populate the output + String tabName = showParts.getTabName(); + Table tbl = null; + List parts = null; + + tbl = db.getTable(tabName); + + if (!tbl.isPartitioned()) { + console.printError("Table " + tabName + " is not a partitioned table"); + return 1; + } + + parts = db.getPartitionNames(MetaStoreUtils.DEFAULT_DATABASE_NAME, tbl.getName(), Short.MAX_VALUE); + + // write the results in the file + try { + DataOutput outStream = (DataOutput)fs.create(showParts.getResFile()); + Iterator iterParts = parts.iterator(); + boolean firstCol = true; + while (iterParts.hasNext()) + { + if (!firstCol) + outStream.write(terminator); + outStream.write(iterParts.next().getBytes("UTF-8")); + firstCol = false; + } + ((FSDataOutputStream)outStream).close(); + } catch (FileNotFoundException e) { + LOG.info("show partitions: " + StringUtils.stringifyException(e)); + return 1; + } catch (IOException e) { + LOG.info("show partitions: " + StringUtils.stringifyException(e)); + return 1; + } + return 0; + } + + } + catch (InvalidTableException e) { + console.printError("Table " + e.getTableName() + " does not exist"); + LOG.debug(StringUtils.stringifyException(e)); + return 1; + } + catch (HiveException e) { console.printError("FAILED: Error in metadata: " + e.getMessage(), "\n" + StringUtils.stringifyException(e)); LOG.debug(StringUtils.stringifyException(e)); return 1; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java index 0d5aac61a..7296d09a1 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java @@ -27,12 +27,13 @@ import org.apache.commons.logging.LogFactory; import org.apache.commons.lang.StringUtils; -import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.io.*; import org.apache.hadoop.mapred.*; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapred.JobConf; +import org.apache.hadoop.mapred.FileInputFormat; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.plan.mapredWork; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -42,6 
+43,7 @@ public class ExecDriver extends Task implements Serializable { private static final long serialVersionUID = 1L; + public static final long LOAD_PER_REDUCER = 1024 * 1024 * 1024; transient protected JobConf job; @@ -80,7 +82,48 @@ protected void fillInDefaults() { LOG.warn("Number of reduce tasks not specified. Defaulting to jobconf value of: " + job.getNumReduceTasks()); work.setNumReduceTasks(job.getNumReduceTasks()); } - } + } + else + LOG.info("Number of reduce tasks determined at compile : " + work.getNumReduceTasks()); + } + + /** + * A list of the currently running jobs spawned in this Hive instance that is used + * to kill all running jobs in the event of an unexpected shutdown - i.e., the JVM shuts + * down while there are still jobs running. + */ + public static HashMap runningJobKillURIs = new HashMap (); + + + /** + * In Hive, when the user control-c's the command line, any running jobs spawned from that command + * line are best-effort killed. + * + * This static constructor registers a shutdown thread to iterate over all the running job + * kill URLs and do a get on them. + * + */ + static { + if(new org.apache.hadoop.conf.Configuration().getBoolean("webinterface.private.actions", false)) { + Runtime.getRuntime().addShutdownHook(new Thread() { + public void run() { + for(Iterator elems = runningJobKillURIs.values().iterator(); elems.hasNext() ; ) { + String uri = elems.next(); + try { + System.err.println("killing job with: " + uri); + int retCode = ((java.net.HttpURLConnection)new java.net.URL(uri).openConnection()).getResponseCode(); + if(retCode != 200) { + System.err.println("Got an error trying to kill job with URI: " + uri + " = " + retCode); + } + } catch(Exception e) { + System.err.println("trying to kill job, caught: " + e); + // do nothing + } + } + } + } + ); + } } /** @@ -123,6 +166,33 @@ public RunningJob jobProgress(JobClient jc, RunningJob rj) return rj; } + private void inferNumReducers() throws Exception { + FileSystem fs = FileSystem.get(job); + + if ((work.getReducer() != null) && (work.getInferNumReducers() == true)) { + long inpSz = 0; + + // based on the input size - estimate the number of reducers + Path[] inputPaths = FileInputFormat.getInputPaths(job); + + for (Path inputP : inputPaths) { + if (fs.exists(inputP)) { + FileStatus[] fStats = fs.listStatus(inputP); + for (FileStatus fStat:fStats) + inpSz += fStat.getLen(); + } + } + + + int newRed = (int)(inpSz / LOAD_PER_REDUCER) + 1; + if (newRed < work.getNumReduceTasks().intValue()) + { + LOG.warn("Number of reduce tasks inferred based on input size to : " + newRed); + work.setNumReduceTasks(Integer.valueOf(newRed)); + } + } + } + /** * Execute a query plan using Hadoop */ @@ -141,24 +211,24 @@ public int execute() { LOG.info("Adding input file " + onefile); FileInputFormat.addInputPaths(job, onefile); } - + String hiveScratchDir = HiveConf.getVar(job, HiveConf.ConfVars.SCRATCHDIR); String jobScratchDir = hiveScratchDir + Utilities.randGen.nextInt(); FileOutputFormat.setOutputPath(job, new Path(jobScratchDir)); job.setMapperClass(ExecMapper.class); - + job.setMapOutputValueClass(Text.class); job.setMapOutputKeyClass(HiveKey.class); - + job.setNumReduceTasks(work.getNumReduceTasks().intValue()); job.setReducerClass(ExecReducer.class); - + job.setInputFormat(org.apache.hadoop.hive.ql.io.HiveInputFormat.class); - + // No-Op - we don't really write anything here .. 
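Editor's note: inferNumReducers() above estimates the reducer count from the total input size, one reducer per LOAD_PER_REDUCER (1 GB) of input, and only ever lowers the number configured at compile time. The arithmetic can be isolated in a small sketch (class and method names are illustrative):

public class ReducerEstimator {
  // Same constant as the hunk above: one reducer per gigabyte of input.
  public static final long LOAD_PER_REDUCER = 1024L * 1024 * 1024;

  // Estimate from input size, but never raise the number above what was configured.
  public static int inferNumReducers(long totalInputBytes, int configuredReducers) {
    int estimated = (int) (totalInputBytes / LOAD_PER_REDUCER) + 1;
    return Math.min(estimated, configuredReducers);
  }

  public static void main(String[] args) {
    // 2.5 GB of input with 10 configured reducers -> 3 reducers.
    System.out.println(inferNumReducers(2684354560L, 10));
    // 100 MB of input -> 1 reducer.
    System.out.println(inferNumReducers(104857600L, 10));
  }
}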
job.setOutputKeyClass(Text.class); job.setOutputValueClass(Text.class); - + String auxJars = HiveConf.getVar(job, HiveConf.ConfVars.HIVEAUXJARS); if (StringUtils.isNotBlank(auxJars)) { LOG.info("adding libjars: " + auxJars); @@ -168,15 +238,41 @@ public int execute() { int returnVal = 0; FileSystem fs = null; RunningJob rj = null; - + try { fs = FileSystem.get(job); + + // if the input is empty exit gracefully + Path[] inputPaths = FileInputFormat.getInputPaths(job); + boolean emptyInput = true; + for (Path inputP : inputPaths) { + if(!fs.exists(inputP)) + continue; + + FileStatus[] fStats = fs.listStatus(inputP); + for (FileStatus fStat:fStats) { + if (fStat.getLen() > 0) { + emptyInput = false; + break; + } + } + } + + if (emptyInput) { + console.printInfo("Job need not be submitted: no output: Success"); + return 0; + } + + inferNumReducers(); JobClient jc = new JobClient(job); rj = jc.submitJob(job); + // add to list of running jobs so in case of abnormal shutdown can kill it. + runningJobKillURIs.put(rj.getJobID(), rj.getTrackingURL() + "&action=kill"); + jobInfo(rj); rj = jobProgress(jc, rj); - + String statusMesg = "Ended Job = " + rj.getJobID(); if(!rj.isSuccessful()) { statusMesg += " with errors"; @@ -203,6 +299,7 @@ public int execute() { if(returnVal != 0 && rj != null) { rj.killJob(); } + runningJobKillURIs.remove(rj.getJobID()); } catch (Exception e) {} } return (returnVal); @@ -297,5 +394,16 @@ public static String generateCmdLine(HiveConf hconf) { } return sb.toString(); } + + @Override + public boolean isMapRedTask() { + return true; + } + + @Override + public boolean hasReduce() { + mapredWork w = getWork(); + return w.getReducer() != null; + } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapper.java index 8dbca81be..c084fcb22 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapper.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapper.java @@ -37,7 +37,8 @@ public class ExecMapper extends MapReduceBase implements Mapper { private boolean abort = false; private Reporter rp; public static final Log l4j = LogFactory.getLog("ExecMapper"); - + private static boolean done; + public void configure(JobConf job) { jc = job; mapredWork mrwork = Utilities.getMapRedWork(job); @@ -63,8 +64,11 @@ public void map(Object key, Object value, } try { - // Since there is no concept of a group, we don't invoke startGroup/endGroup for a mapper - mo.process((Writable)value); + if (mo.getDone()) + done = true; + else + // Since there is no concept of a group, we don't invoke startGroup/endGroup for a mapper + mo.process((Writable)value); } catch (HiveException e) { abort = true; e.printStackTrace(); @@ -73,6 +77,19 @@ public void map(Object key, Object value, } public void close() { + // No row was processed + if(oc == null) { + try { + l4j.trace("Close called no row"); + mo.initialize(jc); + rp = null; + } catch (HiveException e) { + abort = true; + e.printStackTrace(); + throw new RuntimeException ("Map operator close failed during initialize", e); + } + } + // detecting failed executions by exceptions thrown by the operator tree // ideally hadoop should let us know whether map execution failed or not try { @@ -89,6 +106,10 @@ public void close() { } } + public static boolean getDone() { + return done; + } + public static class reportStats implements Operator.OperatorFunc { Reporter rp; public reportStats (Reporter rp) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java index 50bcf96a9..138fcc69d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java @@ -183,6 +183,20 @@ public void reduce(Object key, Iterator values, } public void close() { + + // No row was processed + if(oc == null) { + try { + l4j.trace("Close called no row"); + reducer.initialize(jc); + rp = null; + } catch (HiveException e) { + abort = true; + e.printStackTrace(); + throw new RuntimeException ("Reduce operator close failed during initialize", e); + } + } + try { if (groupKey != null) { // If a operator wants to do some work at the end of a group diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java index 8b57dcf99..919664f6c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java @@ -28,6 +28,8 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.Arrays; +import java.util.Comparator; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hive.ql.plan.explain; @@ -183,7 +185,10 @@ private void outputPlan(Serializable work, PrintStream out, boolean extended, in } // We look at all methods that generate values for explain - for(Method m: work.getClass().getMethods()) { + Method[] methods = work.getClass().getMethods(); + Arrays.sort(methods, new MethodComparator()); + + for(Method m: methods) { int prop_indents = indent+2; note = m.getAnnotation(explain.class); @@ -330,4 +335,13 @@ public void outputStagePlans(PrintStream out, new HashSet>(), indent+2); } } + + public static class MethodComparator implements Comparator { + public int compare(Object o1, Object o2) { + Method m1 = (Method)o1; + Method m2 = (Method)o2; + return m1.getName().compareTo(m2.getName()); + } + } + } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFieldEvaluator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFieldEvaluator.java index c25e4a170..5eea3b1b3 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFieldEvaluator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFieldEvaluator.java @@ -18,11 +18,16 @@ package org.apache.hadoop.hive.ql.exec; +import java.util.ArrayList; +import java.util.List; + import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.exprNodeFieldDesc; import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject; +import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.StructField; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; @@ -31,9 +36,10 @@ public class ExprNodeFieldEvaluator extends ExprNodeEvaluator { protected exprNodeFieldDesc desc; transient ExprNodeEvaluator leftEvaluator; transient InspectableObject leftInspectableObject; - transient StructObjectInspector cachedLeftObjectInspector; + transient StructObjectInspector structObjectInspector; transient StructField field; - transient ObjectInspector fieldObjectInspector; + transient ObjectInspector structFieldObjectInspector; + transient ObjectInspector resultObjectInspector; public ExprNodeFieldEvaluator(exprNodeFieldDesc desc) { 
this.desc = desc; @@ -50,14 +56,19 @@ public void evaluate(Object row, ObjectInspector rowInspector, leftEvaluator.evaluate(row, rowInspector, leftInspectableObject); if (field == null) { - cachedLeftObjectInspector = (StructObjectInspector)leftInspectableObject.oi; - field = cachedLeftObjectInspector.getStructFieldRef(desc.getFieldName()); - fieldObjectInspector = field.getFieldObjectInspector(); + evaluateInspector(rowInspector); + } + result.oi = resultObjectInspector; + if (desc.getIsList()) { + List list = ((ListObjectInspector)leftInspectableObject.oi).getList(leftInspectableObject.o); + List r = new ArrayList(list.size()); + for(int i=0; i implements Serializable { + private static final long serialVersionUID = 1L; + + static final private int MAX_ROWS = 100; + + public void initialize (HiveConf conf) { + super.initialize(conf); + splitNum = 0; + currRecReader = null; + + try { + // Create a file system handle + fs = FileSystem.get(conf); + serde = work.getDeserializerClass().newInstance(); + serde.initialize(null, work.getSchema()); + job = new JobConf(conf, ExecDriver.class); + Path inputP = work.getSrcDir(); + if(!fs.exists(inputP)) { + empty = true; + return; + } + + empty = true; + FileStatus[] fStats = fs.listStatus(inputP); + for (FileStatus fStat:fStats) { + if (fStat.getLen() > 0) { + empty = false; + break; + } + } + + if (empty) + return; + + FileInputFormat.setInputPaths(job, inputP); + inputFormat = getInputFormatFromCache(work.getInputFormatClass(), job); + inputSplits = inputFormat.getSplits(job, 1); + mSerde = new MetadataTypedColumnsetSerDe(); + Properties mSerdeProp = new Properties(); + mSerdeProp.put(Constants.SERIALIZATION_FORMAT, "" + Utilities.tabCode); + mSerdeProp.put(Constants.SERIALIZATION_NULL_FORMAT, "NULL"); + mSerde.initialize(null, mSerdeProp); + totalRows = 0; + } catch (Exception e) { + // Bail out ungracefully - we should never hit + // this here - but would have hit it in SemanticAnalyzer + LOG.error(StringUtils.stringifyException(e)); + throw new RuntimeException (e); + } + } + + public int execute() { + assert false; + return 0; + } + + /** + * A cache of InputFormat instances. 
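Editor's note: FetchTask.initialize() above, like ExecDriver.execute(), bails out early when the source directory is missing or holds only zero-length files. A standalone sketch of that check, assuming the Hadoop FileSystem API is on the classpath (the class and method names here are invented), is:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class EmptyInputCheck {
  // True when the directory is missing or contains only zero-length files,
  // the same test the fetch and execute paths perform before doing real work.
  public static boolean isEmptyInput(Configuration conf, Path dir) throws IOException {
    FileSystem fs = FileSystem.get(conf);
    if (!fs.exists(dir)) {
      return true;
    }
    for (FileStatus stat : fs.listStatus(dir)) {
      if (stat.getLen() > 0) {
        return false;
      }
    }
    return true;
  }
}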
+ */ + private static Map> inputFormats = + new HashMap>(); + + static InputFormat getInputFormatFromCache(Class inputFormatClass, Configuration conf) throws IOException { + if (!inputFormats.containsKey(inputFormatClass)) { + try { + InputFormat newInstance = + (InputFormat)ReflectionUtils.newInstance(inputFormatClass, conf); + inputFormats.put(inputFormatClass, newInstance); + } catch (Exception e) { + throw new IOException("Cannot create an instance of InputFormat class " + inputFormatClass.getName() + + " as specified in mapredWork!"); + } + } + return inputFormats.get(inputFormatClass); + } + + private int splitNum; + private FileSystem fs; + private RecordReader currRecReader; + private InputSplit[] inputSplits; + private InputFormat inputFormat; + private JobConf job; + private WritableComparable key; + private Writable value; + private Deserializer serde; + private MetadataTypedColumnsetSerDe mSerde; + private int totalRows; + private boolean empty; + + private RecordReader getRecordReader() throws Exception { + if (splitNum >= inputSplits.length) + return null; + currRecReader = inputFormat.getRecordReader(inputSplits[splitNum++], job, Reporter.NULL); + key = currRecReader.createKey(); + value = currRecReader.createValue(); + return currRecReader; + } + + public boolean fetch(Vector res) { + try { + if (empty) + return false; + + int numRows = 0; + int rowsRet = MAX_ROWS; + if ((work.getLimit() >= 0) && ((work.getLimit() - totalRows) < rowsRet)) + rowsRet = work.getLimit() - totalRows; + if (rowsRet <= 0) { + if (currRecReader != null) + currRecReader.close(); + return false; + } + + while (numRows < rowsRet) { + if (currRecReader == null) { + currRecReader = getRecordReader(); + if (currRecReader == null) { + if (numRows == 0) + return false; + totalRows += numRows; + return true; + } + } + boolean ret = currRecReader.next(key, value); + if (ret) { + Object obj = serde.deserialize(value); + res.add(((Text)mSerde.serialize(obj, serde.getObjectInspector())).toString()); + numRows++; + } + else { + currRecReader.close(); + currRecReader = getRecordReader(); + if (currRecReader == null) { + if (numRows == 0) + return false; + totalRows += numRows; + return true; + } + else { + key = currRecReader.createKey(); + value = currRecReader.createValue(); + } + } + } + totalRows += numRows; + return true; + } + catch (Exception e) { + console.printError("Failed with exception " + e.getMessage(), "\n" + StringUtils.stringifyException(e)); + return false; + } + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java index 4aa51b3f9..c283647d6 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java @@ -28,6 +28,7 @@ import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.fileSinkDesc; import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat; +import org.apache.hadoop.hive.serde.Constants; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.Serializer; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; @@ -101,12 +102,27 @@ public void initialize(Configuration hconf) throws HiveException { if(isCompressed) { finalPath = new Path(conf.getDirName(), Utilities.getTaskId(hconf) + ".gz"); } + String rowSeparatorString = conf.getTableInfo().getProperties().getProperty(Constants.LINE_DELIM, "\n"); + int rowSeparator = 
0; + try { + rowSeparator = Byte.parseByte(rowSeparatorString); + } catch (NumberFormatException e) { + rowSeparator = rowSeparatorString.charAt(0); + } + final int finalRowSeparator = rowSeparator; final OutputStream outStream = Utilities.createCompressedStream(jc, fs.create(outPath)); outWriter = new RecordWriter () { public void write(Writable r) throws IOException { - Text tr = (Text)r; - outStream.write(tr.getBytes(), 0, tr.getLength()); - outStream.write('\n'); + if (r instanceof Text) { + Text tr = (Text)r; + outStream.write(tr.getBytes(), 0, tr.getLength()); + outStream.write(finalRowSeparator); + } else { + // DynamicSerDe always writes out BytesWritable + BytesWritable bw = (BytesWritable)r; + outStream.write(bw.get(), 0, bw.getSize()); + outStream.write(finalRowSeparator); + } } public void close(boolean abort) throws IOException { outStream.close(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java index e88e9f074..27b8ea906 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java @@ -52,7 +52,15 @@ public class FunctionRegistry { registerUDF("str_lt", UDFStrLt.class, OperatorType.PREFIX, false); registerUDF("str_ge", UDFStrGe.class, OperatorType.PREFIX, false); registerUDF("str_le", UDFStrLe.class, OperatorType.PREFIX, false); + + registerUDF("size", UDFSize.class, OperatorType.PREFIX, false); + registerUDF("round", UDFRound.class, OperatorType.PREFIX, false); + registerUDF("floor", UDFFloor.class, OperatorType.PREFIX, false); + registerUDF("ceil", UDFCeil.class, OperatorType.PREFIX, false); + registerUDF("ceiling", UDFCeil.class, OperatorType.PREFIX, false); + registerUDF("rand", UDFRand.class, OperatorType.PREFIX, false); + registerUDF("upper", UDFUpper.class, OperatorType.PREFIX, false); registerUDF("lower", UDFLower.class, OperatorType.PREFIX, false); registerUDF("ucase", UDFUpper.class, OperatorType.PREFIX, false); @@ -66,6 +74,9 @@ public class FunctionRegistry { registerUDF("regexp", UDFRegExp.class, OperatorType.INFIX, true); registerUDF("regexp_replace", UDFRegExpReplace.class, OperatorType.PREFIX, false); + registerUDF("positive", UDFOPPositive.class, OperatorType.PREFIX, true, "+"); + registerUDF("negative", UDFOPNegative.class, OperatorType.PREFIX, true, "-"); + registerUDF("+", UDFOPPlus.class, OperatorType.INFIX, true); registerUDF("-", UDFOPMinus.class, OperatorType.INFIX, true); registerUDF("*", UDFOPMultiply.class, OperatorType.INFIX, true); @@ -75,7 +86,7 @@ public class FunctionRegistry { registerUDF("&", UDFOPBitAnd.class, OperatorType.INFIX, true); registerUDF("|", UDFOPBitOr.class, OperatorType.INFIX, true); registerUDF("^", UDFOPBitXor.class, OperatorType.INFIX, true); - registerUDF("~", UDFOPBitNot.class, OperatorType.INFIX, true); + registerUDF("~", UDFOPBitNot.class, OperatorType.PREFIX, true); registerUDF("=", UDFOPEqual.class, OperatorType.INFIX, true); registerUDF("==", UDFOPEqual.class, OperatorType.INFIX, true, "="); @@ -89,8 +100,8 @@ public class FunctionRegistry { registerUDF("&&", UDFOPAnd.class, OperatorType.INFIX, true, "and"); registerUDF("or", UDFOPOr.class, OperatorType.INFIX, true); registerUDF("||", UDFOPOr.class, OperatorType.INFIX, true, "or"); - registerUDF("not", UDFOPNot.class, OperatorType.INFIX, true); - registerUDF("!", UDFOPNot.class, OperatorType.INFIX, true, "not"); + registerUDF("not", UDFOPNot.class, OperatorType.PREFIX, true); + 
registerUDF("!", UDFOPNot.class, OperatorType.PREFIX, true, "not"); registerUDF("isnull", UDFOPNull.class, OperatorType.POSTFIX, true, "is null"); registerUDF("isnotnull", UDFOPNotNull.class, OperatorType.POSTFIX, true, "is not null"); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/LimitOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/LimitOperator.java new file mode 100644 index 000000000..9201ae39b --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/LimitOperator.java @@ -0,0 +1,53 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec; + +import java.io.*; + +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.plan.limitDesc; +import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.conf.Configuration; + +/** + * Limit operator implementation + * Limits a subobject and passes that on. 
+ **/ +public class LimitOperator extends Operator implements Serializable { + private static final long serialVersionUID = 1L; + + transient protected int limit; + transient protected int currCount; + + public void initialize(Configuration hconf) throws HiveException { + super.initialize(hconf); + limit = conf.getLimit(); + currCount = 0; + } + + public void process(Object row, ObjectInspector rowInspector) throws HiveException { + if (currCount < limit) { + forward(row, rowInspector); + currCount++; + } + else + setDone(true); + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java index 4382b7815..ad239e05f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java @@ -88,4 +88,15 @@ public int execute() { return (1); } } + + @Override + public boolean isMapRedTask() { + return true; + } + + @Override + public boolean hasReduce() { + mapredWork w = getWork(); + return w.getReducer() != null; + } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java index 9d033fa71..74a7d1868 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java @@ -55,6 +55,7 @@ public List> getChildOperators() { protected String id; protected T conf; + protected boolean done; public void setConf(T conf) { this.conf = conf; @@ -73,6 +74,14 @@ public String getId() { return id; } + public boolean getDone() { + return done; + } + + public void setDone(boolean done) { + this.done = done; + } + // non-bean fields needed during compilation transient private RowSchema rowSchema; @@ -219,9 +228,24 @@ public void close(boolean abort) throws HiveException { protected void forward(Object row, ObjectInspector rowInspector) throws HiveException { - if(childOperators == null) { + if((childOperators == null) || (getDone())) { return; } + + // if all children are done, this operator is also done + boolean isDone = true; + for(Operator o: childOperators) { + if (!o.getDone()) { + isDone = false; + break; + } + } + + if (isDone) { + setDone(isDone); + return; + } + for(Operator o: childOperators) { o.process(row, rowInspector); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java index 863e3ae55..4841bda6e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java @@ -48,6 +48,7 @@ public opTuple(Class descClass, Class> opClass) { opvec.add(new opTuple (extractDesc.class, ExtractOperator.class)); opvec.add(new opTuple (groupByDesc.class, GroupByOperator.class)); opvec.add(new opTuple (joinDesc.class, JoinOperator.class)); + opvec.add(new opTuple (limitDesc.class, LimitOperator.class)); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java index 8ec808c5e..ed593b169 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java @@ -134,10 +134,10 @@ public void process(Object row, ObjectInspector rowInspector) throws HiveExcepti } keyWritable.setHashCode(keyHashCode); - ArrayList values = new ArrayList(valueEval.length); + ArrayList values = new 
ArrayList(valueEval.length); for(ExprNodeEvaluator e: valueEval) { e.evaluate(row, rowInspector, tempInspectableObject); - values.add(tempInspectableObject.o == null ? null : tempInspectableObject.o.toString()); + values.add(tempInspectableObject.o); if (valueObjectInspector == null) { valueFieldsObjectInspectors.add(tempInspectableObject.oi); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java index 84a330da3..998bd0588 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java @@ -86,6 +86,12 @@ public void initialize (HiveConf conf) { } public abstract int execute(); + + // dummy method - FetchTask overwrites this + public boolean fetch(Vector res) { + assert false; + return false; + } public void setChildTasks(List> childTasks) { this.childTasks = childTasks; @@ -158,4 +164,11 @@ public String getId() { return id; } + public boolean isMapRedTask() { + return false; + } + + public boolean hasReduce() { + return false; + } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java index 528a1995d..9f78b3208 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java @@ -44,6 +44,7 @@ public taskTuple(Class workClass, Class> taskClass) { id = 0; taskvec = new ArrayList>(); taskvec.add(new taskTuple(moveWork.class, MoveTask.class)); + taskvec.add(new taskTuple(fetchWork.class, FetchTask.class)); taskvec.add(new taskTuple(copyWork.class, CopyTask.class)); taskvec.add(new taskTuple(DDLWork.class, DDLTask.class)); taskvec.add(new taskTuple(FunctionWork.class, FunctionTask.class)); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/UDF.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/UDF.java index 334f83d30..a7befefa9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/UDF.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/UDF.java @@ -19,30 +19,20 @@ package org.apache.hadoop.hive.ql.exec; /** - * A dummy User-defined function (UDF) for the use with Hive. + * A User-defined function (UDF) for the use with Hive. * - * New UDF classes do NOT need to inherit from this UDF class. + * New UDF classes need to inherit from this UDF class. * * Required for all UDF classes: - * 1. Implement a single method named "evaluate" which will be called by Hive. + * 1. Implement one or more methods named "evaluate" which will be called by Hive. * The following are some examples: + * public int evaluate(); * public int evaluate(int a); * public double evaluate(int a, double b); * public String evaluate(String a, int b, String c); * - * "evaluate" should neither be a void method, nor should it returns "null" in any case. - * In both cases, the Hive system will throw an HiveException saying the evaluation of UDF - * is failed. + * "evaluate" should never be a void method. However it can return "null" if needed. */ -public class UDF { +public interface UDF { - public UDF() { } - - /** Evaluate the UDF. 
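To make the revised UDF contract above concrete, here is a minimal sketch of a function written against it. The class name and behavior are invented for illustration and are not part of this patch; it assumes only the UDF interface shown above and the registerUDF(...) call shape used in FunctionRegistry.

    package org.apache.hadoop.hive.ql.udf;

    import org.apache.hadoop.hive.ql.exec.UDF;

    // Hypothetical example (not part of this patch): a trim-style function
    // written against the revised contract -- implement the UDF interface and
    // expose one or more overloaded evaluate() methods.
    public class UDFExampleTrim implements UDF {

      // evaluate() may be overloaded on other argument types; returning null
      // is allowed under the relaxed contract described above.
      public String evaluate(String s) {
        return (s == null) ? null : s.trim();
      }
    }

Such a class would then be made visible to queries through an entry like registerUDF("example_trim", UDFExampleTrim.class, OperatorType.PREFIX, false), alongside the registrations added earlier in this patch.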
- * @return plain old java object - **/ - public int evaluate() { - return 0; - } - } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java index 5829d54ce..bf01931a3 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java @@ -382,7 +382,7 @@ public static String abbreviate(String str, int max) { } public final static String NSTR = ""; - public static enum streamStatus {EOF, TERMINATED, NORMAL} + public static enum streamStatus {EOF, TERMINATED} public static streamStatus readColumn(DataInput in, OutputStream out) throws IOException { while (true) { @@ -397,10 +397,6 @@ public static streamStatus readColumn(DataInput in, OutputStream out) throws IOE return streamStatus.TERMINATED; } - if (b == Utilities.ctrlaCode) { - return streamStatus.NORMAL; - } - out.write(b); } // Unreachable diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java index 99d3f5214..480780675 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java @@ -179,8 +179,8 @@ public RecordReader getRecordReader(InputSplit split, JobConf job, } InputFormat inputFormat = getInputFormatFromCache(inputFormatClass); - - return inputFormat.getRecordReader(inputSplit, job, reporter); + + return new HiveRecordReader(inputFormat.getRecordReader(inputSplit, job, reporter)); } @@ -219,6 +219,7 @@ public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException { return result.toArray(new HiveInputSplit[result.size()]); } + private tableDesc getTableDescFromPath(Path dir) throws IOException { partitionDesc partDesc = pathToPartitionInfo.get(dir.toString()); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveRecordReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveRecordReader.java new file mode 100644 index 000000000..30a0b8909 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveRecordReader.java @@ -0,0 +1,61 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.io; + +import org.apache.hadoop.hive.ql.exec.ExecMapper; +import org.apache.hadoop.mapred.RecordReader; +import org.apache.hadoop.io.Writable; +import org.apache.hadoop.io.WritableComparable; +import java.io.IOException; + +public class HiveRecordReader + implements RecordReader { + + private RecordReader recordReader; + public HiveRecordReader(RecordReader recordReader){ + this.recordReader = recordReader; + } + + public void close() throws IOException { + recordReader.close(); + } + + public K createKey() { + return (K)recordReader.createKey(); + } + + public V createValue() { + return (V)recordReader.createValue(); + } + + public long getPos() throws IOException { + return recordReader.getPos(); + } + + public float getProgress() throws IOException { + return recordReader.getProgress(); + } + + public boolean next(K key, V value) throws IOException { + if (ExecMapper.getDone()) + return false; + return recordReader.next(key, value); + } +} + diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java index 76eddca4f..28ca0a590 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java @@ -105,7 +105,6 @@ private Hive(HiveConf c) throws HiveException { this.conf = c; try { msc = this.createMetaStoreClient(); - //msc = new HiveMetaStoreClient(this.conf); } catch (MetaException e) { throw new HiveException("Unable to open connection to metastore", e); } @@ -169,7 +168,7 @@ public void createTable(String tableName, List columns, List par tbl.getPartCols().add(part); } } - tbl.setSerializationLib(org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe.shortName()); + tbl.setSerializationLib(org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe.class.getName()); tbl.setNumBuckets(bucketCount); createTable(tbl); } @@ -269,7 +268,7 @@ public Table getTable(final String tableName, boolean throwException) throws Hiv } catch (NoSuchObjectException e) { if(throwException) { LOG.error(StringUtils.stringifyException(e)); - throw new InvalidTableException("Table not found " + tableName); + throw new InvalidTableException("Table not found ", tableName); } return null; } catch (Exception e) { @@ -463,6 +462,17 @@ public Partition getPartition(Table tbl, AbstractMap partSpec, b return new Partition(tbl, tpart); } + public boolean dropPartition(String db_name, String tbl_name, List part_vals, + boolean deleteData) throws HiveException { + try { + return msc.dropPartition(db_name, tbl_name, part_vals, deleteData); + } catch (NoSuchObjectException e) { + throw new HiveException("Partition or table doesn't exist.", e); + } catch (Exception e) { + throw new HiveException("Unknow error. 
Please check logs.", e); + } + } + public List getPartitionNames(String dbName, String tblName, short max) throws HiveException { List names = null; try { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java index 4e8f95af1..a57e39b37 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java @@ -58,6 +58,14 @@ public org.apache.hadoop.hive.metastore.api.Partition getTPartition() { private LinkedHashMap spec; + /** + * @return + * @see org.apache.hadoop.hive.metastore.api.Partition#getValues() + */ + public List getValues() { + return tPartition.getValues(); + } + private Path partPath; private URI partURI; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java index a5a0d60ba..501ccfba4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java @@ -95,7 +95,7 @@ public Table(String name, Properties schema, Deserializer deserializer, initEmpty(); this.schema = schema; this.deserializer = deserializer; //TODO: convert to SerDeInfo format - this.getTTable().getSd().getSerdeInfo().setSerializationLib(deserializer.getShortName()); + this.getTTable().getSd().getSerdeInfo().setSerializationLib(deserializer.getClass().getName()); getTTable().setTableName(name); getSerdeInfo().setSerializationLib(deserializer.getClass().getName()); setInputFormatClass(inputFormatClass); @@ -108,7 +108,7 @@ public Table(String name) { initEmpty(); getTTable().setTableName(name); getTTable().setDbName(MetaStoreUtils.DEFAULT_DATABASE_NAME); - getSerdeInfo().setSerializationLib(MetadataTypedColumnsetSerDe.shortName()); + getSerdeInfo().setSerializationLib(MetadataTypedColumnsetSerDe.class.getName()); getSerdeInfo().getParameters().put(Constants.SERIALIZATION_FORMAT, "1"); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java index 9e0a0ff56..d7fbbafac 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java @@ -31,7 +31,6 @@ import org.apache.hadoop.hive.ql.metadata.*; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.Task; -import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; @@ -43,6 +42,8 @@ public abstract class BaseSemanticAnalyzer { protected final Hive db; protected final HiveConf conf; protected List> rootTasks; + protected Task fetchTask; + protected boolean fetchTaskInit; protected final Log LOG; protected final LogHelper console; @@ -65,13 +66,40 @@ public BaseSemanticAnalyzer(HiveConf conf) throws SemanticException { } } - public abstract void analyze(CommonTree ast, Context ctx) throws SemanticException; + public abstract void analyzeInternal(CommonTree ast, Context ctx) throws SemanticException; + + public void analyze(CommonTree ast, Context ctx) throws SemanticException { + scratchDir = ctx.getScratchDir(); + analyzeInternal(ast, ctx); + } public List> getRootTasks() { return rootTasks; } - protected void reset() { + /** + * @return the fetchTask + */ + public Task getFetchTask() { + return fetchTask; + } + + /** + * @param fetchTask the fetchTask to set + */ + public void setFetchTask(Task 
fetchTask) { + this.fetchTask = fetchTask; + } + + public boolean getFetchTaskInit() { + return fetchTaskInit; + } + + public void setFetchTaskInit(boolean fetchTaskInit) { + this.fetchTaskInit = fetchTaskInit; + } + + protected void reset() { rootTasks = new ArrayList>(); } @@ -118,9 +146,33 @@ public static String charSetString(String charSetName, String charSetString) public static String unescapeSQLString(String b) { assert(b.charAt(0) == '\''); assert(b.charAt(b.length()-1) == '\''); + + // Some of the strings can be passed in as unicode. For example, the + // delimiter can be passed in as \002 - So, we first check if the + // string is a unicode number, else go back to the old behavior StringBuilder sb = new StringBuilder(b.length()); - for(int i=1; i+1= '0' && i1 <= '1') && + (i2 >= '0' && i2 <= '7') && + (i3 >= '0' && i3 <= '7')) + { + byte bVal = (byte)((i3 - '0') + ((i2 - '0') * 8 ) + ((i1 - '0') * 8 * 8)); + byte[] bValArr = new byte[1]; + bValArr[0] = bVal; + String tmp = new String(bValArr); + sb.append(tmp); + i += 4; + continue; + } + } + + if (b.charAt(i) == '\\' && (i+2 < b.length())) { char n=b.charAt(i+1); switch(n) { case '0': sb.append("\0"); break; @@ -141,6 +193,7 @@ public static String unescapeSQLString(String b) { } else { sb.append(b.charAt(i)); } + i++; } return sb.toString(); } @@ -159,7 +212,7 @@ public static class tableSpec { public HashMap partSpec; public Partition partHandle; - public tableSpec(Hive db, CommonTree ast) throws SemanticException { + public tableSpec(Hive db, CommonTree ast, boolean forceCreatePartition) throws SemanticException { assert(ast.getToken().getType() == HiveParser.TOK_TAB); int childIndex = 0; @@ -179,7 +232,10 @@ public tableSpec(Hive db, CommonTree ast) throws SemanticException { String val = stripQuotes(partspec_val.getChild(1).getText()); partSpec.put(partspec_val.getChild(0).getText(), val); } - partHandle = Hive.get().getPartition(tableHandle, partSpec, true); + partHandle = Hive.get().getPartition(tableHandle, partSpec, forceCreatePartition); + if(partHandle == null) { + throw new SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(ast.getChild(childIndex))); + } } } catch (InvalidTableException ite) { throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(ast.getChild(0)), ite); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java index e316993d9..049c5c6c9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java @@ -18,13 +18,22 @@ package org.apache.hadoop.hive.ql.parse; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +import org.antlr.runtime.tree.CommonTree; +import org.antlr.runtime.tree.Tree; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.MetaStoreUtils; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.Order; - -import org.antlr.runtime.tree.CommonTree; - import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.plan.DDLWork; @@ -32,17 +41,11 @@ import org.apache.hadoop.hive.ql.plan.createTableDesc; import 
org.apache.hadoop.hive.ql.plan.descTableDesc; import org.apache.hadoop.hive.ql.plan.dropTableDesc; +import org.apache.hadoop.hive.ql.plan.showPartitionsDesc; import org.apache.hadoop.hive.ql.plan.showTablesDesc; -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.hive.ql.plan.alterTableDesc.alterTableTypes; import org.apache.hadoop.hive.serde.Constants; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - -import java.util.*; - public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer { private static final Log LOG = LogFactory.getLog("hive.ql.parse.DDLSemanticAnalyzer"); public static final Map TokenToTypeName = new HashMap(); @@ -67,7 +70,7 @@ public DDLSemanticAnalyzer(HiveConf conf) throws SemanticException { } @Override - public void analyze(CommonTree ast, Context ctx) throws SemanticException { + public void analyzeInternal(CommonTree ast, Context ctx) throws SemanticException { this.ctx = ctx; if (ast.getToken().getType() == HiveParser.TOK_CREATETABLE) analyzeCreateTable(ast, false); @@ -88,7 +91,16 @@ else if (ast.getToken().getType() == HiveParser.TOK_SHOWTABLES) else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_RENAME) analyzeAlterTableRename(ast); else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_ADDCOLS) - analyzeAlterTableAddCols(ast); + analyzeAlterTableModifyCols(ast, alterTableTypes.ADDCOLS); + else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_REPLACECOLS) + analyzeAlterTableModifyCols(ast, alterTableTypes.REPLACECOLS); + else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_DROPPARTS) + analyzeAlterTableDropParts(ast); + else if (ast.getToken().getType() == HiveParser.TOK_SHOWPARTITIONS) + { + ctx.setResFile(new Path(getTmpFileName())); + analyzeShowPartitions(ast); + } } private void analyzeCreateTable(CommonTree ast, boolean isExt) @@ -107,8 +119,10 @@ private void analyzeCreateTable(CommonTree ast, boolean isExt) String comment = null; boolean isSequenceFile = false; String location = null; + String serde = null; + Map mapProp = null; - LOG.info("Creating table" + tableName); + LOG.info("Creating table" + tableName); int numCh = ast.getChildCount(); for (int num = 2; num < numCh; num++) { @@ -152,6 +166,18 @@ private void analyzeCreateTable(CommonTree ast, boolean isExt) } } break; + case HiveParser.TOK_TABLESERIALIZER: + serde = unescapeSQLString(child.getChild(0).getText()); + if (child.getChildCount() == 2) { + mapProp = new HashMap(); + CommonTree prop = (CommonTree)((CommonTree)child.getChild(1)).getChild(0); + for (int propChild = 0; propChild < prop.getChildCount(); propChild++) { + String key = unescapeSQLString(prop.getChild(propChild).getChild(0).getText()); + String value = unescapeSQLString(prop.getChild(propChild).getChild(1).getText()); + mapProp.put(key,value); + } + } + break; case HiveParser.TOK_TBLSEQUENCEFILE: isSequenceFile = true; break; @@ -166,7 +192,7 @@ private void analyzeCreateTable(CommonTree ast, boolean isExt) new createTableDesc(tableName, isExt, cols, partCols, bucketCols, sortCols, numBuckets, fieldDelim, collItemDelim, mapKeyDelim, lineDelim, - comment, isSequenceFile, location); + comment, isSequenceFile, location, serde, mapProp); validateCreateTable(crtTblDesc); rootTasks.add(TaskFactory.get(new DDLWork(crtTblDesc), conf)); @@ -310,12 +336,34 @@ private List getColumnNamesOrder(CommonTree ast) private void 
analyzeDescribeTable(CommonTree ast) throws SemanticException { - String tableName = ast.getChild(0).getText(); - descTableDesc descTblDesc = new descTableDesc(ctx.getResFile(), tableName); + Tree table_t = ast.getChild(0); + String tableName = table_t.getChild(0).getText(); + HashMap partSpec = null; + // get partition metadata if partition specified + if (table_t.getChildCount() == 2) { + CommonTree partspec = (CommonTree) table_t.getChild(1); + partSpec = new LinkedHashMap(); + for (int i = 0; i < partspec.getChildCount(); ++i) { + CommonTree partspec_val = (CommonTree) partspec.getChild(i); + String val = stripQuotes(partspec_val.getChild(1).getText()); + partSpec.put(partspec_val.getChild(0).getText(), val); + } + } + + boolean isExt = ast.getChildCount() > 1; + descTableDesc descTblDesc = new descTableDesc(ctx.getResFile(), tableName, partSpec, isExt); rootTasks.add(TaskFactory.get(new DDLWork(descTblDesc), conf)); LOG.info("analyzeDescribeTable done"); } + private void analyzeShowPartitions(CommonTree ast) + throws SemanticException { + showPartitionsDesc showPartsDesc; + String tableName = ast.getChild(0).getText(); + showPartsDesc = new showPartitionsDesc(tableName, ctx.getResFile()); + rootTasks.add(TaskFactory.get(new DDLWork(showPartsDesc), conf)); + } + private void analyzeShowTables(CommonTree ast) throws SemanticException { showTablesDesc showTblsDesc; @@ -335,34 +383,32 @@ private void analyzeAlterTableRename(CommonTree ast) rootTasks.add(TaskFactory.get(new DDLWork(alterTblDesc), conf)); } - private void analyzeAlterTableAddCols(CommonTree ast) + private void analyzeAlterTableModifyCols(CommonTree ast, alterTableTypes alterType) throws SemanticException { String tblName = ast.getChild(0).getText(); List newCols = getColumns((CommonTree)ast.getChild(1)); - Table tbl; - try { - tbl = db.getTable(tblName); - } catch (HiveException e) { - throw new SemanticException(e.getMessage()); - } - List oldCols = tbl.getCols(); - - // make sure the columns does not already exist - Iterator iterNewCols = newCols.iterator(); - while (iterNewCols.hasNext()) { - FieldSchema newCol = iterNewCols.next(); - String newColName = newCol.getName(); - Iterator iterOldCols = oldCols.iterator(); - while (iterOldCols.hasNext()) { - String oldColName = iterOldCols.next().getName(); - if (oldColName.equalsIgnoreCase(newColName)) - throw new SemanticException(ErrorMsg.DUPLICATE_COLUMN_NAMES.getMsg()); - } - oldCols.add(newCol); - } - - alterTableDesc alterTblDesc = new alterTableDesc(tblName, oldCols); + alterTableDesc alterTblDesc = new alterTableDesc(tblName, newCols, alterType); rootTasks.add(TaskFactory.get(new DDLWork(alterTblDesc), conf)); } + private void analyzeAlterTableDropParts(CommonTree ast) throws SemanticException { + String tblName = null; + List> partSpecs = new ArrayList>(); + int childIndex = 0; + // get table metadata + tblName = ast.getChild(0).getText(); + // get partition metadata if partition specified + for( childIndex = 1; childIndex < ast.getChildCount(); childIndex++) { + CommonTree partspec = (CommonTree) ast.getChild(childIndex); + HashMap partSpec = new LinkedHashMap(); + for (int i = 0; i < partspec.getChildCount(); ++i) { + CommonTree partspec_val = (CommonTree) partspec.getChild(i); + String val = stripQuotes(partspec_val.getChild(1).getText()); + partSpec.put(partspec_val.getChild(0).getText(), val); + } + partSpecs.add(partSpec); + } + dropTableDesc dropTblDesc = new dropTableDesc(tblName, partSpecs); + rootTasks.add(TaskFactory.get(new DDLWork(dropTblDesc), conf)); + 
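For reference, the statements below are hypothetical examples of the syntax these analyzer methods (together with the grammar additions further down) are intended to accept; every table, column, and SerDe name is invented. Note that the WITH clause keyword for serializer properties is spelled SERDEPROPERTIES in the lexer, and that delimiters such as '\002' rely on the octal handling added to unescapeSQLString above.

    // Hypothetical HiveQL exercising the new DDL and table-format syntax;
    // all identifiers and class names here are invented for illustration.
    public class ExampleStatements {
      static final String[] EXAMPLES = {
        "SHOW PARTITIONS page_view",
        "DESCRIBE EXTENDED page_view",
        "ALTER TABLE page_view REPLACE COLUMNS (viewtime INT, userid BIGINT)",
        "ALTER TABLE page_view DROP PARTITION (ds='2008-06-08')",
        "CREATE TABLE example_tbl (key INT, value STRING) "
            + "ROW FORMAT SERIALIZER 'com.example.MySerDe' "
            + "WITH SERDEPROPERTIES ('field.delim'='\\002')"
      };
    }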
} } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java index 7b5949c19..8ed865970 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java @@ -28,6 +28,7 @@ public enum ErrorMsg { GENERIC_ERROR("Exception while processing"), INVALID_TABLE("Table not found"), INVALID_COLUMN("Invalid Column Reference"), + INVALID_PARTITION("Partition not found"), AMBIGOUS_COLUMN("Ambigous Column Reference"), AMBIGOUS_TABLE_ALIAS("Ambigous Table Alias"), INVALID_TABLE_ALIAS("Invalid Table Alias"), @@ -47,12 +48,17 @@ public enum ErrorMsg { ILLEGAL_PATH("Path is not legal"), INVALID_NUMERICAL_CONSTANT("Invalid Numerical Constant"), INVALID_ARRAYINDEX_CONSTANT("Non Constant Expressions for Array Indexes not Supported"), + INVALID_MAPINDEX_CONSTANT("Non Constant Expression for Map Indexes not Supported"), + INVALID_MAPINDEX_TYPE("Map Key Type does not Match Index Expression Type"), + NON_COLLECTION_TYPE("[] not Valid on Non Collection Types"), SELECT_DISTINCT_WITH_GROUPBY("SELECT DISTINCT and GROUP BY can not be in the same query"), COLUMN_REPAEATED_IN_PARTITIONING_COLS("Column repeated in partitioning columns"), DUPLICATE_COLUMN_NAMES("Duplicate column names"), COLUMN_REPEATED_IN_CLUSTER_SORT("Same column cannot appear in cluster and sort by"), SAMPLE_RESTRICTION("Cannot Sample on More Than Two Columns"), - SAMPLE_COLUMN_NOT_FOUND("Sample Column Not Found"); + SAMPLE_COLUMN_NOT_FOUND("Sample Column Not Found"), + NO_PARTITION_PREDICATE("No Partition Predicate Found"), + INVALID_DOT(". operator is only supported on struct or list of struct types"); private String mesg; ErrorMsg(String mesg) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java index e0d6e1b3b..78843b110 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java @@ -18,12 +18,15 @@ package org.apache.hadoop.hive.ql.parse; -import java.io.File; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.List; import org.antlr.runtime.tree.CommonTree; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.Context; +import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.plan.explainWork; @@ -34,7 +37,7 @@ public ExplainSemanticAnalyzer(HiveConf conf) throws SemanticException { super(conf); } - public void analyze(CommonTree ast, Context ctx) throws SemanticException { + public void analyzeInternal(CommonTree ast, Context ctx) throws SemanticException { // Create a semantic analyzer for the query BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, (CommonTree)ast.getChild(0)); @@ -46,9 +49,18 @@ public void analyze(CommonTree ast, Context ctx) throws SemanticException { } ctx.setResFile(new Path(getTmpFileName())); - - rootTasks.add(TaskFactory.get(new explainWork(ctx.getResFile(), - sem.getRootTasks(), + List> tasks = sem.getRootTasks(); + Task fetchTask = sem.getFetchTask(); + if (tasks == null) { + if (fetchTask != null) { + tasks = new ArrayList>(); + tasks.add(fetchTask); + } + } + else if (fetchTask != null) + tasks.add(fetchTask); + + rootTasks.add(TaskFactory.get(new explainWork(ctx.getResFile(), tasks, 
((CommonTree)ast.getChild(0)).toStringTree(), extended), this.conf)); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java index 0a470697e..7aa4460e2 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java @@ -36,7 +36,7 @@ public FunctionSemanticAnalyzer(HiveConf conf) throws SemanticException { super(conf); } - public void analyze(CommonTree ast, Context ctx) throws SemanticException { + public void analyzeInternal(CommonTree ast, Context ctx) throws SemanticException { String functionName = ast.getChild(0).getText(); String className = unescapeSQLString(ast.getChild(1).getText()); createFunctionDesc desc = new createFunctionDesc(functionName, className); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g index e16888ae0..0f98be7cb 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g @@ -79,7 +79,10 @@ TOK_CREATETABLE; TOK_DESCTABLE; TOK_ALTERTABLE_RENAME; TOK_ALTERTABLE_ADDCOLS; +TOK_ALTERTABLE_REPLACECOLS; +TOK_ALTERTABLE_DROPPARTS; TOK_SHOWTABLES; +TOK_SHOWPARTITIONS; TOK_CREATEEXTTABLE; TOK_DROPTABLE; TOK_TABCOLLIST; @@ -102,6 +105,11 @@ TOK_TABSORTCOLNAMEDESC; TOK_CHARSETLITERAL; TOK_CREATEFUNCTION; TOK_EXPLAIN; +TOK_TABLESERIALIZER; +TOK_TABLSERDEPROPERTIES; +TOK_TABLESERDEPROPLIST; +TOK_LIMIT; +TOKTABLESERDEPROPERTY; } @@ -161,6 +169,7 @@ dropStatement alterStatement : alterStatementRename | alterStatementAddCol + | alterStatementDropPartitions ; alterStatementRename @@ -169,16 +178,23 @@ alterStatementRename ; alterStatementAddCol - : KW_ALTER KW_TABLE Identifier KW_ADD KW_COLUMNS LPAREN columnNameTypeList RPAREN - -> ^(TOK_ALTERTABLE_ADDCOLS Identifier columnNameTypeList) + : KW_ALTER KW_TABLE Identifier (add=KW_ADD | replace=KW_REPLACE) KW_COLUMNS LPAREN columnNameTypeList RPAREN + -> {$add != null}? ^(TOK_ALTERTABLE_ADDCOLS Identifier columnNameTypeList) + -> ^(TOK_ALTERTABLE_REPLACECOLS Identifier columnNameTypeList) + ; + +alterStatementDropPartitions + : KW_ALTER KW_TABLE Identifier KW_DROP partitionSpec (COMMA partitionSpec)* + -> ^(TOK_ALTERTABLE_DROPPARTS Identifier partitionSpec+) ; descStatement - : KW_DESCRIBE Identifier -> ^(TOK_DESCTABLE Identifier) + : KW_DESCRIBE (isExtended=KW_EXTENDED)? (tab=tabName) -> ^(TOK_DESCTABLE $tab $isExtended?) ; showStatement : KW_SHOW KW_TABLES showStmtIdentifier? -> ^(TOK_SHOWTABLES showStmtIdentifier?) + | KW_SHOW KW_PARTITIONS Identifier -> ^(TOK_SHOWPARTITIONS Identifier) ; createFunctionStatement @@ -211,6 +227,23 @@ tableRowFormat : KW_ROW KW_FORMAT KW_DELIMITED tableRowFormatFieldIdentifier? tableRowFormatCollItemsIdentifier? tableRowFormatMapKeysIdentifier? tableRowFormatLinesIdentifier? -> ^(TOK_TABLEROWFORMAT tableRowFormatFieldIdentifier? tableRowFormatCollItemsIdentifier? tableRowFormatMapKeysIdentifier? tableRowFormatLinesIdentifier?) + | KW_ROW KW_FORMAT KW_SERIALIZER name=StringLiteral tableSerializerProperties? + -> ^(TOK_TABLESERIALIZER $name tableSerializerProperties?) 
+ ; + +tableSerializerProperties + : + KW_WITH KW_PROPERTIES LPAREN propertiesList RPAREN -> ^(TOK_TABLSERDEPROPERTIES propertiesList) + ; + +propertiesList + : + keyValueProperty (COMMA keyValueProperty)* -> ^(TOK_TABLESERDEPROPLIST keyValueProperty+) + ; + +keyValueProperty + : + key=StringLiteral EQUAL value=StringLiteral -> ^(TOKTABLESERDEPROPERTY $key $value) ; tableRowFormatFieldIdentifier @@ -328,14 +361,16 @@ regular_body whereClause? groupByClause? orderByClause? - clusterByClause? -> ^(TOK_QUERY fromClause ^(TOK_INSERT insertClause selectClause whereClause? groupByClause? orderByClause? clusterByClause?)) + clusterByClause? + limitClause? -> ^(TOK_QUERY fromClause ^(TOK_INSERT insertClause selectClause whereClause? groupByClause? orderByClause? clusterByClause? limitClause?)) | selectClause fromClause whereClause? groupByClause? orderByClause? - clusterByClause? -> ^(TOK_QUERY fromClause ^(TOK_INSERT ^(TOK_DESTINATION ^(TOK_DIR TOK_TMP_FILE)) selectClause whereClause? groupByClause? orderByClause? clusterByClause?)) + clusterByClause? + limitClause? -> ^(TOK_QUERY fromClause ^(TOK_INSERT ^(TOK_DESTINATION ^(TOK_DIR TOK_TMP_FILE)) selectClause whereClause? groupByClause? orderByClause? clusterByClause? limitClause?)) ; @@ -346,13 +381,15 @@ body whereClause? groupByClause? orderByClause? - clusterByClause? -> ^(TOK_INSERT insertClause? selectClause whereClause? groupByClause? orderByClause? clusterByClause?) + clusterByClause? + limitClause? -> ^(TOK_INSERT insertClause? selectClause whereClause? groupByClause? orderByClause? clusterByClause? limitClause?) | selectClause whereClause? groupByClause? orderByClause? - clusterByClause? -> ^(TOK_INSERT ^(TOK_DESTINATION ^(TOK_DIR TOK_TMP_FILE)) selectClause whereClause? groupByClause? orderByClause? clusterByClause?) + clusterByClause? + limitClause? -> ^(TOK_INSERT ^(TOK_DESTINATION ^(TOK_DIR TOK_TMP_FILE)) selectClause whereClause? groupByClause? orderByClause? clusterByClause? limitClause?) ; insertClause @@ -367,6 +404,11 @@ destination | KW_TABLE tabName -> ^(tabName) ; +limitClause + : + KW_LIMIT num=Number -> ^(TOK_LIMIT $num) + ; + //----------------------- Rules for parsing selectClause ----------------------------- // select a,b,c ... 
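The limitClause above only attaches a TOK_LIMIT node to the query tree; the row limiting itself happens at run time through the LimitOperator and the operator done flag added earlier in this patch, with HiveRecordReader cutting the scan short once the mapper reports it is done. The following self-contained toy, with invented names and no Hive dependencies, imitates that control flow to show the early-termination behavior.

    import java.util.ArrayList;
    import java.util.List;

    // Toy model of the limit/done mechanism: the scan keeps pushing rows only
    // while the downstream limit stage has not declared itself done.
    public class LimitSketch {

      static class LimitStage {
        private final int limit;
        private final List<String> kept = new ArrayList<String>();
        private boolean done;

        LimitStage(int limit) { this.limit = limit; }

        // Mirrors LimitOperator.process(): forward while under the limit,
        // otherwise mark the stage done so upstream can stop producing.
        void process(String row) {
          if (kept.size() < limit) {
            kept.add(row);
          } else {
            done = true;
          }
        }

        boolean isDone() { return done; }
        List<String> kept() { return kept; }
      }

      public static void main(String[] args) {
        LimitStage limit = new LimitStage(3);
        int rowsRead = 0;
        // Analogous to HiveRecordReader.next() returning false once
        // ExecMapper.getDone() is true: the loop stops reading early.
        for (int i = 0; i < 1000 && !limit.isDone(); i++) {
          limit.process("row-" + i);
          rowsRead++;
        }
        System.out.println("read " + rowsRead + " rows, kept " + limit.kept());
      }
    }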
selectClause @@ -581,7 +623,7 @@ precedenceFieldExpression precedenceUnaryOperator : - MINUS | TILDE + PLUS | MINUS | TILDE ; precedenceUnaryExpression @@ -741,6 +783,7 @@ KW_RIGHT : 'RIGHT'; KW_FULL : 'FULL'; KW_ON : 'ON'; KW_PARTITION : 'PARTITION'; +KW_PARTITIONS : 'PARTITIONS'; KW_TABLE: 'TABLE'; KW_TABLES: 'TABLES'; KW_SHOW: 'SHOW'; @@ -798,6 +841,7 @@ KW_OUT: 'OUT'; KW_OF: 'OF'; KW_CAST: 'CAST'; KW_ADD: 'ADD'; +KW_REPLACE: 'REPLACE'; KW_COLUMNS: 'COLUMNS'; KW_RLIKE: 'RLIKE'; KW_REGEXP: 'REGEXP'; @@ -805,6 +849,10 @@ KW_TEMPORARY: 'TEMPORARY'; KW_FUNCTION: 'FUNCTION'; KW_EXPLAIN: 'EXPLAIN'; KW_EXTENDED: 'EXTENDED'; +KW_SERIALIZER: 'SERIALIZER'; +KW_WITH: 'WITH'; +KW_PROPERTIES: 'SERDEPROPERTIES'; +KW_LIMIT: 'LIMIT'; // Operators diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java index 5c66da1b7..3d413878a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java @@ -156,7 +156,7 @@ private void applyConstraints(URI fromURI, URI toURI, Tree ast, boolean isLocal) } @Override - public void analyze(CommonTree ast, Context ctx) throws SemanticException { + public void analyzeInternal(CommonTree ast, Context ctx) throws SemanticException { isLocal = isOverWrite = false; Tree from_t = ast.getChild(0); Tree table_t = ast.getChild(1); @@ -185,7 +185,7 @@ public void analyze(CommonTree ast, Context ctx) throws SemanticException { } // initialize destination table/partition - tableSpec ts = new tableSpec(db, (CommonTree) table_t); + tableSpec ts = new tableSpec(db, (CommonTree) table_t, true); URI toURI = (ts.partHandle != null) ? ts.partHandle.getDataLocation() : ts.tableHandle.getDataLocation(); // make sure the arguments make sense diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java index e2ac5d399..cfad2a4bc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java @@ -17,14 +17,6 @@ */ package org.apache.hadoop.hive.ql.parse; -/* - * PartitionPruner.java - * - * Created on April 9, 2008, 3:48 PM - * - * To change this template, choose Tools | Template Manager - * and open the template in the editor. 
- */ import java.util.*; @@ -73,7 +65,7 @@ public PartitionPruner(String tableAlias, QBMetaData metaData) { this.tableAlias = tableAlias; this.metaData = metaData; this.tab = metaData.getTableForAlias(tableAlias); - this.prunerExpr = new exprNodeConstantDesc(Boolean.TRUE); + this.prunerExpr = null; } /** @@ -106,8 +98,8 @@ private exprNodeDesc genExprNodeDesc(CommonTree expr) case HiveParser.TOK_COLREF: { assert(expr.getChildCount() == 2); - String tabAlias = SemanticAnalyzer.getTableName(expr); - String colName = SemanticAnalyzer.getSerDeFieldExpression(expr); + String tabAlias = expr.getChild(0).getText(); + String colName = expr.getChild(1).getText(); if (tabAlias == null || colName == null) { throw new SemanticException(ErrorMsg.INVALID_XPATH.getMsg(expr)); } @@ -115,11 +107,17 @@ private exprNodeDesc genExprNodeDesc(CommonTree expr) if (tabAlias.equals(tableAlias) && tab.isPartitionKey(colName)) { desc = new exprNodeColumnDesc(String.class, colName); } else { - // might be a column from another table try { - TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector( - this.metaData.getTableForAlias(tabAlias).getDeserializer().getObjectInspector()); - desc = new exprNodeConstantDesc(typeInfo.getStructFieldTypeInfo(colName), null); + // might be a column from another table + Table t = this.metaData.getTableForAlias(tabAlias); + if (t.isPartitionKey(colName)) { + desc = new exprNodeConstantDesc(String.class, null); + } + else { + TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector( + this.metaData.getTableForAlias(tabAlias).getDeserializer().getObjectInspector()); + desc = new exprNodeConstantDesc(typeInfo.getStructFieldTypeInfo(colName), null); + } } catch (SerDeException e){ throw new RuntimeException(e); } @@ -189,6 +187,37 @@ public static boolean mightBeUnknown(exprNodeDesc desc) { return false; } + public boolean hasPartitionPredicate(CommonTree expr) { + + int tokType = expr.getType(); + boolean hasPPred = false; + switch (tokType) { + case HiveParser.TOK_COLREF: { + + assert(expr.getChildCount() == 2); + String tabAlias = expr.getChild(0).getText(); + String colName = expr.getChild(1).getText(); + if (tabAlias.equals(tableAlias) && tab.isPartitionKey(colName)) { + hasPPred = true; + } + break; + } + + default: { + boolean isFunction = (expr.getType() == HiveParser.TOK_FUNCTION); + + // Create all children + int childrenBegin = (isFunction ? 
1 : 0); + for (int ci=childrenBegin; ci prune() throws HiveException { LOG.trace("tabname = " + this.tab.getName()); LOG.trace("prune Expression = " + this.prunerExpr); - HashSet ret_parts = new HashSet(); + LinkedHashSet ret_parts = new LinkedHashSet(); try { StructObjectInspector rowObjectInspector = (StructObjectInspector)this.tab.getDeserializer().getObjectInspector(); Object[] rowWithPart = new Object[2]; InspectableObject inspectableObject = new InspectableObject(); - - ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(this.prunerExpr); + + ExprNodeEvaluator evaluator = null; + if (this.prunerExpr != null) + evaluator = ExprNodeEvaluatorFactory.get(this.prunerExpr); for(Partition part: Hive.get().getPartitions(this.tab)) { // Set all the variables here LinkedHashMap partSpec = part.getSpec(); @@ -237,14 +271,18 @@ public Set prune() throws HiveException { StructObjectInspector rowWithPartObjectInspector = ObjectInspectorFactory.getUnionStructObjectInspector(ois); // evaluate the expression tree - evaluator.evaluate(rowWithPart, rowWithPartObjectInspector, inspectableObject); - LOG.trace("prune result for partition " + partSpec + ": " + inspectableObject.o); - if (!Boolean.FALSE.equals(inspectableObject.o)) { - LOG.debug("retained partition: " + partSpec); - ret_parts.add(part); - } else { - LOG.trace("pruned partition: " + partSpec); + if (evaluator != null) { + evaluator.evaluate(rowWithPart, rowWithPartObjectInspector, inspectableObject); + LOG.trace("prune result for partition " + partSpec + ": " + inspectableObject.o); + if (!Boolean.FALSE.equals(inspectableObject.o)) { + LOG.debug("retained partition: " + partSpec); + ret_parts.add(part); + } else { + LOG.trace("pruned partition: " + partSpec); + } } + else + ret_parts.add(part); } } catch (Exception e) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java index ad0bb9c89..79fbce597 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java @@ -22,6 +22,7 @@ import org.apache.hadoop.hive.ql.parse.QBParseInfo; import org.apache.hadoop.hive.ql.parse.QBMetaData; +import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -45,6 +46,7 @@ public class QB { private QBMetaData qbm; private QBJoinTree qbjoin; private String id; + private boolean isQuery; public void print(String msg) { LOG.info(msg + "alias=" + qbp.getAlias()); @@ -59,6 +61,9 @@ public void print(String msg) { public QB(String outer_id, String alias, boolean isSubQ) { aliasToTabs = new HashMap(); aliasToSubq = new HashMap(); + if (alias != null) { + alias = alias.toLowerCase(); + } qbp = new QBParseInfo(alias, isSubQ); qbm = new QBMetaData(); this.id = (outer_id == null ? 
alias : outer_id + ":" + alias); @@ -85,6 +90,7 @@ public void countSel() { } public boolean exists(String alias) { + alias = alias.toLowerCase(); if (aliasToTabs.get(alias) != null || aliasToSubq.get(alias) != null) return true; @@ -92,11 +98,11 @@ public boolean exists(String alias) { } public void setTabAlias(String alias, String tabName) { - aliasToTabs.put(alias, tabName); + aliasToTabs.put(alias.toLowerCase(), tabName); } public void setSubqAlias(String alias, QBExpr qbexpr) { - aliasToSubq.put(alias, qbexpr); + aliasToSubq.put(alias.toLowerCase(), qbexpr); } public String getId() { @@ -128,11 +134,11 @@ public Set getTabAliases() { } public QBExpr getSubqForAlias(String alias) { - return aliasToSubq.get(alias); + return aliasToSubq.get(alias.toLowerCase()); } public String getTabNameForAlias(String alias) { - return aliasToTabs.get(alias); + return aliasToTabs.get(alias.toLowerCase()); } public QBJoinTree getQbJoinTree() { @@ -142,4 +148,24 @@ public QBJoinTree getQbJoinTree() { public void setQbJoinTree(QBJoinTree qbjoin) { this.qbjoin = qbjoin; } + + public void setIsQuery(boolean isQuery) { + this.isQuery = isQuery; + } + + public boolean getIsQuery() { + return isQuery; + } + + public boolean isSelectStarQuery() { + if (!qbp.isSelectStarQuery() || !aliasToSubq.isEmpty()) + return false; + + Iterator> iter = qbm.getAliasToTable().entrySet().iterator(); + Table tab = ((Map.Entry)iter.next()).getValue(); + if (tab.isPartitioned()) + return false; + + return true; + } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java index cf7ea695f..f751bfd3e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java @@ -64,8 +64,16 @@ public void addOutputType(Class cls) { this.outTypes.add(cls); } + // All getXXX needs toLowerCase() because they are directly called from SemanticAnalyzer + // All setXXX does not need it because they are called from QB which already lowercases + // the aliases. 
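The comment just above states the convention these alias changes follow: QB lowercases aliases on the way in, and every lookup in QBMetaData, QBParseInfo and RowResolver lowercases its argument, so t1 and T1 refer to the same alias. A minimal self-contained sketch of that pattern, with invented names:

    import java.util.HashMap;
    import java.util.Map;

    // Toy illustration of the case-insensitive alias maps used above:
    // keys are lowercased on both put and get.
    public class AliasMapSketch {
      private final Map<String, String> aliasToTab = new HashMap<String, String>();

      public void setTabAlias(String alias, String tabName) {
        aliasToTab.put(alias.toLowerCase(), tabName);
      }

      public String getTabNameForAlias(String alias) {
        return aliasToTab.get(alias.toLowerCase());
      }

      public static void main(String[] args) {
        AliasMapSketch qb = new AliasMapSketch();
        qb.setTabAlias("Src_Table", "src");
        // Both lookups hit the same entry regardless of the case used in the query.
        System.out.println(qb.getTabNameForAlias("src_table"));   // src
        System.out.println(qb.getTabNameForAlias("SRC_TABLE"));   // src
      }
    }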
+ + public HashMap getAliasToTable() { + return aliasToTable; + } + public Table getTableForAlias(String alias) { - return this.aliasToTable.get(alias); + return this.aliasToTable.get(alias.toLowerCase()); } public void setSrcForAlias(String alias, Table tab) { @@ -89,23 +97,23 @@ public void setDestForAlias(String alias, String fname, boolean isDfsFile) { } public Integer getDestTypeForAlias(String alias) { - return this.nameToDestType.get(alias); + return this.nameToDestType.get(alias.toLowerCase()); } public Table getDestTableForAlias(String alias) { - return this.nameToDestTable.get(alias); + return this.nameToDestTable.get(alias.toLowerCase()); } public Partition getDestPartitionForAlias(String alias) { - return this.nameToDestPartition.get(alias); + return this.nameToDestPartition.get(alias.toLowerCase()); } public String getDestFileForAlias(String alias) { - return this.nameToDestFile.get(alias); + return this.nameToDestFile.get(alias.toLowerCase()); } public Table getSrcForAlias(String alias) { - return this.aliasToTable.get(alias); + return this.aliasToTable.get(alias.toLowerCase()); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java index e322a7489..61910e1e9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java @@ -32,6 +32,7 @@ public class QBParseInfo { private boolean isSubQ; + private boolean canOptTopQ; private String alias; private CommonTree joinExpr; private HashMap aliasToSrc; @@ -41,6 +42,8 @@ public class QBParseInfo { private HashMap destToWhereExpr; private HashMap destToGroupby; private HashMap destToClusterby; + private HashMap destToLimit; + private int outerQueryLimit; // used by GroupBy private HashMap > destToAggregationExprs; @@ -57,12 +60,15 @@ public QBParseInfo(String alias, boolean isSubQ) { this.destToWhereExpr = new HashMap(); this.destToGroupby = new HashMap(); this.destToClusterby = new HashMap(); + this.destToLimit = new HashMap(); this.destToAggregationExprs = new HashMap >(); this.destToDistinctFuncExpr = new HashMap(); this.alias = alias; this.isSubQ = isSubQ; + this.canOptTopQ = false; + this.outerQueryLimit = -1; } public void setAggregationExprsForClause(String clause, HashMap aggregationTrees) { @@ -102,7 +108,7 @@ public void setClusterByExprForClause(String clause, CommonTree ast) { } public void setSrcForAlias(String alias, CommonTree ast) { - this.aliasToSrc.put(alias, ast); + this.aliasToSrc.put(alias.toLowerCase(), ast); } public Set getClauseNames() { @@ -134,7 +140,7 @@ public CommonTree getClusterByForClause(String clause) { } public CommonTree getSrcForAlias(String alias) { - return this.aliasToSrc.get(alias); + return this.aliasToSrc.get(alias.toLowerCase()); } public String getAlias() { @@ -145,6 +151,14 @@ public boolean getIsSubQ() { return this.isSubQ; } + public boolean getCanOptTopQ() { + return this.canOptTopQ; + } + + public void setCanOptTopQ(boolean canOptTopQ) { + this.canOptTopQ = canOptTopQ; + } + public CommonTree getJoinExpr() { return this.joinExpr; } @@ -152,12 +166,87 @@ public CommonTree getJoinExpr() { public void setJoinExpr(CommonTree joinExpr) { this.joinExpr = joinExpr; } - + public TableSample getTabSample(String alias) { - return this.nameToSample.get(alias); + return this.nameToSample.get(alias.toLowerCase()); } public void setTabSample(String alias, TableSample tableSample) { - this.nameToSample.put(alias, tableSample); + 
this.nameToSample.put(alias.toLowerCase(), tableSample); + } + + public void setDestLimit(String dest, Integer limit) { + this.destToLimit.put(dest, limit); } + + public Integer getDestLimit(String dest) { + return this.destToLimit.get(dest); + } + + /** + * @return the outerQueryLimit + */ + public int getOuterQueryLimit() { + return outerQueryLimit; + } + + /** + * @param outerQueryLimit the outerQueryLimit to set + */ + public void setOuterQueryLimit(int outerQueryLimit) { + this.outerQueryLimit = outerQueryLimit; + } + + public boolean isSelectStarQuery() { + if (isSubQ || + (joinExpr != null) || + (!nameToSample.isEmpty()) || + (!destToWhereExpr.isEmpty()) || + (!destToGroupby.isEmpty()) || + (!destToClusterby.isEmpty())) + return false; + + Iterator>> aggrIter = destToAggregationExprs.entrySet().iterator(); + while (aggrIter.hasNext()) { + HashMap h = aggrIter.next().getValue(); + if ((h != null) && (!h.isEmpty())) + return false; + } + + if (!destToDistinctFuncExpr.isEmpty()) { + Iterator> distn = destToDistinctFuncExpr.entrySet().iterator(); + while (distn.hasNext()) { + CommonTree ct = distn.next().getValue(); + if (ct != null) + return false; + } + } + + Iterator> iter = nameToDest.entrySet().iterator(); + while (iter.hasNext()) { + Map.Entry entry = iter.next(); + CommonTree v = entry.getValue(); + if (!(((CommonTree)v.getChild(0)).getToken().getType() == HiveParser.TOK_TMP_FILE)) + return false; + } + + iter = destToSelExpr.entrySet().iterator(); + while (iter.hasNext()) { + Map.Entry entry = iter.next(); + CommonTree selExprList = entry.getValue(); + // Iterate over the selects + for (int i = 0; i < selExprList.getChildCount(); ++i) { + + // list of the columns + CommonTree selExpr = (CommonTree) selExprList.getChild(i); + CommonTree sel = (CommonTree)selExpr.getChild(0); + + if (sel.getToken().getType() != HiveParser.TOK_ALLCOLREF) + return false; + } + } + + return true; + } + } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java index e2718a8e6..d1639b12a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java @@ -50,6 +50,9 @@ public RowResolver() { } public void put(String tab_alias, String col_alias, ColumnInfo colInfo) { + if (tab_alias != null) { + tab_alias = tab_alias.toLowerCase(); + } col_alias = col_alias.toLowerCase(); if (rowSchema.getSignature() == null) { rowSchema.setSignature(new Vector()); @@ -71,10 +74,11 @@ public void put(String tab_alias, String col_alias, ColumnInfo colInfo) { } public boolean hasTableAlias(String tab_alias) { - return rslvMap.get(tab_alias) != null ? 
true : false; + return rslvMap.get(tab_alias.toLowerCase()) != null; } public ColumnInfo get(String tab_alias, String col_alias) { + tab_alias = tab_alias.toLowerCase(); col_alias = col_alias.toLowerCase(); HashMap f_map = rslvMap.get(tab_alias); if (f_map == null) { @@ -88,7 +92,7 @@ public Vector getColumnInfos() { } public HashMap getFieldMap(String tab_alias) { - return rslvMap.get(tab_alias); + return rslvMap.get(tab_alias.toLowerCase()); } public int getPosition(String internalName) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index 241f9049f..127defc6f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -24,7 +24,10 @@ import java.lang.reflect.Method; import org.antlr.runtime.tree.*; +import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe; import org.apache.hadoop.hive.serde2.SerDeException; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; import org.apache.hadoop.hive.serde2.objectinspector.StructField; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; @@ -35,8 +38,10 @@ import org.apache.hadoop.hive.ql.typeinfo.TypeInfo; import org.apache.hadoop.hive.ql.typeinfo.TypeInfoFactory; import org.apache.hadoop.hive.ql.typeinfo.TypeInfoUtils; +import org.apache.hadoop.hive.ql.udf.UDFOPPositive; import org.apache.hadoop.hive.ql.exec.*; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.mapred.TextInputFormat; import org.apache.hadoop.fs.Path; import org.apache.commons.lang.StringUtils; @@ -342,7 +347,12 @@ else if (isJoinToken(frm)) skipRecursion = true; } break; - + + case HiveParser.TOK_LIMIT: + { + qbp.setDestLimit(ctx_1.dest, new Integer(ast.getChild(0).getText())); + } + break; default: skipRecursion = false; break; @@ -384,8 +394,18 @@ private void genPartitionPruners(QB qb) throws SemanticException { // Pass each where clause to the pruner QBParseInfo qbp = qb.getParseInfo(); for(String clause: qbp.getClauseNames()) { - if (qbp.getWhrForClause(clause) != null) { - pruner.addExpression((CommonTree)qbp.getWhrForClause(clause).getChild(0)); + + CommonTree whexp = (CommonTree)qbp.getWhrForClause(clause); + + if (pruner.getTable().isPartitioned() && + conf.getVar(HiveConf.ConfVars.HIVEPARTITIONPRUNER).equalsIgnoreCase("strict") && + (whexp == null || !pruner.hasPartitionPredicate((CommonTree)whexp.getChild(0)))) { + throw new SemanticException(ErrorMsg.NO_PARTITION_PREDICATE.getMsg(whexp != null ? 
whexp : qbp.getSelForClause(clause), + " for Alias " + alias + " Table " + pruner.getTable().getName())); + } + + if (whexp != null) { + pruner.addExpression((CommonTree)whexp.getChild(0)); } } @@ -466,7 +486,7 @@ private void getMetaData(QB qb) throws SemanticException { CommonTree ast = qbp.getDestForClause(name); switch (ast.getToken().getType()) { case HiveParser.TOK_TAB: { - tableSpec ts = new tableSpec(this.db, ast); + tableSpec ts = new tableSpec(this.db, ast, true); if(ts.partSpec == null) { // This is a table @@ -487,6 +507,7 @@ private void getMetaData(QB qb) throws SemanticException { { fname = getTmpFileName(); ctx.setResDir(new Path(fname)); + qb.setIsQuery(true); } qb.getMetaData().setDestForAlias(name, fname, (ast.getToken().getType() == HiveParser.TOK_DIR)); @@ -643,138 +664,50 @@ private void parseJoinCondition(CommonTree joinParseTree, } } - /** - * Returns the expression for the SerDe field. - * @return null if the tree cannot be represented by a SerDe field. - */ - public static String getSerDeFieldExpression(CommonTree node) { - if (node.getToken().getType() == HiveParser.TOK_COLREF){ - // String tabAlias = node.getChild(0).getText(); - String colName = node.getChild(1).getText(); - return colName; - } - if (node.getChildCount() != 2) { - return null; - } - String left = getSerDeFieldExpression((CommonTree)node.getChild(0)); - if (left == null) return null; - - if (node.getToken().getType() == HiveParser.DOT) { - return left + '.' + node.getChild(1).getText(); - } else if (node.getToken().getType() == HiveParser.LSQUARE){ - return left + '[' + node.getChild(1).getText() + ']'; - } - return null; - } - - /** - * Returns the table name for the SerDe field. - * @return null if the tree cannot be represented by a SerDe field. - */ - public static String getTableName(CommonTree node) { - while (node.getToken().getType() != HiveParser.TOK_COLREF) { - if (node.getChildCount() != 2) return null; - node = (CommonTree) node.getChild(0); - } - return node.getChild(0).getText(); - } @SuppressWarnings("nls") - private OperatorInfoList genFilterPlan(String dest, QB qb, - OperatorInfoList input) throws SemanticException { - - // We can assert here that the input list is of size one - if (input.size() != 1) { - throw new SemanticException("Filter has more than one inputs"); - } + private OperatorInfo genFilterPlan(String dest, QB qb, + OperatorInfo input) throws SemanticException { CommonTree whereExpr = qb.getParseInfo().getWhrForClause(dest); - OperatorInfoList output = (OperatorInfoList)input.clone(); - output.get(0).setOp( + OperatorInfo output = (OperatorInfo)input.clone(); + output.setOp( OperatorFactory.getAndMakeChild( new filterDesc(genExprNodeDesc((CommonTree)whereExpr.getChild(0), qb.getParseInfo().getAlias(), - input.get(0).getRowResolver())), - new RowSchema(output.get(0).getRowResolver().getColumnInfos()), - input.get(0).getOp() + input.getRowResolver())), + new RowSchema(output.getRowResolver().getColumnInfos()), + input.getOp() ) ); - LOG.debug("Created Filter Plan for " + qb.getId() + ":" + dest + " row schema: " + output.get(0).getRowResolver().toString()); + LOG.debug("Created Filter Plan for " + qb.getId() + ":" + dest + " row schema: " + output.getRowResolver().toString()); return output; } @SuppressWarnings("nls") private void genColList(String alias, CommonTree sel, ArrayList col_list, RowResolver input, Integer pos, - RowResolver output, String colAlias) throws SemanticException { + RowResolver output) throws SemanticException { // TODO: Have to put in the 
support for AS clause - String tabName = ((CommonTree) sel.getChild(0)).getToken().getText(); - ArrayList fieldTypeList = new ArrayList(); - ArrayList fieldList = new ArrayList(); - if (sel.getToken().getType() == HiveParser.TOK_ALLCOLREF) { - // This is the tab.* case - // In this case add all the columns to the fieldList - // from the input schema - for(ColumnInfo colInfo: input.getColumnInfos()) { - String name = colInfo.getInternalName(); - String [] tmp = input.reverseLookup(name); - fieldList.add(name); - fieldTypeList.add(colInfo.getType()); - output.put(alias, tmp[1], new ColumnInfo(pos.toString(), colInfo.getType(), - colInfo.getIsVirtual())); - pos = Integer.valueOf(pos.intValue() + 1); - } - } else { - // For now only allow columns of the form tab.col - if (sel.getChildCount() == 1) { - throw new SemanticException(ErrorMsg.NO_TABLE_ALIAS.getMsg(sel.getChild(0))); - } - - // Lookup the name from the input - ColumnInfo colInfo = input.get(tabName, sel.getChild(1).getText()); - - // TODO: Hack it up for now: Later we have to pass the QB in order to check for the - // table alias instead of relying on input.hasTableAlias - if (colInfo == null && input.getIsExprResolver()) { - throw new SemanticException(ErrorMsg.NON_KEY_EXPR_IN_GROUPBY.getMsg(sel)); - } else if (!input.hasTableAlias(tabName)) { - throw new SemanticException(ErrorMsg.INVALID_TABLE_ALIAS.getMsg(sel.getChild(0))); - } else if (colInfo == null) { - throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(sel.getChild(1))); - } - - // Add to the field list - fieldList.add(colInfo.getInternalName()); - fieldTypeList.add(colInfo.getType()); - // Add to the output - if (!StringUtils.isEmpty(alias) && - (output.get(alias, colAlias) != null)) { - throw new SemanticException(ErrorMsg.AMBIGOUS_COLUMN.getMsg(sel.getChild(1))); - } - output.put(alias, colAlias, - new ColumnInfo(pos.toString(), colInfo.getType(), colInfo.getIsVirtual())); - pos = Integer.valueOf(pos.intValue() + 1); - } - - // Generate the corresponding expressions - for (int i=0; i col_list = new ArrayList(); RowResolver out_rwsch = new RowResolver(); CommonTree trfm = null; + String alias = qb.getParseInfo().getAlias(); Integer pos = Integer.valueOf(0); // Iterate over the selects @@ -884,10 +813,9 @@ private OperatorInfoList genSelectPlan(String dest, QB qb, String colAlias = getColAlias(selExpr, "_C" + i); CommonTree sel = (CommonTree)selExpr.getChild(0); - if (sel.getToken().getType() == HiveParser.TOK_COLREF || - sel.getToken().getType() == HiveParser.TOK_ALLCOLREF) { + if (sel.getToken().getType() == HiveParser.TOK_ALLCOLREF) { genColList(qb.getParseInfo().getAlias(), sel, col_list, - input.get(0).getRowResolver(), pos, out_rwsch, colAlias); + input.getRowResolver(), pos, out_rwsch); } else if (sel.getToken().getType() == HiveParser.TOK_TRANSFORM) { if (i > 0) { throw new SemanticException(ErrorMsg.INVALID_TRANSFORM.getMsg(sel)); @@ -896,31 +824,37 @@ private OperatorInfoList genSelectPlan(String dest, QB qb, CommonTree cols = (CommonTree) trfm.getChild(0); for (int j = 0; j < cols.getChildCount(); ++j) { CommonTree expr = (CommonTree) cols.getChild(j); - if (expr.getToken().getType() == HiveParser.TOK_COLREF || - expr.getToken().getType() == HiveParser.TOK_ALLCOLREF) { - genColList(qb.getParseInfo().getAlias(), expr, - col_list, input.get(0).getRowResolver(), - pos, out_rwsch, - expr.getChild(1).getText()); + if (expr.getToken().getType() == HiveParser.TOK_ALLCOLREF) { + genColList(alias, expr, + col_list, input.getRowResolver(), + pos, out_rwsch); } 
else { - exprNodeDesc exp = genExprNodeDesc(expr, qb.getParseInfo() - .getAlias(), input.get(0).getRowResolver()); + exprNodeDesc exp = genExprNodeDesc(expr, alias, input.getRowResolver()); col_list.add(exp); - out_rwsch.put(qb.getParseInfo().getAlias(), expr.getText(), + if (!StringUtils.isEmpty(alias) && + (out_rwsch.get(alias, colAlias) != null)) { + throw new SemanticException(ErrorMsg.AMBIGOUS_COLUMN.getMsg(expr.getChild(1))); + } + + out_rwsch.put(alias, expr.getText(), new ColumnInfo((Integer.valueOf(pos)).toString(), - String.class, false)); // Everything is a string right now + exp.getTypeInfo())); // Everything is a string right now } } } else { // Case when this is an expression exprNodeDesc exp = genExprNodeDesc(sel, qb.getParseInfo() - .getAlias(), input.get(0).getRowResolver()); + .getAlias(), input.getRowResolver()); col_list.add(exp); + if (!StringUtils.isEmpty(alias) && + (out_rwsch.get(alias, colAlias) != null)) { + throw new SemanticException(ErrorMsg.AMBIGOUS_COLUMN.getMsg(sel.getChild(1))); + } // Since the as clause is lacking we just use the text representation // of the expression as the column name - out_rwsch.put(qb.getParseInfo().getAlias(), colAlias, + out_rwsch.put(alias, colAlias, new ColumnInfo((Integer.valueOf(pos)).toString(), - String.class, false)); // Everything is a string right now + exp.getTypeInfo())); // Everything is a string right now } pos = Integer.valueOf(pos.intValue() + 1); } @@ -931,51 +865,23 @@ private OperatorInfoList genSelectPlan(String dest, QB qb, } } - OperatorInfoList output = (OperatorInfoList) input.clone(); - output.get(0).setOp(OperatorFactory.getAndMakeChild( + OperatorInfo output = (OperatorInfo) input.clone(); + output.setOp(OperatorFactory.getAndMakeChild( new selectDesc(col_list), new RowSchema(out_rwsch.getColumnInfos()), - input.get(0).getOp())); + input.getOp())); - output.get(0).setRowResolver(out_rwsch); + output.setRowResolver(out_rwsch); if (trfm != null) { output = genScriptPlan(trfm, qb, output); } LOG.debug("Created Select Plan for clause: " + dest + " row schema: " - + output.get(0).getRowResolver().toString()); + + output.getRowResolver().toString()); return output; } - private OperatorInfo genGroupByPlanSelectOperator( - QBParseInfo parseInfo, String dest, OperatorInfo groupByOperatorInfo) - throws SemanticException { - - RowResolver groupByOutputRowResolver = groupByOperatorInfo.getRowResolver(); - RowResolver selectOutputRowResolver = new RowResolver(); - ArrayList selectCols = new ArrayList(); - CommonTree selectExpr = parseInfo.getSelForClause(dest); - for (int i = 0; i < selectExpr.getChildCount(); ++i) { - CommonTree sel = (CommonTree) selectExpr.getChild(i).getChild(0); - - // We need to recurse into the expression until we hit a UDAF or keys, - // which are both in groupByOutputToColumns. 
- exprNodeDesc exp = genExprNodeDesc(sel, parseInfo.getAlias(), - groupByOutputRowResolver); - - selectCols.add(exp); - - selectOutputRowResolver.put(parseInfo.getAlias(), sel.getText(), - new ColumnInfo((Integer.valueOf(i)).toString(), - String.class, false)); // Everything is a class right now - } - - return new OperatorInfo(OperatorFactory.getAndMakeChild(new selectDesc( - selectCols), new RowSchema(selectOutputRowResolver.getColumnInfos()), - groupByOperatorInfo.getOp()), selectOutputRowResolver); - } - @SuppressWarnings("nls") private OperatorInfo genGroupByPlanGroupByOperator( QBParseInfo parseInfo, String dest, OperatorInfo reduceSinkOperatorInfo, @@ -999,8 +905,7 @@ private OperatorInfo genGroupByPlanGroupByOperator( groupByKeys.add(new exprNodeColumnDesc(exprInfo.getType(), exprInfo.getInternalName())); String field = (Integer.valueOf(i)).toString(); groupByOutputRowResolver.put("",grpbyExpr.toStringTree(), - new ColumnInfo(field, exprInfo.getType(), - exprInfo.getIsVirtual())); + new ColumnInfo(field, exprInfo.getType())); } // For each aggregation HashMap aggregationTrees = parseInfo @@ -1037,7 +942,7 @@ private OperatorInfo genGroupByPlanGroupByOperator( value.getToken().getType() == HiveParser.TOK_FUNCTIONDI)); groupByOutputRowResolver.put("",value.toStringTree(), new ColumnInfo(Integer.valueOf(groupByKeys.size() + aggregations.size() -1).toString(), - String.class, false)); // Everything is a string right now + String.class)); // Everything is a string right now } return new OperatorInfo( @@ -1071,7 +976,7 @@ private OperatorInfo genGroupByPlanGroupByOpForward( groupByKeys.add(new exprNodeColumnDesc(exprInfo.getType(), exprInfo.getInternalName())); String field = (Integer.valueOf(i)).toString(); outputRS.put("", text, - new ColumnInfo(field, exprInfo.getType(), exprInfo.getIsVirtual())); + new ColumnInfo(field, exprInfo.getType())); } // For each aggregation @@ -1109,7 +1014,7 @@ private OperatorInfo genGroupByPlanGroupByOpForward( value.getToken().getType() == HiveParser.TOK_FUNCTIONDI)); outputRS.put("",value.toStringTree(), new ColumnInfo(Integer.valueOf(groupByKeys.size() + aggregations.size() -1).toString(), - String.class, false)); // Everything is a string right now + String.class)); // Everything is a string right now } return new OperatorInfo( @@ -1139,7 +1044,7 @@ private OperatorInfo genGroupByPlanReduceSinkOperator(QBParseInfo parseInfo, if (reduceSinkOutputRowResolver.get("", text) == null) { reduceSinkOutputRowResolver.put("", text, new ColumnInfo(Utilities.ReduceField.KEY.toString() + "." + Integer.valueOf(reduceKeys.size() - 1).toString(), - String.class, false)); // Everything is a string right now + String.class)); // Everything is a string right now } else { throw new SemanticException(ErrorMsg.DUPLICATE_GROUPBY_KEY.getMsg(grpbyExpr)); } @@ -1156,7 +1061,7 @@ private OperatorInfo genGroupByPlanReduceSinkOperator(QBParseInfo parseInfo, reduceKeys.add(genExprNodeDesc(parameter, parseInfo.getAlias(), reduceSinkInputRowResolver)); reduceSinkOutputRowResolver.put("", text, new ColumnInfo(Utilities.ReduceField.KEY.toString() + "." 
+ Integer.valueOf(reduceKeys.size() - 1).toString(), - String.class, false)); // Everything is a string right now + String.class)); // Everything is a string right now } } } @@ -1175,13 +1080,14 @@ private OperatorInfo genGroupByPlanReduceSinkOperator(QBParseInfo parseInfo, reduceValues.add(genExprNodeDesc(parameter, parseInfo.getAlias(), reduceSinkInputRowResolver)); reduceSinkOutputRowResolver.put("", text, new ColumnInfo(Utilities.ReduceField.VALUE.toString() + "." + Integer.valueOf(reduceValues.size() - 1).toString(), - String.class, false)); // Everything is a string right now + String.class)); // Everything is a string right now } } } return new OperatorInfo( - OperatorFactory.getAndMakeChild(PlanUtils.getReduceSinkDesc(reduceKeys, reduceValues, numPartitionFields), + OperatorFactory.getAndMakeChild(PlanUtils.getReduceSinkDesc(reduceKeys, reduceValues, -1, numPartitionFields, + -1, false), new RowSchema(reduceSinkOutputRowResolver.getColumnInfos()), inputOperatorInfo.getOp()), reduceSinkOutputRowResolver @@ -1205,7 +1111,7 @@ private OperatorInfo genGroupByPlanReduceSinkOperator(QBParseInfo parseInfo, assert (outputRS.get("", text) == null); outputRS.put("", text, new ColumnInfo(Utilities.ReduceField.KEY.toString() + "." + Integer.valueOf(reduceKeys.size() - 1).toString(), - String.class, false)); + String.class)); } else { // dummy key @@ -1228,8 +1134,8 @@ private OperatorInfo genGroupByPlanReduceSinkOperator(QBParseInfo parseInfo, if (outputRS.get(key, field) == null) { reduceValues.add(new exprNodeColumnDesc(valueInfo.getType(), valueInfo.getInternalName())); - outputRS.put(key, field, new ColumnInfo(Utilities.ReduceField.VALUE.toString() + "." + Integer.valueOf(reduceValues.size() - 1).toString(), valueInfo.getType(), - valueInfo.getIsVirtual())); + outputRS.put(key, field, new ColumnInfo(Utilities.ReduceField.VALUE.toString() + "." + Integer.valueOf(reduceValues.size() - 1).toString(), + valueInfo.getType())); } } } @@ -1246,7 +1152,7 @@ private OperatorInfo genGroupByPlanReduceSinkOperator(QBParseInfo parseInfo, reduceValues.add(grpbyExprNode); outputRS.put("", text, new ColumnInfo(Utilities.ReduceField.VALUE.toString() + "." + Integer.valueOf(reduceValues.size() - 1).toString(), - grpbyExprNode.getTypeInfo(), false)); + grpbyExprNode.getTypeInfo())); } } @@ -1263,14 +1169,15 @@ private OperatorInfo genGroupByPlanReduceSinkOperator(QBParseInfo parseInfo, reduceValues.add(pNode); outputRS.put("", text, new ColumnInfo(Utilities.ReduceField.VALUE.toString() + "." + Integer.valueOf(reduceValues.size() - 1).toString(), - pNode.getTypeInfo(), false)); + pNode.getTypeInfo())); } } } } return new OperatorInfo( - OperatorFactory.getAndMakeChild(PlanUtils.getReduceSinkDesc(reduceKeys, reduceValues, distinctText == null ? -1 : 1), + OperatorFactory.getAndMakeChild(PlanUtils.getReduceSinkDesc(reduceKeys, reduceValues, + -1, distinctText == null ? -1 : 1, -1, false), new RowSchema(outputRS.getColumnInfos()), input.getOp()), outputRS); } @@ -1305,7 +1212,7 @@ private OperatorInfo genGroupByPlanReduceSinkOperator2MR( reduceKeys.add(new exprNodeColumnDesc(TypeInfoFactory.getPrimitiveTypeInfo(String.class), field)); reduceSinkOutputRowResolver2.put("", grpbyExpr.toStringTree(), new ColumnInfo(Utilities.ReduceField.KEY.toString() + "." 
+ field, - String.class, false)); // Everything is a string right now + String.class)); // Everything is a string right now } // Get partial aggregation results and store in reduceValues ArrayList reduceValues = new ArrayList(); @@ -1314,15 +1221,16 @@ private OperatorInfo genGroupByPlanReduceSinkOperator2MR( .getAggregationExprsForClause(dest); for (Map.Entry entry : aggregationTrees.entrySet()) { reduceValues.add(new exprNodeColumnDesc(TypeInfoFactory.getPrimitiveTypeInfo(String.class), - (Integer.valueOf(inputField)).toString())); + (Integer.valueOf(inputField)).toString())); inputField++; reduceSinkOutputRowResolver2.put("", ((CommonTree)entry.getValue()).toStringTree(), new ColumnInfo(Utilities.ReduceField.VALUE.toString() + "." + (Integer.valueOf(reduceValues.size()-1)).toString(), - String.class, false)); // Everything is a string right now + String.class)); // Everything is a string right now } return new OperatorInfo( - OperatorFactory.getAndMakeChild(PlanUtils.getReduceSinkDesc(reduceKeys, reduceValues, numPartitionFields), + OperatorFactory.getAndMakeChild(PlanUtils.getReduceSinkDesc(reduceKeys, reduceValues, -1, + numPartitionFields, -1, true), new RowSchema(reduceSinkOutputRowResolver2.getColumnInfos()), groupByOperatorInfo.getOp()), reduceSinkOutputRowResolver2 @@ -1351,8 +1259,7 @@ private OperatorInfo genGroupByPlanGroupByOperator2MR( groupByKeys.add(new exprNodeColumnDesc(exprInfo.getType(), expression)); String field = (Integer.valueOf(i)).toString(); groupByOutputRowResolver2.put("",grpbyExpr.toStringTree(), - new ColumnInfo(field, exprInfo.getType(), - exprInfo.getIsVirtual())); + new ColumnInfo(field, exprInfo.getType())); } HashMap aggregationTrees = parseInfo .getAggregationExprsForClause(dest); @@ -1374,8 +1281,7 @@ private OperatorInfo genGroupByPlanGroupByOperator2MR( aggregations.add(new aggregationDesc(aggClass, aggParameters, false)); groupByOutputRowResolver2.put("", value.toStringTree(), new ColumnInfo(Integer.valueOf(groupByKeys.size() + aggregations.size() - 1).toString(), - paraExprInfo.getType(), - paraExprInfo.getIsVirtual())); // Everything is a string right now + paraExprInfo.getType())); // Everything is a string right now } return new OperatorInfo( @@ -1402,14 +1308,10 @@ private OperatorInfo genGroupByPlanGroupByOperator2MR( * @throws SemanticException */ @SuppressWarnings({ "unused", "nls" }) - private OperatorInfoList genGroupByPlan1MR(String dest, QB qb, - OperatorInfoList inputList) throws SemanticException { + private OperatorInfo genGroupByPlan1MR(String dest, QB qb, + OperatorInfo input) throws SemanticException { - // We can assert here that the input list is of size one - if (inputList.size() != 1) { - throw new SemanticException("Select has more than one inputs"); - } - OperatorInfo inputOperatorInfo = inputList.get(0); + OperatorInfo inputOperatorInfo = input; QBParseInfo parseInfo = qb.getParseInfo(); // ////// 1. Generate ReduceSinkOperator @@ -1422,14 +1324,7 @@ private OperatorInfoList genGroupByPlan1MR(String dest, QB qb, OperatorInfo groupByOperatorInfo = genGroupByPlanGroupByOperator(parseInfo, dest, reduceSinkOperatorInfo, groupByDesc.Mode.COMPLETE); - // ////// 3. Generate SelectOperator - OperatorInfo selectOperatorInfo = genGroupByPlanSelectOperator(parseInfo, - dest, groupByOperatorInfo); - - // ////// 4. 
Create output - OperatorInfoList output = new OperatorInfoList(); - output.add(selectOperatorInfo); - return output; + return groupByOperatorInfo; } /** @@ -1450,14 +1345,10 @@ private OperatorInfoList genGroupByPlan1MR(String dest, QB qb, * @throws SemanticException */ @SuppressWarnings("nls") - private OperatorInfoList genGroupByPlan2MR(String dest, QB qb, - OperatorInfoList inputList) throws SemanticException { + private OperatorInfo genGroupByPlan2MR(String dest, QB qb, + OperatorInfo input) throws SemanticException { - // We can assert here that the input list is of size one - if (inputList.size() != 1) { - throw new SemanticException("Select has more than one inputs"); - } - OperatorInfo inputOperatorInfo = inputList.get(0); + OperatorInfo inputOperatorInfo = input; QBParseInfo parseInfo = qb.getParseInfo(); // ////// 1. Generate ReduceSinkOperator @@ -1483,14 +1374,7 @@ private OperatorInfoList genGroupByPlan2MR(String dest, QB qb, OperatorInfo groupByOperatorInfo2 = genGroupByPlanGroupByOperator2MR( parseInfo, dest, reduceSinkOperatorInfo2); - // ////// 5. Generate SelectOperator - OperatorInfo selectOperatorInfo = genGroupByPlanSelectOperator(parseInfo, - dest, groupByOperatorInfo2); - - // ////// 6. Create output - OperatorInfoList output = new OperatorInfoList(); - output.add(selectOperatorInfo); - return output; + return groupByOperatorInfo2; } /** @@ -1500,14 +1384,10 @@ private OperatorInfoList genGroupByPlan2MR(String dest, QB qb, * shared by all groupbys. */ @SuppressWarnings("nls") - private OperatorInfoList genGroupByPlan3MR(String dest, QB qb, - OperatorInfoList inputList) throws SemanticException { + private OperatorInfo genGroupByPlan3MR(String dest, QB qb, + OperatorInfo input) throws SemanticException { - // We can assert here that the input list is of size one - if (inputList.size() != 1) { - throw new SemanticException("Select has more than one inputs"); - } - OperatorInfo inputOperatorInfo = inputList.get(0); + OperatorInfo inputOperatorInfo = input; QBParseInfo parseInfo = qb.getParseInfo(); // ////// Generate GroupbyOperator @@ -1523,19 +1403,12 @@ private OperatorInfoList genGroupByPlan3MR(String dest, QB qb, OperatorInfo groupByOperatorInfo2 = genGroupByPlanGroupByOperator2MR( parseInfo, dest, reduceSinkOperatorInfo2); - // ////// Generate SelectOperator - OperatorInfo selectOperatorInfo = genGroupByPlanSelectOperator(parseInfo, - dest, groupByOperatorInfo2); - - // ////// Create output - OperatorInfoList output = new OperatorInfoList(); - output.add(selectOperatorInfo); - return output; + return groupByOperatorInfo2; } @SuppressWarnings("nls") - private OperatorInfoList genConversionOps(String dest, QB qb, - OperatorInfoList input) throws SemanticException { + private OperatorInfo genConversionOps(String dest, QB qb, + OperatorInfo input) throws SemanticException { Integer dest_type = qb.getMetaData().getDestTypeForAlias(dest); Table dest_tab = null; @@ -1556,50 +1429,12 @@ private OperatorInfoList genConversionOps(String dest, QB qb, } } - /* - // We have the table object here - go over the row resolver - // and check all the types are the same - // Vector srcOpns = input.get(0).getRowResolver().getColumnInfos(); - - Vector insOpns = new Vector(); - try { - StructObjectInspector rowObjectInspector = (StructObjectInspector)dest_tab.getDeserializer().getObjectInspector(); - List fields = rowObjectInspector.getAllStructFieldRefs(); - for (int i=0; i colInfos = inputRR.getColumnInfos(); + + boolean first = true; + for (ColumnInfo colInfo:colInfos) { + String[] 
nm = inputRR.reverseLookup(colInfo.getInternalName()); + if (!first) + cols = cols.concat(","); + + first = false; + if (nm[0] == null) + cols = cols.concat(nm[1]); + else + cols = cols.concat(nm[0] + "." + nm[1]); + } + this.loadFileWork.add(new loadFileDesc(queryTmpdir, dest_path, - (dest_type.intValue() == QBMetaData.DEST_DFS_FILE))); - break; + (dest_type.intValue() == QBMetaData.DEST_DFS_FILE), cols)); + break; } default: throw new SemanticException("Unknown destination type: " + dest_type); } - OperatorInfoList output = (OperatorInfoList)input.clone(); - output.get(0).setOp( + OperatorInfo output = (OperatorInfo)input.clone(); + output.setOp( OperatorFactory.getAndMakeChild( new fileSinkDesc(queryTmpdir, table_desc), - new RowSchema(output.get(0).getRowResolver().getColumnInfos()), - input.get(0).getOp() + new RowSchema(output.getRowResolver().getColumnInfos()), input.getOp() ) ); LOG.debug("Created FileSink Plan for clause: " + dest + "dest_path: " + dest_path + " row schema: " - + output.get(0).getRowResolver().toString()); + + output.getRowResolver().toString()); return output; } @SuppressWarnings("nls") - private OperatorInfoList genReduceSinkPlan(String dest, QB qb, - OperatorInfoList input) throws SemanticException { + private OperatorInfo genLimitPlan(String dest, QB qb, OperatorInfo input, int limit) throws SemanticException { + // A map-only job can be optimized - instead of converting it to a map-reduce job, we can have another map + // job to do the same to avoid the cost of sorting in the map-reduce phase. A better approach would be to + // write into a local file and then have a map-only job. + // Add the limit operator to get the value fields + + OperatorInfo limitMap = (OperatorInfo)input.clone(); + limitMap.setOp( + OperatorFactory.getAndMakeChild( + new limitDesc(limit), new RowSchema(limitMap.getRowResolver().getColumnInfos()), + input.getOp() + ) + ); + + LOG.debug("Created LimitOperator Plan for clause: " + dest + " row schema: " + + limitMap.getRowResolver().toString()); + + return limitMap; + } + + @SuppressWarnings("nls") + private OperatorInfo genLimitMapRedPlan(String dest, QB qb, OperatorInfo input, int limit, boolean isOuterQuery) throws SemanticException { + // A map-only job can be optimized - instead of converting it to a map-reduce job, we can have another map + // job to do the same to avoid the cost of sorting in the map-reduce phase. A better approach would be to + // write into a local file and then have a map-only job. 
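/*
 * Illustrative sketch, not Hive code: the genLimitMapRedPlan method added in this hunk
 * applies LIMIT in two stages. The limit is first applied map-side; when the query is not
 * the outermost one, the rows are then funneled through a single-reducer ReduceSink and the
 * same limit is applied again so that it holds globally (the outermost query instead relies
 * on the fetch operator for the exact limit). The standalone Java below only illustrates that
 * idea; the class and method names are invented and no Hive operator classes are used.
 */
import java.util.ArrayList;
import java.util.List;

class TwoStageLimitSketch {
  // Stage 1: each map task independently keeps at most `limit` rows (the map-side LimitOperator).
  static <T> List<T> localLimit(List<T> rows, int limit) {
    return new ArrayList<>(rows.subList(0, Math.min(limit, rows.size())));
  }

  // Stage 2: a single reduce task merges the per-task outputs and applies the same limit
  // once more, which is what the extra ReduceSink followed by another limit achieves.
  static <T> List<T> globalLimit(List<List<T>> mapOutputs, int limit) {
    List<T> merged = new ArrayList<>();
    for (List<T> part : mapOutputs) {
      merged.addAll(part);
    }
    return localLimit(merged, limit);
  }

  public static void main(String[] args) {
    List<List<Integer>> mapOutputs = new ArrayList<>();
    mapOutputs.add(localLimit(List.of(1, 2, 3, 4), 3));
    mapOutputs.add(localLimit(List.of(5, 6, 7), 3));
    System.out.println(globalLimit(mapOutputs, 3)); // prints [1, 2, 3]
  }
}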
+ // Add the limit operator to get the value fields + OperatorInfo curr = genLimitPlan(dest, qb, input, limit); - // We can assert here that the input list is of size one - if (input.size() != 1) { - throw new SemanticException("Select has more than one inputs"); - } + if (isOuterQuery) + return curr; + + // Create a reduceSink operator followed by another limit + curr = genReduceSinkPlan(dest, qb, curr, 1); + return genLimitPlan(dest, qb, curr, limit); + } + + @SuppressWarnings("nls") + private OperatorInfo genReduceSinkPlan(String dest, QB qb, + OperatorInfo input, int numReducers) throws SemanticException { // First generate the expression for the key // The cluster by clause has the aliases for the keys - CommonTree clby = qb.getParseInfo().getClusterByForClause(dest); ArrayList keyCols = new ArrayList(); - int ccount = clby.getChildCount(); - for(int i=0; i valueCols = new ArrayList(); // For the generation of the values expression just get the inputs // signature and generate field expressions for those - for(ColumnInfo colInfo: input.get(0).getRowResolver().getColumnInfos()) { + for(ColumnInfo colInfo: input.getRowResolver().getColumnInfos()) { valueCols.add(new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName())); } - OperatorInfoList interim = (OperatorInfoList)input.clone(); - interim.get(0).setOp( + OperatorInfo interim = (OperatorInfo)input.clone(); + interim.setOp( OperatorFactory.getAndMakeChild( - PlanUtils.getReduceSinkDesc(keyCols, valueCols, keyCols.size()), - new RowSchema(interim.get(0).getRowResolver().getColumnInfos()), - input.get(0).getOp() + PlanUtils.getReduceSinkDesc(keyCols, valueCols, -1, keyCols.size(), numReducers, false), + new RowSchema(interim.getRowResolver().getColumnInfos()), + input.getOp() ) ); // Add the extract operator to get the value fields RowResolver out_rwsch = new RowResolver(); - RowResolver interim_rwsch = interim.get(0).getRowResolver(); + RowResolver interim_rwsch = interim.getRowResolver(); Integer pos = Integer.valueOf(0); for(ColumnInfo colInfo: interim_rwsch.getColumnInfos()) { String [] info = interim_rwsch.reverseLookup(colInfo.getInternalName()); out_rwsch.put(info[0], info[1], - new ColumnInfo(pos.toString(), colInfo.getType(), colInfo.getIsVirtual())); + new ColumnInfo(pos.toString(), colInfo.getType())); pos = Integer.valueOf(pos.intValue() + 1); } - OperatorInfoList output = (OperatorInfoList)interim.clone(); - output.get(0).setOp( + OperatorInfo output = (OperatorInfo)interim.clone(); + output.setOp( OperatorFactory.getAndMakeChild( - new extractDesc( - new exprNodeColumnDesc(String.class, Utilities.ReduceField.VALUE.toString()) - ), + new extractDesc(new exprNodeColumnDesc(String.class, Utilities.ReduceField.VALUE.toString())), new RowSchema(out_rwsch.getColumnInfos()), - interim.get(0).getOp() + interim.getOp() ) ); - output.get(0).setRowResolver(out_rwsch); + output.setRowResolver(out_rwsch); LOG.debug("Created ReduceSink Plan for clause: " + dest + " row schema: " - + output.get(0).getRowResolver().toString()); + + output.getRowResolver().toString()); return output; } @@ -1760,8 +1644,8 @@ private OperatorInfo genJoinOperatorChildren(QBJoinTree join, OperatorInfo left, ColumnInfo valueInfo = inputRS.get(key, field); keyDesc.add(new exprNodeColumnDesc(valueInfo.getType(), valueInfo.getInternalName())); if (outputRS.get(key, field) == null) - outputRS.put(key, field, new ColumnInfo((Integer.valueOf(outputPos++)).toString(), valueInfo.getType(), - valueInfo.getIsVirtual())); + outputRS.put(key, field, new 
ColumnInfo((Integer.valueOf(outputPos++)).toString(), + valueInfo.getType())); } } @@ -1808,17 +1692,19 @@ private OperatorInfo genJoinReduceSinkChild(QB qb, QBJoinTree joinTree, outputRS.put(src, field, new ColumnInfo(Utilities.ReduceField.VALUE.toString() + "." + Integer.valueOf(reduceValues.size() - 1).toString(), - valueInfo.getType(), valueInfo.getIsVirtual())); + valueInfo.getType())); } } - return new OperatorInfo(OperatorFactory.getAndMakeChild(PlanUtils.getReduceSinkDesc( - reduceKeys, reduceValues, joinTree.getNextTag(), reduceKeys.size()), new RowSchema(outputRS.getColumnInfos()), + return new OperatorInfo( + OperatorFactory.getAndMakeChild( + PlanUtils.getReduceSinkDesc(reduceKeys, reduceValues, joinTree.getNextTag(), reduceKeys.size(), -1, false), + new RowSchema(outputRS.getColumnInfos()), child.getOp()), outputRS); } private OperatorInfo genJoinOperator(QB qb, QBJoinTree joinTree, - HashMap map) throws SemanticException { + HashMap map) throws SemanticException { QBJoinTree leftChild = joinTree.getJoinSrc(); OperatorInfo joinSrcOp = null; if (leftChild != null) @@ -1831,7 +1717,7 @@ private OperatorInfo genJoinOperator(QB qb, QBJoinTree joinTree, int pos = 0; for (String src : joinTree.getBaseSrc()) { if (src != null) { - OperatorInfo srcOp = map.get(src).get(0); + OperatorInfo srcOp = map.get(src); srcOps[pos] = genJoinReduceSinkChild(qb, joinTree, srcOp, src, pos); pos++; } else { @@ -1881,13 +1767,11 @@ private void genJoinOperatorTypeCheck(OperatorInfo left, OperatorInfo[] right) t } } - private OperatorInfoList genJoinPlan(QB qb, HashMap map) + private OperatorInfo genJoinPlan(QB qb, HashMap map) throws SemanticException { QBJoinTree joinTree = qb.getQbJoinTree(); OperatorInfo joinOp = genJoinOperator(qb, joinTree, map); - OperatorInfoList output = new OperatorInfoList(); - output.add(joinOp); - return output; + return joinOp; } private QBJoinTree genJoinTree(CommonTree joinParseTree) @@ -2104,19 +1988,19 @@ private void mergeJoinTree(QB qb) { } @SuppressWarnings("nls") - private OperatorInfoList genBodyPlan(QB qb, OperatorInfoList input) + private OperatorInfo genBodyPlan(QB qb, OperatorInfo input) throws SemanticException { QBParseInfo qbp = qb.getParseInfo(); - OperatorInfoList output = new OperatorInfoList(); TreeSet ks = new TreeSet(); ks.addAll(qbp.getClauseNames()); String distinctText = null; CommonTree distn = null; - OperatorInfoList opList = null; + OperatorInfo op = null; boolean grpBy = false; + int numGrpBy = 0; // In case of a multiple group bys, all of them should have the same distinct key for (String dest : ks) { @@ -2124,6 +2008,7 @@ private OperatorInfoList genBodyPlan(QB qb, OperatorInfoList input) if ((qbp.getAggregationExprsForClause(dest).size() != 0) || (getGroupByForClause(qbp, dest).size() > 0)) { grpBy = true; + numGrpBy++; // If there is a distinctFuncExp, add all parameters to the reduceKeys. if (qbp.getDistinctFuncExprForClause(dest) != null) { @@ -2142,63 +2027,71 @@ private OperatorInfoList genBodyPlan(QB qb, OperatorInfoList input) // In the first stage, copy the input and all the group by expressions // and aggregate paramaters. This can be optimized in the future to only - // evaluate expressions that occur frequently - if (grpBy) { + // evaluate expressions that occur frequently. For a single groupby, no need to do so + if (grpBy && (numGrpBy > 1)) { OperatorInfo reduceSinkOperatorInfo = - genGroupByPlanReduceSinkOperator(qbp, input.get(0), distn, ks); + genGroupByPlanReduceSinkOperator(qbp, input, distn, ks); // ////// 2. 
Generate GroupbyOperator OperatorInfo forwardOperatorInfo = genGroupByPlanForwardOperator(qbp, reduceSinkOperatorInfo); - opList = new OperatorInfoList(); - opList.add(forwardOperatorInfo); + op = forwardOperatorInfo; } // Go over all the destination tables + OperatorInfo curr = null; for (String dest : ks) { boolean groupByExpr = false; if (qbp.getAggregationExprsForClause(dest).size() != 0 || getGroupByForClause(qbp, dest).size() > 0) groupByExpr = true; - OperatorInfoList curr = input; - if (groupByExpr) - curr = opList; + curr = input; + if (groupByExpr && (numGrpBy > 1)) + curr = op; if (qbp.getWhrForClause(dest) != null) { curr = genFilterPlan(dest, qb, curr); } if (qbp.getAggregationExprsForClause(dest).size() != 0 - || getGroupByForClause(qbp, dest).size() > 0) - curr = genGroupByPlan3MR(dest, qb, curr); - else - curr = genSelectPlan(dest, qb, curr); - - if (qbp.getClusterByForClause(dest) != null) { - curr = genReduceSinkPlan(dest, qb, curr); - } else if (!qbp.getIsSubQ()) { + || getGroupByForClause(qbp, dest).size() > 0) { + if (numGrpBy > 1) + curr = genGroupByPlan3MR(dest, qb, curr); + else + curr = genGroupByPlan2MR(dest, qb, curr); + } + + curr = genSelectPlan(dest, qb, curr); + Integer limit = qbp.getDestLimit(dest); + + if (qbp.getIsSubQ()) { + if (qbp.getClusterByForClause(dest) != null) + curr = genReduceSinkPlan(dest, qb, curr, -1); + if (limit != null) + curr = genLimitMapRedPlan(dest, qb, curr, limit.intValue(), false); + } + else + { curr = genConversionOps(dest, qb, curr); + // exact limit can be taken care of by the fetch operator + if (limit != null) { + curr = genLimitMapRedPlan(dest, qb, curr, limit.intValue(), true); + qb.getParseInfo().setOuterQueryLimit(limit.intValue()); + } curr = genFileSinkPlan(dest, qb, curr); } - - output.addAll(curr); } LOG.debug("Created Body Plan for Query Block " + qb.getId()); - return output; + return curr; } @SuppressWarnings("nls") - private OperatorInfoList genUnionPlan(String unionalias, String leftalias, - OperatorInfoList left, String rightalias, OperatorInfoList right) + private OperatorInfo genUnionPlan(String unionalias, String leftalias, + OperatorInfo leftOp, String rightalias, OperatorInfo rightOp) throws SemanticException { - if (left.size() != 1) { - throw new SemanticException("Select has more than one inputs"); - } - OperatorInfo leftOp = left.get(0); RowResolver leftRR = leftOp.getRowResolver(); - OperatorInfo rightOp = right.get(0); RowResolver rightRR = rightOp.getRowResolver(); HashMap leftmap = leftRR.getFieldMap(leftalias); HashMap rightmap = rightRR.getFieldMap(rightalias); @@ -2236,8 +2129,7 @@ private OperatorInfoList genUnionPlan(String unionalias, String leftalias, rightOp.getOp().setChildOperators(child); leftOp.getOp().setChildOperators(child); // create operator info list to return - OperatorInfoList unionout = new OperatorInfoList(); - unionout.add(new OperatorInfo(unionforward, unionoutRR)); + OperatorInfo unionout = new OperatorInfo(unionforward, unionoutRR); return unionout; } @@ -2245,7 +2137,7 @@ private exprNodeDesc genSamplePredicate(TableSample ts) { // ((default_sample_hashfn(cols) & Integer.MAX_VALUE) % denominator) == numerator exprNodeDesc numeratorExpr = new exprNodeConstantDesc( TypeInfoFactory.getPrimitiveTypeInfo(Integer.class), - Integer.valueOf(ts.getNumerator())); + Integer.valueOf(ts.getNumerator() - 1)); exprNodeDesc denominatorExpr = new exprNodeConstantDesc( TypeInfoFactory.getPrimitiveTypeInfo(Integer.class), @@ -2277,7 +2169,7 @@ private exprNodeDesc 
genSamplePredicate(TableSample ts) { } @SuppressWarnings("nls") - private OperatorInfoList genTablePlan(String alias, QB qb) + private OperatorInfo genTablePlan(String alias, QB qb) throws SemanticException { Table tab = qb.getMetaData().getSrcForAlias(alias); @@ -2289,7 +2181,7 @@ private OperatorInfoList genTablePlan(String alias, QB qb) for (int i=0; i aliasToOpInfo = new HashMap(); + HashMap aliasToOpInfo = new HashMap(); // Recurse over the subqueries to fill the subquery part of the plan for (String alias : qb.getSubqAliases()) { @@ -2417,7 +2308,7 @@ private OperatorInfoList genPlan(QB qb) throws SemanticException { aliasToOpInfo.put(alias, genTablePlan(alias, qb)); } - OperatorInfoList srcOpInfoList = null; + OperatorInfo srcOpInfo = null; // process join if (qb.getParseInfo().getJoinExpr() != null) { @@ -2425,26 +2316,29 @@ private OperatorInfoList genPlan(QB qb) throws SemanticException { QBJoinTree joinTree = genJoinTree(joinExpr); qb.setQbJoinTree(joinTree); mergeJoinTree(qb); - srcOpInfoList = genJoinPlan(qb, aliasToOpInfo); + srcOpInfo = genJoinPlan(qb, aliasToOpInfo); } else // Now if there are more than 1 sources then we have a join case // later we can extend this to the union all case as well - srcOpInfoList = aliasToOpInfo.values() - .iterator().next(); + srcOpInfo = aliasToOpInfo.values().iterator().next(); - OperatorInfoList bodyOpInfoList = genBodyPlan(qb, srcOpInfoList); + OperatorInfo bodyOpInfo = genBodyPlan(qb, srcOpInfo); LOG.debug("Created Plan for Query Block " + qb.getId()); - return bodyOpInfoList; + // is it a top level QB, and can it be optimized ? For eg: select * from T does not need a map-reduce job + QBParseInfo qbp = qb.getParseInfo(); + qbp.setCanOptTopQ(qb.isSelectStarQuery()); + + return bodyOpInfo; } - private Operator getReducer(Operator top) { + private Operator getReduceSink(Operator top) { if (top.getClass() == ReduceSinkOperator.class) { // Get the operator following the reduce sink assert (top.getChildOperators().size() == 1); - return top.getChildOperators().get(0); + return top; } List> childOps = top.getChildOperators(); @@ -2453,7 +2347,7 @@ private Operator getReducer(Operator reducer = getReducer(childOps.get(i)); + Operator reducer = getReduceSink(childOps.get(i)); if (reducer != null) { return reducer; } @@ -2463,13 +2357,41 @@ private Operator getReducer(Operator mvTask = null; + Task fetchTask = null; + + if (qb.getParseInfo().getCanOptTopQ()) { + Iterator> iter = qb.getMetaData().getAliasToTable().entrySet().iterator(); + Table tab = ((Map.Entry)iter.next()).getValue(); + fetch = new fetchWork(tab.getPath(), tab.getDeserializer().getClass(), + tab.getInputFormatClass(), tab.getSchema(), qb.getParseInfo().getOuterQueryLimit()); + + fetchTask = TaskFactory.get(fetch, this.conf); + setFetchTask(fetchTask); + return; + } - // First we generate the move work as this needs to be made dependent on all - // the tasks - // that have a file sink operation - moveWork mv = new moveWork(loadTableWork, loadFileWork); - Task mvTask = TaskFactory.get(mv, this.conf); + // In case of a select, use a fetch task instead of a move task + if (qb.getIsQuery()) { + if ((!loadTableWork.isEmpty()) || (loadFileWork.size() != 1)) + throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg()); + String cols = loadFileWork.get(0).getColumns(); + fetch = new fetchWork(new Path(loadFileWork.get(0).getSourceDir()), + MetadataTypedColumnsetSerDe.class, TextInputFormat.class, + Utilities.makeProperties("columns", cols), qb.getParseInfo().getOuterQueryLimit()); + + 
fetchTask = TaskFactory.get(fetch, this.conf); + setFetchTask(fetchTask); + } + else { + // First we generate the move work as this needs to be made dependent on all + // the tasks that have a file sink operation + mv = new moveWork(loadTableWork, loadFileWork); + mvTask = TaskFactory.get(mv, this.conf); + } // Maintain a map from the top level left most reducer in each of these // trees @@ -2479,12 +2401,24 @@ private void genMapRedTasks() throws SemanticException { new HashMap, Task>(); for (String alias_id : this.topOps.keySet()) { Operator topOp = this.topOps.get(alias_id); - Operator reducer = getReducer(topOp); + Operator reduceSink = getReduceSink(topOp); + Operator reducer = null; + if (reduceSink != null) + reducer = reduceSink.getChildOperators().get(0); Task rootTask = opTaskMap.get(reducer); if (rootTask == null) { rootTask = TaskFactory.get(getMapRedWork(), this.conf); opTaskMap.put(reducer, rootTask); ((mapredWork) rootTask.getWork()).setReducer(reducer); + reduceSinkDesc desc = (reduceSink == null) ? null : (reduceSinkDesc)reduceSink.getConf(); + + // The number of reducers may be specified in the plan in some cases, or may need to be inferred + if (desc != null) { + if (desc.getNumReducers() != -1) + ((mapredWork) rootTask.getWork()).setNumReduceTasks(new Integer(desc.getNumReducers())); + else if (desc.getInferNumReducers() == true) + ((mapredWork) rootTask.getWork()).setInferNumReducers(true); + } this.rootTasks.add(rootTask); } genTaskPlan(topOp, rootTask, opTaskMap, mvTask); @@ -2533,7 +2467,7 @@ private void genTaskPlan(Operator op, Task, Task> redTaskMap, Task mvTask) { // Check if this is a file sink operator - if (op.getClass() == FileSinkOperator.class) { + if ((op.getClass() == FileSinkOperator.class) && (mvTask != null)) { // If this is a file sink operator then set the move task to be dependent // on the current task currTask.addDependentTask(mvTask); @@ -2577,6 +2511,10 @@ else if (plan.getReducer() != reducer) { ctask = TaskFactory.get(cplan, this.conf); // Add the reducer cplan.setReducer(reducer); + if (((reduceSinkDesc)child.getConf()).getNumReducers() != -1) + cplan.setNumReduceTasks(new Integer(((reduceSinkDesc)child.getConf()).getNumReducers())); + else + cplan.setInferNumReducers(((reduceSinkDesc)child.getConf()).getInferNumReducers()); redTaskMap.put(reducer, ctask); // Recurse on the reducer @@ -2668,7 +2606,7 @@ private mapredWork getMapRedWork() { @Override @SuppressWarnings("nls") - public void analyze(CommonTree ast, Context ctx) throws SemanticException { + public void analyzeInternal(CommonTree ast, Context ctx) throws SemanticException { this.ctx = ctx; reset(); @@ -2696,7 +2634,7 @@ public void analyze(CommonTree ast, Context ctx) throws SemanticException { // At this point we have the complete operator tree // from which we want to find the reduce operator - genMapRedTasks(); + genMapRedTasks(qb); LOG.info("Completed plan generation"); @@ -2716,6 +2654,7 @@ public static exprNodeDesc getFuncExprNodeDesc(String name, exprNodeDesc... chil /** * This function create an ExprNodeDesc for a UDF function given the children (arguments). * It will insert implicit type conversion functions if necessary. 
+ * @throws SemanticException */ public static exprNodeDesc getFuncExprNodeDesc(String udfName, List children) { // Find the corresponding method @@ -2725,7 +2664,33 @@ public static exprNodeDesc getFuncExprNodeDesc(String udfName, List specialUnaryOperatorTextHashMap; static HashMap specialFunctionTextHashMap; static HashMap conversionFunctionTextHashMap; static { + specialUnaryOperatorTextHashMap = new HashMap(); + specialUnaryOperatorTextHashMap.put(HiveParser.PLUS, "positive"); + specialUnaryOperatorTextHashMap.put(HiveParser.MINUS, "negative"); specialFunctionTextHashMap = new HashMap(); specialFunctionTextHashMap.put(HiveParser.TOK_ISNULL, "isnull"); specialFunctionTextHashMap.put(HiveParser.TOK_ISNOTNULL, "isnotnull"); @@ -2873,16 +2841,23 @@ public static boolean isRedundantConversionFunction(CommonTree expr, boolean isF } public static String getFunctionText(CommonTree expr, boolean isFunction) { - String funcText; + String funcText = null; if (!isFunction) { - // For operator, the function name is the operator text - funcText = expr.getText(); + // For operator, the function name is the operator text, unless it's in our special dictionary + if (expr.getChildCount() == 1) { + funcText = specialUnaryOperatorTextHashMap.get(expr.getType()); + } + if (funcText == null) { + funcText = expr.getText(); + } } else { - // For TOK_FUNCTION, the function name is stored in the first child. + // For TOK_FUNCTION, the function name is stored in the first child, unless it's in our + // special dictionary. assert(expr.getChildCount() >= 1); - funcText = specialFunctionTextHashMap.get(((CommonTree)expr.getChild(0)).getType()); + int funcType = ((CommonTree)expr.getChild(0)).getType(); + funcText = specialFunctionTextHashMap.get(funcType); if (funcText == null) { - funcText = conversionFunctionTextHashMap.get(((CommonTree)expr.getChild(0)).getType()); + funcText = conversionFunctionTextHashMap.get(funcType); } if (funcText == null) { funcText = ((CommonTree)expr.getChild(0)).getText(); @@ -2911,24 +2886,60 @@ static exprNodeDesc getXpathOrFuncExprNodeDesc(CommonTree expr, boolean isFuncti exprNodeConstantDesc fieldName = (exprNodeConstantDesc)children.get(1); assert(fieldName.getValue() instanceof String); - // Calculate TypeInfo + // Calculate result TypeInfo String fieldNameString = (String)fieldName.getValue(); - TypeInfo t = object.getTypeInfo().getStructFieldTypeInfo(fieldNameString); + TypeInfo objectTypeInfo = object.getTypeInfo(); + + // Allow accessing a field of list element structs directly from a list + boolean isList = (object.getTypeInfo().getCategory() == ObjectInspector.Category.LIST); + if (isList) { + objectTypeInfo = objectTypeInfo.getListElementTypeInfo(); + } + if (objectTypeInfo.getCategory() != Category.STRUCT) { + throw new SemanticException(ErrorMsg.INVALID_DOT.getMsg(expr)); + } + TypeInfo t = objectTypeInfo.getStructFieldTypeInfo(fieldNameString); + if (isList) { + t = TypeInfoFactory.getListTypeInfo(t); + } - desc = new exprNodeFieldDesc(t, children.get(0), fieldNameString); + desc = new exprNodeFieldDesc(t, children.get(0), fieldNameString, isList); } else if (funcText.equals("[")){ // "[]" : LSQUARE/INDEX Expression assert(children.size() == 2); - // Only allow constant integer index for now - if (!(children.get(1) instanceof exprNodeConstantDesc) - || !(((exprNodeConstantDesc)children.get(1)).getValue() instanceof Integer)) { - throw new SemanticException(ErrorMsg.INVALID_ARRAYINDEX_CONSTANT.getMsg(expr)); - } - // Calculate TypeInfo - TypeInfo t = 
children.get(0).getTypeInfo().getListElementTypeInfo(); - desc = new exprNodeIndexDesc(t, children.get(0), children.get(1)); + // Check whether this is a list or a map + TypeInfo myt = children.get(0).getTypeInfo(); + + if (myt.getCategory() == Category.LIST) { + // Only allow constant integer index for now + if (!(children.get(1) instanceof exprNodeConstantDesc) + || !(((exprNodeConstantDesc)children.get(1)).getValue() instanceof Integer)) { + throw new SemanticException(ErrorMsg.INVALID_ARRAYINDEX_CONSTANT.getMsg(expr)); + } + + // Calculate TypeInfo + TypeInfo t = myt.getListElementTypeInfo(); + desc = new exprNodeIndexDesc(t, children.get(0), children.get(1)); + } + else if (myt.getCategory() == Category.MAP) { + // Only allow only constant indexes for now + if (!(children.get(1) instanceof exprNodeConstantDesc)) { + throw new SemanticException(ErrorMsg.INVALID_MAPINDEX_CONSTANT.getMsg(expr)); + } + if (!(((exprNodeConstantDesc)children.get(1)).getValue().getClass() == + myt.getMapKeyTypeInfo().getPrimitiveClass())) { + throw new SemanticException(ErrorMsg.INVALID_MAPINDEX_TYPE.getMsg(expr)); + } + // Calculate TypeInfo + TypeInfo t = myt.getMapValueTypeInfo(); + + desc = new exprNodeIndexDesc(t, children.get(0), children.get(1)); + } + else { + throw new SemanticException(ErrorMsg.NON_COLLECTION_TYPE.getMsg(expr)); + } } else { // other operators or functions Class udf = FunctionRegistry.getUDFClass(funcText); @@ -2955,6 +2966,16 @@ static exprNodeDesc getXpathOrFuncExprNodeDesc(CommonTree expr, boolean isFuncti } } } + // UDFOPPositive is a no-op. + // However, we still create it, and then remove it here, to make sure we only allow + // "+" for numeric types. + if (desc instanceof exprNodeFuncDesc) { + exprNodeFuncDesc funcDesc = (exprNodeFuncDesc)desc; + if (funcDesc.getUDFClass().equals(UDFOPPositive.class)) { + assert(funcDesc.getChildren().size() == 1); + desc = funcDesc.getChildren().get(0); + } + } assert(desc != null); return desc; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java index 82629e930..91602ea64 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java @@ -36,8 +36,11 @@ public static BaseSemanticAnalyzer get(HiveConf conf, CommonTree tree) throws Se case HiveParser.TOK_DROPTABLE: case HiveParser.TOK_DESCTABLE: case HiveParser.TOK_ALTERTABLE_ADDCOLS: + case HiveParser.TOK_ALTERTABLE_REPLACECOLS: case HiveParser.TOK_ALTERTABLE_RENAME: - case HiveParser.TOK_SHOWTABLES: + case HiveParser.TOK_ALTERTABLE_DROPPARTS: + case HiveParser.TOK_SHOWTABLES: + case HiveParser.TOK_SHOWPARTITIONS: return new DDLSemanticAnalyzer(conf); case HiveParser.TOK_CREATEFUNCTION: return new FunctionSemanticAnalyzer(conf); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java index 04e40e9de..87b2c94bd 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java @@ -22,11 +22,12 @@ public class DDLWork implements Serializable { private static final long serialVersionUID = 1L; - private createTableDesc createTblDesc; - private dropTableDesc dropTblDesc; - private alterTableDesc alterTblDesc; - private showTablesDesc showTblsDesc; - private descTableDesc descTblDesc; + private createTableDesc createTblDesc; + private dropTableDesc 
dropTblDesc; + private alterTableDesc alterTblDesc; + private showTablesDesc showTblsDesc; + private showPartitionsDesc showPartsDesc; + private descTableDesc descTblDesc; public DDLWork() { } @@ -65,6 +66,13 @@ public DDLWork(showTablesDesc showTblsDesc) { this.showTblsDesc = showTblsDesc; } + /** + * @param showPartsDesc + */ + public DDLWork(showPartitionsDesc showPartsDesc) { + this.showPartsDesc = showPartsDesc; + } + /** * @return the createTblDesc */ @@ -125,6 +133,22 @@ public void setShowTblsDesc(showTablesDesc showTblsDesc) { this.showTblsDesc = showTblsDesc; } + + /** + * @return the showPartsDesc + */ + @explain(displayName="Show Partitions Operator") + public showPartitionsDesc getShowPartsDesc() { + return showPartsDesc; + } + + /** + * @param showPartsDesc the showPartsDesc to set + */ + public void setShowPartsDesc(showPartitionsDesc showPartsDesc) { + this.showPartsDesc = showPartsDesc; + } + /** * @return the descTblDesc */ diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java index 8a7c2cb7e..46aecad63 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java @@ -63,21 +63,14 @@ public static tableDesc getDefaultTableDesc(String separatorCode) { // We will make reduce key and reduce value TableDesc with configurable SerDes - public static reduceSinkDesc getReduceSinkDesc(ArrayList keyCols, - ArrayList valueCols, int numPartitionFields) { + public static reduceSinkDesc getReduceSinkDesc(ArrayList keyCols, + ArrayList valueCols, + int tag, int numPartitionFields, + int numReducers, boolean inferNumReducers) { - return new reduceSinkDesc(keyCols, valueCols, numPartitionFields, - getDefaultTableDesc("" + Utilities.ctrlaCode, ObjectInspectorUtils.getIntegerCSV(keyCols.size())), - getDefaultTableDesc("" + Utilities.ctrlaCode, ObjectInspectorUtils.getIntegerCSV(valueCols.size()))); - } - - // We will make reduce key and reduce value TableDesc with configurable SerDes - public static reduceSinkDesc getReduceSinkDesc(ArrayList keyCols, - ArrayList valueCols, int tag, int numPartitionFields) { - - return new reduceSinkDesc(keyCols, valueCols, tag, numPartitionFields, - getDefaultTableDesc("" + Utilities.ctrlaCode, ObjectInspectorUtils.getIntegerCSV(keyCols.size())), - getDefaultTableDesc("" + Utilities.ctrlaCode, ObjectInspectorUtils.getIntegerCSV(valueCols.size()))); + return new reduceSinkDesc(keyCols, valueCols, tag, numPartitionFields, numReducers, inferNumReducers, + getDefaultTableDesc("" + Utilities.ctrlaCode, ObjectInspectorUtils.getIntegerCSV(keyCols.size())), + getDefaultTableDesc("" + Utilities.ctrlaCode, ObjectInspectorUtils.getIntegerCSV(valueCols.size()))); } // We should read the TableDesc from gWork when it is available. 
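As a rough illustration of how the widened getReduceSinkDesc signature above is meant to be consumed, the fragment below mimics the reducer-count decision made in the genMapRedTasks hunk earlier in this patch: an explicit count wins, otherwise inference can be requested, otherwise a configured default applies. This is a standalone, hypothetical sketch; the class name, method name and default value are invented and nothing here is Hive API.

class ReducerCountSketch {
  static final int UNSET = -1; // the same sentinel reduceSinkDesc uses for an unspecified numReducers

  // Mirrors the check genMapRedTasks performs on a reduceSinkDesc when building a mapredWork.
  static String chooseReducers(int numReducers, boolean inferNumReducers, int configuredDefault) {
    if (numReducers != UNSET) {
      return "explicit reducer count: " + numReducers;
    }
    if (inferNumReducers) {
      return "infer reducer count from input size at execution time";
    }
    return "use configured default: " + configuredDefault;
  }

  public static void main(String[] args) {
    System.out.println(chooseReducers(1, false, 4));     // e.g. the single-reducer LIMIT stage
    System.out.println(chooseReducers(UNSET, true, 4));  // e.g. the second group-by ReduceSink (inferNumReducers = true)
    System.out.println(chooseReducers(UNSET, false, 4)); // a plain ReduceSink with nothing requested
  }
}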
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/alterTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/alterTableDesc.java index 64f281a6f..0af2cdf4c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/alterTableDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/alterTableDesc.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hive.ql.plan; import java.io.Serializable; -import java.util.ArrayList; import java.util.List; import org.apache.hadoop.hive.metastore.api.FieldSchema; @@ -29,7 +28,7 @@ public class alterTableDesc extends ddlDesc implements Serializable { private static final long serialVersionUID = 1L; - public static enum alterTableTypes {RENAME, ADDCOLS}; + public static enum alterTableTypes {RENAME, ADDCOLS, REPLACECOLS}; alterTableTypes op; String oldName; @@ -50,8 +49,8 @@ public alterTableDesc(String oldName, String newName) { * @param name name of the table * @param newCols new columns to be added */ - public alterTableDesc(String name, List newCols) { - op = alterTableTypes.ADDCOLS; + public alterTableDesc(String name, List newCols, alterTableTypes alterType) { + this.op = alterType; this.oldName = name; this.newCols = newCols; } @@ -100,6 +99,8 @@ public String getAlterTableTypeString() { return "rename"; case ADDCOLS: return "add columns"; + case REPLACECOLS: + return "replace columns"; } return "unknown"; @@ -128,4 +129,5 @@ public List getNewColsString() { public void setNewCols(List newCols) { this.newCols = newCols; } + } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/createTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/createTableDesc.java index e63466e3d..3686973ad 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/createTableDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/createTableDesc.java @@ -20,6 +20,7 @@ import java.io.Serializable; import java.util.List; +import java.util.Map; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.Order; @@ -43,6 +44,8 @@ public class createTableDesc extends ddlDesc implements Serializable String comment; boolean isSequenceFile; String location; + String serName; + Map mapProp; public createTableDesc(String tableName, boolean isExternal, List cols, List partCols, @@ -51,7 +54,7 @@ public createTableDesc(String tableName, boolean isExternal, String collItemDelim, String mapKeyDelim, String lineDelim, String comment, boolean isSequenceFile, - String location) { + String location, String serName, Map mapProp) { this.tableName = tableName; this.isExternal = isExternal; this.bucketCols = bucketCols; @@ -66,6 +69,8 @@ public createTableDesc(String tableName, boolean isExternal, this.mapKeyDelim = mapKeyDelim; this.numBuckets = numBuckets; this.partCols = partCols; + this.serName = serName; + this.mapProp = mapProp; } @explain(displayName="name") @@ -166,7 +171,7 @@ public void setComment(String comment) { this.comment = comment; } - @explain(displayName="isSequenceFile") + @explain(displayName="isSequenceFile") public boolean isSequenceFile() { return isSequenceFile; } @@ -207,4 +212,35 @@ public List getSortCols() { public void setSortCols(List sortCols) { this.sortCols = sortCols; } + + /** + * @return the serDeName + */ + @explain(displayName="serde name") + public String getSerName() { + return serName; + } + + /** + * @param serName the serName to set + */ + public void setSerName(String serName) { + this.serName = serName; + } + + /** + * @return the serDe properties + */ + @explain(displayName="serde properties") 
+ public Map getMapProp() { + return mapProp; + } + + /** + * @param mapProp the map properties to set + */ + public void setMapProp(Map mapProp) { + this.mapProp = mapProp; + } + } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/descTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/descTableDesc.java index a88042c00..3b099342b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/descTableDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/descTableDesc.java @@ -19,6 +19,8 @@ package org.apache.hadoop.hive.ql.plan; import java.io.Serializable; +import java.util.HashMap; + import org.apache.hadoop.fs.Path; @explain(displayName="Describe Table") @@ -26,27 +28,68 @@ public class descTableDesc extends ddlDesc implements Serializable { private static final long serialVersionUID = 1L; - String tableName; + String tableName; + HashMap partSpec; Path resFile; + boolean isExt; /** + * @param isExt + * @param partSpec * @param resFile * @param tableName */ - public descTableDesc(Path resFile, String tableName) { + public descTableDesc(Path resFile, String tableName, HashMap partSpec, boolean isExt) { + this.isExt = isExt; + this.partSpec = partSpec; this.resFile = resFile; this.tableName = tableName; } + /** + * @return the isExt + */ + public boolean isExt() { + return isExt; + } + + /** + * @param isExt the isExt to set + */ + public void setExt(boolean isExt) { + this.isExt = isExt; + } + + /** + * @return the tableName + */ @explain(displayName="table") public String getTableName() { return tableName; } + /** + * @param tableName the tableName to set + */ public void setTableName(String tableName) { this.tableName = tableName; } + /** + * @return the partSpec + */ + @explain(displayName="partition") + public HashMap getPartSpec() { + return partSpec; + } + + /** + * @param partSpec the partSpec to set + */ + public void setPartSpecs(HashMap partSpec) { + this.partSpec = partSpec; + } + /** * @return the resFile */ diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/dropTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/dropTableDesc.java index 248450951..4d5ea1d18 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/dropTableDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/dropTableDesc.java @@ -19,6 +19,8 @@ package org.apache.hadoop.hive.ql.plan; import java.io.Serializable; +import java.util.HashMap; +import java.util.List; @explain(displayName="Drop Table") public class dropTableDesc extends ddlDesc implements Serializable @@ -26,12 +28,19 @@ public class dropTableDesc extends ddlDesc implements Serializable private static final long serialVersionUID = 1L; String tableName; + List> partSpecs; /** * @param tableName */ public dropTableDesc(String tableName) { this.tableName = tableName; + this.partSpecs = null; + } + + public dropTableDesc(String tableName, List> partSpecs) { + this.tableName = tableName; + this.partSpecs = partSpecs; } /** @@ -48,4 +57,18 @@ public String getTableName() { public void setTableName(String tableName) { this.tableName = tableName; } + + /** + * @return the partSpecs + */ + public List> getPartSpecs() { + return partSpecs; + } + + /** + * @param partSpecs the partSpecs to set + */ + public void setPartSpecs(List> partSpecs) { + this.partSpecs = partSpecs; + } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java index 7cd2bc631..e43820186 100755 --- 
a/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java @@ -26,11 +26,13 @@ public class exprNodeColumnDesc extends exprNodeDesc implements Serializable { private static final long serialVersionUID = 1L; private String column; + private boolean isVirtual; public exprNodeColumnDesc() {} public exprNodeColumnDesc(TypeInfo typeInfo, String column) { super(typeInfo); this.column = column; + this.isVirtual = isVirtual; } public exprNodeColumnDesc(Class c, String column) { super(TypeInfoFactory.getPrimitiveTypeInfo(c)); @@ -42,6 +44,7 @@ public String getColumn() { public void setColumn(String column) { this.column = column; } + public String toString() { return "Column[" + column + "]"; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java index ae1434364..bcba3f110 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java @@ -25,13 +25,18 @@ public class exprNodeFieldDesc extends exprNodeDesc implements Serializable { private static final long serialVersionUID = 1L; exprNodeDesc desc; - String fieldName; + String fieldName; + + // Used to support a.b where a is a list of struct that contains a field called b. + // a.b will return an array that contains field b of all elements of array a. + Boolean isList; public exprNodeFieldDesc() {} - public exprNodeFieldDesc(TypeInfo typeInfo, exprNodeDesc desc, String fieldName) { + public exprNodeFieldDesc(TypeInfo typeInfo, exprNodeDesc desc, String fieldName, Boolean isList) { super(typeInfo); this.desc = desc; - this.fieldName = fieldName; + this.fieldName = fieldName; + this.isList = isList; } public exprNodeDesc getDesc() { @@ -45,7 +50,14 @@ public String getFieldName() { } public void setFieldName(String fieldName) { this.fieldName = fieldName; - } + } + public Boolean getIsList() { + return isList; + } + public void setIsList(Boolean isList) { + this.isList = isList; + } + @Override public String toString() { return this.desc.toString() + "." + this.fieldName; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/fetchWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/fetchWork.java new file mode 100644 index 000000000..07f816770 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/fetchWork.java @@ -0,0 +1,131 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.plan; + +import java.io.Serializable; +import java.util.Properties; + +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.serde2.Deserializer; +import org.apache.hadoop.mapred.InputFormat; + +@explain(displayName="Fetch Operator") +public class fetchWork implements Serializable { + private static final long serialVersionUID = 1L; + + // private loadFileDesc loadFileWork; + // private tableDesc tblDesc; + private Path srcDir; + private Properties schema; + private Class deserializerClass; + private Class inputFormatClass; + private int limit; + + public fetchWork() { } + + /** + * @param deserializer + * @param deserializerClass + * @param inputFormatClass + * @param schema + * @param srcDir + */ + public fetchWork(Path srcDir, + Class deserializerClass, + Class inputFormatClass, Properties schema, + int limit) { + this.srcDir = srcDir; + this.deserializerClass = deserializerClass; + this.inputFormatClass = inputFormatClass; + this.schema = schema; + this.limit = limit; + } + + /** + * @return the srcDir + */ + @explain(displayName="source") + public Path getSrcDir() { + return srcDir; + } + + /** + * @param srcDir the srcDir to set + */ + public void setSrcDir(Path srcDir) { + this.srcDir = srcDir; + } + + /** + * @return the schema + */ + public Properties getSchema() { + return schema; + } + + /** + * @param schema the schema to set + */ + public void setSchema(Properties schema) { + this.schema = schema; + } + + /** + * @return the deserializerClass + */ + public Class getDeserializerClass() { + return deserializerClass; + } + + /** + * @param deserializerClass the deserializerClass to set + */ + public void setDeserializerClass(Class deserializerClass) { + this.deserializerClass = deserializerClass; + } + + /** + * @return the inputFormatClass + */ + public Class getInputFormatClass() { + return inputFormatClass; + } + + /** + * @param inputFormatClass the inputFormatClass to set + */ + public void setInputFormatClass(Class inputFormatClass) { + this.inputFormatClass = inputFormatClass; + } + + /** + * @return the limit + */ + @explain(displayName="limit") + public int getLimit() { + return limit; + } + + /** + * @param limit the limit to set + */ + public void setLimit(int limit) { + this.limit = limit; + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/limitDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/limitDesc.java new file mode 100644 index 000000000..dce49d874 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/limitDesc.java @@ -0,0 +1,39 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.plan; + +import java.io.Serializable; + +@explain(displayName="Limit") +public class limitDesc implements Serializable { + private static final long serialVersionUID = 1L; + private int limit; + public limitDesc() { } + public limitDesc(final int limit) { + this.limit = limit; + } + + public int getLimit() { + return this.limit; + } + public void setLimit(final int limit) { + this.limit=limit; + } + +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/loadFileDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/loadFileDesc.java index babcfe1ad..102b80e4d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/loadFileDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/loadFileDesc.java @@ -19,22 +19,28 @@ package org.apache.hadoop.hive.ql.plan; import java.io.Serializable; +import java.util.List; + import org.apache.hadoop.hive.ql.plan.loadDesc; public class loadFileDesc extends loadDesc implements Serializable { private static final long serialVersionUID = 1L; private String targetDir; private boolean isDfsDir; + // list of columns, comma separated + private String columns; public loadFileDesc() { } public loadFileDesc( final String sourceDir, final String targetDir, - final boolean isDfsDir) { + final boolean isDfsDir, + final String columns) { super(sourceDir); this.targetDir = targetDir; this.isDfsDir = isDfsDir; + this.columns = columns; } @explain(displayName="destination") @@ -52,4 +58,18 @@ public boolean getIsDfsDir() { public void setIsDfsDir(final boolean isDfsDir) { this.isDfsDir = isDfsDir; } + + /** + * @return the columns + */ + public String getColumns() { + return columns; + } + + /** + * @param columns the columns to set + */ + public void setColumns(String columns) { + this.columns = columns; + } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredWork.java index 0656d5774..e0fc38f82 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredWork.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredWork.java @@ -49,6 +49,7 @@ public class mapredWork implements Serializable { private Integer numReduceTasks; private boolean needsTagging; + private boolean inferNumReducers; public mapredWork() { } public mapredWork( @@ -197,4 +198,13 @@ public boolean getNeedsTagging() { public void setNeedsTagging(boolean needsTagging) { this.needsTagging = needsTagging; } + + public boolean getInferNumReducers() { + return this.inferNumReducers; + } + + public void setInferNumReducers(boolean inferNumReducers) { + this.inferNumReducers = inferNumReducers; + } + } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/reduceSinkDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/reduceSinkDesc.java index d0fba4c48..3c028d2f0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/reduceSinkDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/reduceSinkDesc.java @@ -38,33 +38,25 @@ public class reduceSinkDesc implements Serializable { // If the value is -1, then data will go to a random reducer private int numPartitionFields; - public reduceSinkDesc() { } + private boolean inferNumReducers; + private int numReducers; - public reduceSinkDesc - (final java.util.ArrayList keyCols, - final java.util.ArrayList valueCols, - final int numPartitionFields, - final tableDesc keySerializeInfo, - final tableDesc valueSerializeInfo) { - this.keyCols = keyCols; - this.valueCols = valueCols; - this.tag = -1; - this.numPartitionFields = numPartitionFields; - 
this.keySerializeInfo = keySerializeInfo; - this.valueSerializeInfo = valueSerializeInfo; - } + public reduceSinkDesc() { } public reduceSinkDesc (java.util.ArrayList keyCols, java.util.ArrayList valueCols, int tag, int numPartitionFields, + int numReducers, + boolean inferNumReducers, final tableDesc keySerializeInfo, final tableDesc valueSerializeInfo) { this.keyCols = keyCols; this.valueCols = valueCols; - assert tag != -1; this.tag = tag; + this.numReducers = numReducers; + this.inferNumReducers = inferNumReducers; this.numPartitionFields = numPartitionFields; this.keySerializeInfo = keySerializeInfo; this.valueSerializeInfo = valueSerializeInfo; @@ -104,6 +96,20 @@ public void setTag(int tag) { this.tag = tag; } + public boolean getInferNumReducers() { + return this.inferNumReducers; + } + public void setInferNumReducers(boolean inferNumReducers) { + this.inferNumReducers = inferNumReducers; + } + + public int getNumReducers() { + return this.numReducers; + } + public void setNumReducers(int numReducers) { + this.numReducers = numReducers; + } + public tableDesc getKeySerializeInfo() { return keySerializeInfo; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/showPartitionsDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/showPartitionsDesc.java new file mode 100644 index 000000000..72f7feb8f --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/showPartitionsDesc.java @@ -0,0 +1,72 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.plan; + +import java.io.Serializable; +import org.apache.hadoop.fs.Path; + +@explain(displayName="Show Partitions") +public class showPartitionsDesc extends ddlDesc implements Serializable +{ + private static final long serialVersionUID = 1L; + String tabName; + Path resFile; + + /** + * @param tabName Name of the table whose partitions need to be listed + * @param resFile File to store the results in + */ + public showPartitionsDesc(String tabName, Path resFile) { + this.tabName = tabName; + this.resFile = resFile; + } + + /** + * @return the name of the table + */ + @explain(displayName="table") + public String getTabName() { + return tabName; + } + + /** + * @param tabName the table whose partitions have to be listed + */ + public void setTabName(String tabName) { + this.tabName = tabName; + } + + /** + * @return the results file + */ + public Path getResFile() { + return resFile; + } + + @explain(displayName="result file", normalExplain=false) + public String getResFileString() { + return getResFile().getName(); + } + /** + * @param resFile the results file to be used to return the results + */ + public void setResFile(Path resFile) { + this.resFile = resFile; + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/tableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/tableDesc.java index 26a38fcfb..3e50d8200 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/tableDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/tableDesc.java @@ -47,7 +47,7 @@ public tableDesc( public Class getDeserializerClass() { return this.deserializerClass; } - public void setDeserializerClass(final Class serdeClass) { + public void setDeserializerClass(final Class serdeClass) { this.deserializerClass = serdeClass; } public Class getInputFileFormatClass() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/StructTypeInfo.java b/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/StructTypeInfo.java index 16c7353d7..2f7d1f179 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/StructTypeInfo.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/StructTypeInfo.java @@ -95,12 +95,14 @@ public List getAllStructFieldTypeInfos() { } public TypeInfo getStructFieldTypeInfo(String field) { + String fieldLowerCase = field.toLowerCase(); for(int i=0; i s) { + if (s == null) { + return -1; + } + return s.size(); + } + public Integer evaluate(List s) { + if (s == null) { + return -1; + } + return s.size(); + } + } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrEq.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrEq.java index 5d94e0013..5df605813 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrEq.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrEq.java @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFStrEq extends UDF { +public class UDFStrEq implements UDF { private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.udf.UDFStrEq"); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrGe.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrGe.java index c4bdb9fa1..0dfb0fdde 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrGe.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrGe.java @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFStrGe extends UDF { +public class UDFStrGe implements UDF { public UDFStrGe() { } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrGt.java 
b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrGt.java index bbc123b9f..d407aa223 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrGt.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrGt.java @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFStrGt extends UDF { +public class UDFStrGt implements UDF { public UDFStrGt() { } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrLe.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrLe.java index 79db3dab7..8449b496c 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrLe.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrLe.java @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFStrLe extends UDF { +public class UDFStrLe implements UDF { public UDFStrLe() { } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrLt.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrLt.java index f61f39454..198e3181b 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrLt.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrLt.java @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFStrLt extends UDF { +public class UDFStrLt implements UDF { public UDFStrLt() { } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrNe.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrNe.java index de7fc71e6..7086172cf 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrNe.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrNe.java @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFStrNe extends UDF { +public class UDFStrNe implements UDF { public UDFStrNe() { } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSubstr.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSubstr.java index 5bafaaec1..70e0d5e17 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSubstr.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSubstr.java @@ -21,7 +21,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFSubstr extends UDF { +public class UDFSubstr implements UDF { public UDFSubstr() { } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java index 2dae79969..c51a55df7 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFToBoolean extends UDF { +public class UDFToBoolean implements UDF { private static Log LOG = LogFactory.getLog(UDFToBoolean.class.getName()); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java index a353af72e..abea8bb17 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFToByte extends UDF { +public class UDFToByte implements UDF { private static Log LOG = LogFactory.getLog(UDFToByte.class.getName()); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDate.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDate.java index bd50e90a9..74cc122fa 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDate.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDate.java @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class 
UDFToDate extends UDF { +public class UDFToDate implements UDF { private static Log LOG = LogFactory.getLog(UDFToDate.class.getName()); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java index 20810050e..77975d779 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFToDouble extends UDF { +public class UDFToDouble implements UDF { private static Log LOG = LogFactory.getLog(UDFToDouble.class.getName()); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java index e3ba5c2d5..574195936 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFToFloat extends UDF { +public class UDFToFloat implements UDF { private static Log LOG = LogFactory.getLog(UDFToFloat.class.getName()); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java index 45d593959..ee61fb229 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFToInteger extends UDF { +public class UDFToInteger implements UDF { private static Log LOG = LogFactory.getLog(UDFToInteger.class.getName()); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java index 37e1a5b60..73f45b4a2 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFToLong extends UDF { +public class UDFToLong implements UDF { private static Log LOG = LogFactory.getLog(UDFToLong.class.getName()); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java index 23fced3e8..f8a8f92cc 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFToString extends UDF { +public class UDFToString implements UDF { private static Log LOG = LogFactory.getLog(UDFToString.class.getName()); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFTrim.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFTrim.java index 4a059c605..1a11cf94d 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFTrim.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFTrim.java @@ -24,7 +24,7 @@ import java.util.regex.Pattern; import java.util.regex.Matcher; -public class UDFTrim extends UDF { +public class UDFTrim implements UDF { public UDFTrim() { } @@ -33,7 +33,7 @@ public String evaluate(String s) { if (s == null) { return null; } - return StringUtils.stripEnd(s, " "); + return StringUtils.strip(s, " "); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUpper.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUpper.java index 3e73c5e34..f1c3ab9ad 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUpper.java +++ 
b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUpper.java @@ -22,7 +22,7 @@ import java.util.regex.Pattern; import java.util.regex.Matcher; -public class UDFUpper extends UDF { +public class UDFUpper implements UDF { public UDFUpper() { } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java b/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java index cd3e95e63..16945b545 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java @@ -53,7 +53,7 @@ import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.session.SessionState; -import org.apache.hadoop.hive.ql.thrift.Complex; +import org.apache.hadoop.hive.serde2.thrift.test.Complex; import org.apache.hadoop.hive.serde.Constants; import org.apache.hadoop.mapred.SequenceFileInputFormat; import org.apache.hadoop.mapred.SequenceFileOutputFormat; @@ -277,7 +277,7 @@ public void createSources() throws Exception { Table srcThrift = new Table("src_thrift"); srcThrift.setInputFormatClass(SequenceFileInputFormat.class.getName()); srcThrift.setOutputFormatClass(SequenceFileOutputFormat.class.getName()); - srcThrift.setSerializationLib(ThriftDeserializer.shortName()); + srcThrift.setSerializationLib(ThriftDeserializer.class.getName()); srcThrift.setSerdeParam(Constants.SERIALIZATION_CLASS, Complex.class.getName()); srcThrift.setSerdeParam(Constants.SERIALIZATION_FORMAT, TBinaryProtocol.class.getName()); db.createTable(srcThrift); @@ -364,7 +364,6 @@ public void cliInit(String tname) throws Exception { CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class)); ss.in = System.in; - ss.err = System.err; File qf = new File(outDir, tname); File outf = null; @@ -372,6 +371,7 @@ public void cliInit(String tname) throws Exception { outf = new File(outf, qf.getName().concat(".out")); FileOutputStream fo = new FileOutputStream(outf); ss.out = new PrintStream(fo, true, "UTF-8"); + ss.err = ss.out; ss.setIsSilent(true); cliDriver = new CliDriver(ss); SessionState.start(ss); @@ -644,7 +644,10 @@ public CommonTree parseQuery(String tname) throws Exception { public List> analyzeAST(CommonTree ast) throws Exception { // Do semantic analysis and plan generation - sem.analyze(ast, new Context(conf)); + Context ctx = new Context(conf); + ctx.makeScratchDir(); + sem.analyze(ast, ctx); + ctx.removeScratchDir(); return sem.getRootTasks(); } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java index 8d276a467..5fe09995b 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java @@ -203,8 +203,7 @@ private void populateMapRedPlan1(Table src) { Operator op1 = OperatorFactory.get (PlanUtils.getReduceSinkDesc (Utilities.makeList(new exprNodeColumnDesc(String.class, "key")), - Utilities.makeList(new exprNodeColumnDesc(String.class, "value")), - 1)); + Utilities.makeList(new exprNodeColumnDesc(String.class, "value")), -1, 1, -1, false)); Utilities.addMapWork(mr, src, "a", op1); @@ -228,8 +227,7 @@ private void populateMapRedPlan2(Table src) { (PlanUtils.getReduceSinkDesc (Utilities.makeList(new exprNodeColumnDesc(String.class, "key")), Utilities.makeList(new exprNodeColumnDesc(String.class, "key"), - new exprNodeColumnDesc(String.class, "value")), - 1)); + new exprNodeColumnDesc(String.class, "value")), -1, 1, -1, false)); 
Utilities.addMapWork(mr, src, "a", op1); @@ -260,8 +258,7 @@ private void populateMapRedPlan3(Table src, Table src2) { (PlanUtils.getReduceSinkDesc (Utilities.makeList(new exprNodeColumnDesc(String.class, "key")), Utilities.makeList - (new exprNodeColumnDesc(String.class, "value")), Byte.valueOf((byte)0), - 1)); + (new exprNodeColumnDesc(String.class, "value")), Byte.valueOf((byte)0), 1, -1, false)); Utilities.addMapWork(mr, src, "a", op1); @@ -270,7 +267,7 @@ private void populateMapRedPlan3(Table src, Table src2) { (Utilities.makeList(new exprNodeColumnDesc(String.class, "key")), Utilities.makeList(new exprNodeColumnDesc(String.class, "key")), Byte.valueOf((byte)1), - Integer.MAX_VALUE)); + Integer.MAX_VALUE, -1, false)); Utilities.addMapWork(mr, src2, "b", op2); @@ -291,7 +288,8 @@ private void populateMapRedPlan3(Table src, Table src2) { new exprNodeColumnDesc(TypeInfoFactory.getListTypeInfo( TypeInfoFactory.getPrimitiveTypeInfo(String.class)), Utilities.ReduceField.VALUE.toString()), - "0"))), op4); + "0", + false))), op4); mr.setReducer(op5); } @@ -307,7 +305,7 @@ private void populateMapRedPlan4(Table src) { (Utilities.makeList(new exprNodeColumnDesc(String.class, "tkey")), Utilities.makeList(new exprNodeColumnDesc(String.class, "tkey"), new exprNodeColumnDesc(String.class, "tvalue")), - 1)); + -1, 1, -1, false)); Operator op0 = OperatorFactory.get (new scriptDesc("/bin/cat", @@ -343,7 +341,7 @@ private void populateMapRedPlan5(Table src) { (Utilities.makeList(new exprNodeColumnDesc(String.class, "0")), Utilities.makeList(new exprNodeColumnDesc(String.class, "0"), new exprNodeColumnDesc(String.class, "1")), - 1)); + -1, 1, -1, false)); Operator op4 = OperatorFactory.get(new selectDesc( Utilities.makeList(new exprNodeColumnDesc(String.class, "key"), @@ -373,7 +371,7 @@ private void populateMapRedPlan6(Table src) { Utilities.makeList(new exprNodeColumnDesc(String.class, "tkey")), Utilities.makeList(new exprNodeColumnDesc(String.class, "tkey"), new exprNodeColumnDesc(String.class, "tvalue")), - 1)); + -1, 1, -1, false)); Operator op0 = OperatorFactory.get (new scriptDesc("\'/bin/cat\'", diff --git a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java index 64ed3ca85..3b6d868b2 100755 --- a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java @@ -29,7 +29,7 @@ import org.apache.hadoop.hive.metastore.DB; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat; -import org.apache.hadoop.hive.ql.thrift.Complex; +import org.apache.hadoop.hive.serde2.thrift.test.Complex; import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe; import org.apache.hadoop.hive.serde2.ThriftDeserializer; import org.apache.hadoop.hive.serde.Constants; @@ -137,7 +137,8 @@ public void testTable() throws Throwable { // now that URI is set correctly, set the original table's uri and then compare the two tables tbl.setDataLocation(ft.getDataLocation()); assertTrue("Tables doesn't match: " + tableName, ft.getTTable().equals(tbl.getTTable())); - assertEquals("Serde is not set correctly", tbl.getDeserializer().getShortName(), ft.getDeserializer().getShortName()); + assertEquals("Serde is not set correctly", tbl.getDeserializer().getClass().getName(), ft.getDeserializer().getClass().getName()); + assertEquals("SerializationLib is not set correctly", tbl.getSerializationLib(), 
MetadataTypedColumnsetSerDe.class.getName()); } catch (HiveException e) { e.printStackTrace(); assertTrue("Unable to fetch table correctly: " + tableName, false); @@ -195,7 +196,8 @@ public void testThriftTable() throws Throwable { // now that URI is set correctly, set the original table's uri and then compare the two tables tbl.setDataLocation(ft.getDataLocation()); assertTrue("Tables doesn't match: " + tableName, ft.getTTable().equals(tbl.getTTable())); - assertEquals("Serde is not set correctly", tbl.getDeserializer().getShortName(), ft.getDeserializer().getShortName()); + assertEquals("SerializationLib is not set correctly", tbl.getSerializationLib(), ThriftDeserializer.class.getName()); + assertEquals("Serde is not set correctly", tbl.getDeserializer().getClass().getName(), ft.getDeserializer().getClass().getName()); } catch (HiveException e) { System.err.println(StringUtils.stringifyException(e)); assertTrue("Unable to fetch table correctly: " + tableName, false); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java index 3c13b1730..151710a16 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java @@ -23,7 +23,7 @@ /** * A UDF for testing, which evaluates the length of a string. */ -public class UDFTestLength extends UDF { +public class UDFTestLength implements UDF { public Integer evaluate(String s) { return s == null ? null : s.length(); } diff --git a/ql/src/test/org/apache/hadoop/hive/serde2/TestSerDe.java b/ql/src/test/org/apache/hadoop/hive/serde2/TestSerDe.java new file mode 100644 index 000000000..83e10884f --- /dev/null +++ b/ql/src/test/org/apache/hadoop/hive/serde2/TestSerDe.java @@ -0,0 +1,190 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.serde2; + +import java.io.UnsupportedEncodingException; +import java.nio.charset.CharacterCodingException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Properties; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.serde2.objectinspector.MetadataListStructObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; +import org.apache.hadoop.hive.serde2.objectinspector.StructField; +import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category; +import org.apache.hadoop.io.BytesWritable; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.io.Writable; +import org.apache.hadoop.hive.serde2.SerDeUtils; +import org.apache.hadoop.hive.serde2.SerDe; +import org.apache.hadoop.hive.serde2.SerDeException; + +public class TestSerDe implements SerDe { + + public static final Log LOG = LogFactory.getLog(TestSerDe.class.getName()); + + public String getShortName() { + return shortName(); + } + + + public static String shortName() { + return "test_meta"; + } + + static { + StackTraceElement[] sTrace = new Exception().getStackTrace(); + String className = sTrace[0].getClassName(); + try { + SerDeUtils.registerSerDe(shortName(), Class.forName(className)); + // For backward compatibility: this class replaces the following class. + SerDeUtils.registerSerDe("org.apache.hadoop.hive.serde.TestSerDe", Class.forName(className)); + } catch(Exception e) { + throw new RuntimeException(e); + } + } + + final public static String DefaultSeparator = "\002"; + + private String separator; + // constant for now, will make it configurable later. 
+ private String nullString = "\\N"; + private List columnNames; + private ObjectInspector cachedObjectInspector; + + public String toString() { + return "TestSerDe[" + separator + "," + columnNames + "]"; + } + + public TestSerDe() throws SerDeException { + separator = DefaultSeparator; + } + + public void initialize(Configuration job, Properties tbl) throws SerDeException { + separator = DefaultSeparator; + String alt_sep = tbl.getProperty("testserde.default.serialization.format"); + if(alt_sep != null && alt_sep.length() > 0) { + try { + byte b [] = new byte[1]; + b[0] = Byte.valueOf(alt_sep).byteValue(); + separator = new String(b); + } catch(NumberFormatException e) { + separator = alt_sep; + } + } + + String columnProperty = tbl.getProperty("columns"); + if (columnProperty == null || columnProperty.length() == 0) { + // Hack for tables with no columns + // Treat it as a table with a single column called "col" + cachedObjectInspector = ObjectInspectorFactory.getReflectionObjectInspector( + ColumnSet.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA); + } else { + columnNames = Arrays.asList(columnProperty.split(",")); + cachedObjectInspector = MetadataListStructObjectInspector.getInstance(columnNames); + } + LOG.info(getClass().getName() + ": initialized with columnNames: " + columnNames ); + } + + public static Object deserialize(ColumnSet c, String row, String sep, String nullString) throws Exception { + if (c.col == null) { + c.col = new ArrayList(); + } else { + c.col.clear(); + } + String [] l1 = row.split(sep, -1); + + for(String s: l1) { + if (s.equals(nullString)) { + c.col.add(null); + } else { + c.col.add(s); + } + } + return (c); + } + + ColumnSet deserializeCache = new ColumnSet(); + public Object deserialize(Writable field) throws SerDeException { + String row = null; + if (field instanceof BytesWritable) { + BytesWritable b = (BytesWritable)field; + try { + row = Text.decode(b.get(), 0, b.getSize()); + } catch (CharacterCodingException e) { + throw new SerDeException(e); + } + } else if (field instanceof Text) { + row = field.toString(); + } + try { + deserialize(deserializeCache, row, separator, nullString); + if (columnNames != null) { + assert(columnNames.size() == deserializeCache.col.size()); + } + return deserializeCache; + } catch (ClassCastException e) { + throw new SerDeException( this.getClass().getName() + " expects Text or BytesWritable", e); + } catch (Exception e) { + throw new SerDeException(e); + } + } + + + public ObjectInspector getObjectInspector() throws SerDeException { + return cachedObjectInspector; + } + + public Class getSerializedClass() { + return Text.class; + } + + Text serializeCache = new Text(); + public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException { + + if (objInspector.getCategory() != Category.STRUCT) { + throw new SerDeException(getClass().toString() + + " can only serialize struct types, but we got: " + objInspector.getTypeName()); + } + StructObjectInspector soi = (StructObjectInspector) objInspector; + List fields = soi.getAllStructFieldRefs(); + + StringBuilder sb = new StringBuilder(); + for(int i=0; i0) sb.append(separator); + Object column = soi.getStructFieldData(obj, fields.get(i)); + if (fields.get(i).getFieldObjectInspector().getCategory() == Category.PRIMITIVE) { + // For primitive object, serialize to plain string + sb.append(column == null ? 
nullString : column.toString()); + } else { + // For complex object, serialize to JSON format + sb.append(SerDeUtils.getJSONString(column, fields.get(i).getFieldObjectInspector())); + } + } + serializeCache.set(sb.toString()); + return serializeCache; + } + +} diff --git a/ql/src/test/queries/clientnegative/strict_pruning.q b/ql/src/test/queries/clientnegative/strict_pruning.q new file mode 100644 index 000000000..91777bb05 --- /dev/null +++ b/ql/src/test/queries/clientnegative/strict_pruning.q @@ -0,0 +1,6 @@ +set hive.partition.pruning=strict; + +EXPLAIN +SELECT count(1) FROM srcpart; + +SELECT count(1) FROM srcpart; diff --git a/ql/src/test/queries/clientpositive/case_sensitivity.q b/ql/src/test/queries/clientpositive/case_sensitivity.q new file mode 100644 index 000000000..073a31a07 --- /dev/null +++ b/ql/src/test/queries/clientpositive/case_sensitivity.q @@ -0,0 +1,10 @@ +CREATE TABLE DEST1(Key INT, VALUE STRING); + +EXPLAIN +FROM SRC_THRIFT +INSERT OVERWRITE TABLE dest1 SELECT src_Thrift.LINT[1], src_thrift.lintstring[0].MYSTRING where src_thrift.liNT[0] > 0; + +FROM SRC_THRIFT +INSERT OVERWRITE TABLE dest1 SELECT src_Thrift.LINT[1], src_thrift.lintstring[0].MYSTRING where src_thrift.liNT[0] > 0; + +SELECT DEST1.* FROM Dest1; diff --git a/ql/src/test/queries/clientpositive/groupby1_limit.q b/ql/src/test/queries/clientpositive/groupby1_limit.q new file mode 100644 index 000000000..7763120c8 --- /dev/null +++ b/ql/src/test/queries/clientpositive/groupby1_limit.q @@ -0,0 +1,10 @@ +set mapred.reduce.tasks=31; + +CREATE TABLE dest1(key INT, value DOUBLE); + +EXPLAIN +FROM src INSERT OVERWRITE TABLE dest1 SELECT src.key, sum(substr(src.value,4)) GROUP BY src.key LIMIT 5; + +FROM src INSERT OVERWRITE TABLE dest1 SELECT src.key, sum(substr(src.value,4)) GROUP BY src.key LIMIT 5; + +SELECT dest1.* FROM dest1; diff --git a/ql/src/test/queries/clientpositive/groupby2_limit.q b/ql/src/test/queries/clientpositive/groupby2_limit.q new file mode 100644 index 000000000..7064438b7 --- /dev/null +++ b/ql/src/test/queries/clientpositive/groupby2_limit.q @@ -0,0 +1,7 @@ +set mapred.reduce.tasks=31; + +EXPLAIN +SELECT src.key, sum(substr(src.value,4)) FROM src GROUP BY src.key LIMIT 5; + +SELECT src.key, sum(substr(src.value,4)) FROM src GROUP BY src.key LIMIT 5; + diff --git a/ql/src/test/queries/clientpositive/input.q b/ql/src/test/queries/clientpositive/input.q new file mode 100644 index 000000000..567d3b07f --- /dev/null +++ b/ql/src/test/queries/clientpositive/input.q @@ -0,0 +1,4 @@ +EXPLAIN +SELECT x.* FROM SRC x; + +SELECT x.* FROM SRC x; diff --git a/ql/src/test/queries/clientpositive/input11_limit.q b/ql/src/test/queries/clientpositive/input11_limit.q new file mode 100644 index 000000000..13b88fcbf --- /dev/null +++ b/ql/src/test/queries/clientpositive/input11_limit.q @@ -0,0 +1,10 @@ +CREATE TABLE dest1(key INT, value STRING); + +EXPLAIN +FROM src +INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 100 LIMIT 10; + +FROM src +INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 100 LIMIT 10; + +SELECT dest1.* FROM dest1; diff --git a/ql/src/test/queries/clientpositive/input14_limit.q b/ql/src/test/queries/clientpositive/input14_limit.q new file mode 100644 index 000000000..570f7f00c --- /dev/null +++ b/ql/src/test/queries/clientpositive/input14_limit.q @@ -0,0 +1,20 @@ +CREATE TABLE dest1(key INT, value STRING); + +EXPLAIN +FROM ( + FROM src + SELECT TRANSFORM(src.key, src.value) AS (tkey, tvalue) + USING '/bin/cat' + CLUSTER BY tkey LIMIT 20 +) tmap 
+INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100; + +FROM ( + FROM src + SELECT TRANSFORM(src.key, src.value) AS (tkey, tvalue) + USING '/bin/cat' + CLUSTER BY tkey LIMIT 20 +) tmap +INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100; + +SELECT dest1.* FROM dest1; diff --git a/ql/src/test/queries/clientpositive/input16.q b/ql/src/test/queries/clientpositive/input16.q new file mode 100644 index 000000000..365b1a07b --- /dev/null +++ b/ql/src/test/queries/clientpositive/input16.q @@ -0,0 +1,5 @@ +-- TestSerDe is a user defined serde where the default delimiter is Ctrl-B +CREATE TABLE INPUT16(KEY STRING, VALUE STRING) ROW FORMAT SERIALIZER 'org.apache.hadoop.hive.serde2.TestSerDe'; +LOAD DATA LOCAL INPATH '../data/files/kv1_cb.txt' INTO TABLE INPUT16; +SELECT INPUT16.VALUE, INPUT16.KEY FROM INPUT16; +DROP TABLE INPUT16; diff --git a/ql/src/test/queries/clientpositive/input16_cc.q b/ql/src/test/queries/clientpositive/input16_cc.q new file mode 100644 index 000000000..9537401d4 --- /dev/null +++ b/ql/src/test/queries/clientpositive/input16_cc.q @@ -0,0 +1,6 @@ +-- TestSerDe is a user defined serde where the default delimiter is Ctrl-B +-- the user is overwriting it with ctrlC +CREATE TABLE INPUT16_CC(KEY STRING, VALUE STRING) ROW FORMAT SERIALIZER 'org.apache.hadoop.hive.serde2.TestSerDe' with serdeproperties ('testserde.default.serialization.format'='\003', 'dummy.prop.not.used'='dummyy.val'); +LOAD DATA LOCAL INPATH '../data/files/kv1_cc.txt' INTO TABLE INPUT16_CC; +SELECT INPUT16_CC.VALUE, INPUT16_CC.KEY FROM INPUT16_CC; +DROP TABLE INPUT16_CC; diff --git a/ql/src/test/queries/clientpositive/input1_limit.q b/ql/src/test/queries/clientpositive/input1_limit.q new file mode 100644 index 000000000..730440228 --- /dev/null +++ b/ql/src/test/queries/clientpositive/input1_limit.q @@ -0,0 +1,18 @@ +CREATE TABLE dest1(key INT, value STRING); +CREATE TABLE dest2(key INT, value STRING); + +EXPLAIN +FROM src +INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 100 LIMIT 10 +INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key < 100 LIMIT 5; + +FROM src +INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 100 LIMIT 10 +INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key < 100 LIMIT 5; + +SELECT dest1.* FROM dest1; +SELECT dest2.* FROM dest2; + +DROP TABLE dest1; +DROP TABLE dest2; + diff --git a/ql/src/test/queries/clientpositive/input2_limit.q b/ql/src/test/queries/clientpositive/input2_limit.q new file mode 100644 index 000000000..81045100d --- /dev/null +++ b/ql/src/test/queries/clientpositive/input2_limit.q @@ -0,0 +1,4 @@ +EXPLAIN +SELECT x.* FROM SRC x WHERE x.key < 300 LIMIT 5; + +SELECT x.* FROM SRC x WHERE x.key < 300 LIMIT 5; diff --git a/ql/src/test/queries/clientpositive/input3.q b/ql/src/test/queries/clientpositive/input3.q index c8d514e80..53ec0b40d 100644 --- a/ql/src/test/queries/clientpositive/input3.q +++ b/ql/src/test/queries/clientpositive/input3.q @@ -1,3 +1,6 @@ +DROP TABLE TEST3a; +DROP TABLE TEST3b; +DROP TABLE TEST3c; CREATE TABLE TEST3a(A INT, B FLOAT); DESCRIBE TEST3a; CREATE TABLE TEST3b(A ARRAY, B FLOAT, C MAP); @@ -12,5 +15,9 @@ ALTER TABLE TEST3b RENAME TO TEST3c; ALTER TABLE TEST3b RENAME TO TEST3c; DESCRIBE TEST3c; SHOW TABLES; +EXPLAIN +ALTER TABLE TEST3c REPLACE COLUMNS (R1 INT, R2 FLOAT); +ALTER TABLE TEST3c REPLACE COLUMNS (R1 INT, R2 FLOAT); +DESCRIBE EXTENDED TEST3c; DROP TABLE TEST3c; DROP TABLE TEST3a; diff --git 
a/ql/src/test/queries/clientpositive/input4_cb_delim.q b/ql/src/test/queries/clientpositive/input4_cb_delim.q new file mode 100644 index 000000000..46bfb28b0 --- /dev/null +++ b/ql/src/test/queries/clientpositive/input4_cb_delim.q @@ -0,0 +1,4 @@ +CREATE TABLE INPUT4_CB(KEY STRING, VALUE STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\002' LINES TERMINATED BY '\012'; +LOAD DATA LOCAL INPATH '../data/files/kv1_cb.txt' INTO TABLE INPUT4_CB; +SELECT INPUT4_CB.VALUE, INPUT4_CB.KEY FROM INPUT4_CB; +DROP TABLE INPUT4_CB diff --git a/ql/src/test/queries/clientpositive/input_dfs.q b/ql/src/test/queries/clientpositive/input_dfs.q new file mode 100644 index 000000000..4f1c30992 --- /dev/null +++ b/ql/src/test/queries/clientpositive/input_dfs.q @@ -0,0 +1,4 @@ +dfs -cat ../../../../build/contrib/hive/ql/test/data/files/kv1.txt; +set fs.default.name=file://src +dfs -ls + diff --git a/ql/src/test/queries/clientpositive/input_dynamicserde.q b/ql/src/test/queries/clientpositive/input_dynamicserde.q new file mode 100644 index 000000000..f3dce64e3 --- /dev/null +++ b/ql/src/test/queries/clientpositive/input_dynamicserde.q @@ -0,0 +1,15 @@ +CREATE TABLE dest1(a array, b array, c map, d int, e string) +ROW FORMAT DELIMITED +FIELDS TERMINATED BY '1' +COLLECTION ITEMS TERMINATED BY '2' +MAP KEYS TERMINATED BY '3' +LINES TERMINATED BY '10'; + +EXPLAIN +FROM src_thrift +INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint, src_thrift.lstring, src_thrift.mstringstring, src_thrift.aint, src_thrift.astring; + +FROM src_thrift +INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint, src_thrift.lstring, src_thrift.mstringstring, src_thrift.aint, src_thrift.astring; + +SELECT dest1.* FROM dest1; diff --git a/ql/src/test/queries/clientpositive/input_limit.q b/ql/src/test/queries/clientpositive/input_limit.q new file mode 100644 index 000000000..6d4363e59 --- /dev/null +++ b/ql/src/test/queries/clientpositive/input_limit.q @@ -0,0 +1,4 @@ +EXPLAIN +SELECT x.* FROM SRC x LIMIT 20; + +SELECT x.* FROM SRC x LIMIT 20; diff --git a/ql/src/test/queries/clientpositive/input_part1.q b/ql/src/test/queries/clientpositive/input_part1.q index a48c823fb..761cbbc76 100644 --- a/ql/src/test/queries/clientpositive/input_part1.q +++ b/ql/src/test/queries/clientpositive/input_part1.q @@ -1,6 +1,6 @@ CREATE TABLE dest1(key INT, value STRING, hr STRING, ds STRING); -EXPLAIN +EXPLAIN EXTENDED FROM srcpart INSERT OVERWRITE TABLE dest1 SELECT srcpart.key, srcpart.value, srcpart.hr, srcpart.ds WHERE srcpart.key < 100 and srcpart.ds = '2008-04-08' and srcpart.hr = '12'; diff --git a/ql/src/test/queries/clientpositive/input_part2.q b/ql/src/test/queries/clientpositive/input_part2.q new file mode 100644 index 000000000..c7e2c0db3 --- /dev/null +++ b/ql/src/test/queries/clientpositive/input_part2.q @@ -0,0 +1,16 @@ +CREATE TABLE dest1(key INT, value STRING, hr STRING, ds STRING); +CREATE TABLE dest2(key INT, value STRING, hr STRING, ds STRING); + +EXPLAIN EXTENDED +FROM srcpart +INSERT OVERWRITE TABLE dest1 SELECT srcpart.key, srcpart.value, srcpart.hr, srcpart.ds WHERE srcpart.key < 100 and srcpart.ds = '2008-04-08' and srcpart.hr = '12' +INSERT OVERWRITE TABLE dest2 SELECT srcpart.key, srcpart.value, srcpart.hr, srcpart.ds WHERE srcpart.key < 100 and srcpart.ds = '2008-04-09' and srcpart.hr = '12'; + +FROM srcpart +INSERT OVERWRITE TABLE dest1 SELECT srcpart.key, srcpart.value, srcpart.hr, srcpart.ds WHERE srcpart.key < 100 and srcpart.ds = '2008-04-08' and srcpart.hr = '12' +INSERT OVERWRITE TABLE dest2 SELECT srcpart.key, srcpart.value, 
srcpart.hr, srcpart.ds WHERE srcpart.key < 100 and srcpart.ds = '2008-04-09' and srcpart.hr = '12'; + +SELECT dest1.* FROM dest1; +SELECT dest2.* FROM dest2; + +drop table dest2; diff --git a/ql/src/test/queries/clientpositive/input_testxpath.q b/ql/src/test/queries/clientpositive/input_testxpath.q index 73113020a..830eac02e 100755 --- a/ql/src/test/queries/clientpositive/input_testxpath.q +++ b/ql/src/test/queries/clientpositive/input_testxpath.q @@ -1,10 +1,10 @@ -CREATE TABLE dest1(key INT, value STRING); +CREATE TABLE dest1(key INT, value STRING, mapvalue STRING); EXPLAIN FROM src_thrift -INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint[1], src_thrift.lintstring[0].mystring; +INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint[1], src_thrift.lintstring[0].mystring, src_thrift.mstringstring['key_2']; FROM src_thrift -INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint[1], src_thrift.lintstring[0].mystring; +INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint[1], src_thrift.lintstring[0].mystring, src_thrift.mstringstring['key_2']; SELECT dest1.* FROM dest1; diff --git a/ql/src/test/queries/clientpositive/input_testxpath2.q b/ql/src/test/queries/clientpositive/input_testxpath2.q new file mode 100644 index 000000000..47d7ce788 --- /dev/null +++ b/ql/src/test/queries/clientpositive/input_testxpath2.q @@ -0,0 +1,10 @@ +CREATE TABLE dest1(lint_size INT, lintstring_size INT, mstringstring_size INT); + +EXPLAIN +FROM src_thrift +INSERT OVERWRITE TABLE dest1 SELECT size(src_thrift.lint), size(src_thrift.lintstring), size(src_thrift.mstringstring) where src_thrift.lint IS NOT NULL AND NOT (src_thrift.mstringstring IS NULL); + +FROM src_thrift +INSERT OVERWRITE TABLE dest1 SELECT size(src_thrift.lint), size(src_thrift.lintstring), size(src_thrift.mstringstring) where src_thrift.lint IS NOT NULL AND NOT (src_thrift.mstringstring IS NULL); + +SELECT dest1.* FROM dest1; diff --git a/ql/src/test/queries/clientpositive/input_testxpath3.q b/ql/src/test/queries/clientpositive/input_testxpath3.q new file mode 100644 index 000000000..1b57ead82 --- /dev/null +++ b/ql/src/test/queries/clientpositive/input_testxpath3.q @@ -0,0 +1,6 @@ +EXPLAIN +FROM src_thrift +SELECT src_thrift.mstringstring['key_9'], src_thrift.lintstring.myint; + +FROM src_thrift +SELECT src_thrift.mstringstring['key_9'], src_thrift.lintstring.myint; diff --git a/ql/src/test/queries/clientpositive/inputddl4.q b/ql/src/test/queries/clientpositive/inputddl4.q index 85fcf6d0f..924e186c5 100644 --- a/ql/src/test/queries/clientpositive/inputddl4.q +++ b/ql/src/test/queries/clientpositive/inputddl4.q @@ -1,4 +1,5 @@ -- a simple test to test sorted/clustered syntax +DROP TABLE INPUTDDL4; CREATE TABLE INPUTDDL4(viewTime DATETIME, userid INT, page_url STRING, referrer_url STRING, friends ARRAY, properties MAP, @@ -7,4 +8,5 @@ CREATE TABLE INPUTDDL4(viewTime DATETIME, userid INT, PARTITIONED BY(ds DATETIME, country STRING) CLUSTERED BY(userid) SORTED BY(viewTime) INTO 32 BUCKETS; DESCRIBE INPUTDDL4; +DESCRIBE EXTENDED INPUTDDL4; DROP TABLE INPUTDDL4; diff --git a/ql/src/test/queries/clientpositive/inputddl6.q b/ql/src/test/queries/clientpositive/inputddl6.q new file mode 100644 index 000000000..f12eced5f --- /dev/null +++ b/ql/src/test/queries/clientpositive/inputddl6.q @@ -0,0 +1,16 @@ +-- test for describe extended table +-- test for describe extended table partition +-- test for alter table drop partition +DROP TABLE INPUTDDL6; +CREATE TABLE INPUTDDL6(KEY STRING, VALUE STRING) PARTITIONED BY(ds DATETIME); +LOAD DATA LOCAL INPATH 
'../data/files/kv1.txt' INTO TABLE INPUTDDL6 PARTITION (ds='2008-04-09'); +LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUTDDL6 PARTITION (ds='2008-04-08'); +DESCRIBE EXTENDED INPUTDDL6; +DESCRIBE EXTENDED INPUTDDL6 PARTITION (ds='2008-04-08'); +SHOW PARTITIONS INPUTDDL6; +ALTER TABLE INPUTDDL6 DROP PARTITION (ds='2008-04-08'); +SHOW PARTITIONS INPUTDDL6; +DROP TABLE INPUTDDL6; +EXPLAIN +DESCRIBE EXTENDED INPUTDDL6 PARTITION (ds='2008-04-09'); + diff --git a/ql/src/test/queries/clientpositive/join9.q b/ql/src/test/queries/clientpositive/join9.q new file mode 100644 index 000000000..b17914956 --- /dev/null +++ b/ql/src/test/queries/clientpositive/join9.q @@ -0,0 +1,10 @@ +CREATE TABLE dest1(key INT, value STRING); + +EXPLAIN EXTENDED +FROM srcpart src1 JOIN src src2 ON (src1.key = src2.key) +INSERT OVERWRITE TABLE dest1 SELECT src1.key, src2.value where src1.ds = '2008-04-08' and src1.hr = '12'; + +FROM srcpart src1 JOIN src src2 ON (src1.key = src2.key) +INSERT OVERWRITE TABLE dest1 SELECT src1.key, src2.value where src1.ds = '2008-04-08' and src1.hr = '12'; + +SELECT dest1.* FROM dest1; diff --git a/ql/src/test/queries/clientpositive/nullinput.q b/ql/src/test/queries/clientpositive/nullinput.q new file mode 100644 index 000000000..119358fb5 --- /dev/null +++ b/ql/src/test/queries/clientpositive/nullinput.q @@ -0,0 +1,4 @@ +create table tstnullinut(a string, b string); +select x.* from tstnullinut x; +select x.a, count(1) from tstnullinut x group by x.a; +drop table tstnullinut; diff --git a/ql/src/test/queries/clientpositive/show_tables.q b/ql/src/test/queries/clientpositive/show_tables.q new file mode 100644 index 000000000..160ff3252 --- /dev/null +++ b/ql/src/test/queries/clientpositive/show_tables.q @@ -0,0 +1,15 @@ +CREATE TABLE shtb_test1(KEY STRING, VALUE STRING) PARTITIONED BY(ds DATETIME); +CREATE TABLE shtb_test2(KEY STRING, VALUE STRING) PARTITIONED BY(ds DATETIME); + +EXPLAIN +SHOW TABLES 'shtb_*'; + +SHOW TABLES 'shtb_*'; + +EXPLAIN +SHOW TABLES 'shtb_test1|shtb_test2'; + +SHOW TABLES 'shtb_test1|shtb_test2'; + +DROP TABLE shtb_test1; +DROP TABLE shtb_test2; diff --git a/ql/src/test/queries/clientpositive/showparts.q b/ql/src/test/queries/clientpositive/showparts.q new file mode 100644 index 000000000..dbee3efc2 --- /dev/null +++ b/ql/src/test/queries/clientpositive/showparts.q @@ -0,0 +1,4 @@ +EXPLAIN +SHOW PARTITIONS srcpart; + +SHOW PARTITIONS srcpart; diff --git a/ql/src/test/queries/clientpositive/subq2.q b/ql/src/test/queries/clientpositive/subq2.q new file mode 100644 index 000000000..7e174da45 --- /dev/null +++ b/ql/src/test/queries/clientpositive/subq2.q @@ -0,0 +1,8 @@ +EXPLAIN +SELECT a.k, a.c +FROM (SELECT b.key as k, count(1) as c FROM src b GROUP BY b.key) a +WHERE a.k >= 90; + +SELECT a.k, a.c +FROM (SELECT b.key as k, count(1) as c FROM src b GROUP BY b.key) a +WHERE a.k >= 90; diff --git a/ql/src/test/queries/clientpositive/udf2.q b/ql/src/test/queries/clientpositive/udf2.q index f877df114..19983a3b1 100644 --- a/ql/src/test/queries/clientpositive/udf2.q +++ b/ql/src/test/queries/clientpositive/udf2.q @@ -1,10 +1,8 @@ -EXPLAIN -CREATE TEMPORARY FUNCTION testlength AS 'org.apache.hadoop.hive.ql.udf.UDFTestLength'; - -CREATE TEMPORARY FUNCTION testlength AS 'org.apache.hadoop.hive.ql.udf.UDFTestLength'; +CREATE TABLE dest1(c1 STRING); -CREATE TABLE dest1(len INT); +FROM src INSERT OVERWRITE TABLE dest1 SELECT ' abc ' WHERE src.key = 86; -FROM src INSERT OVERWRITE TABLE dest1 SELECT testlength(src.value); +EXPLAIN +SELECT '|', trim(dest1.c1), '|', 
rtrim(dest1.c1), '|', ltrim(dest1.c1), '|' FROM dest1; -SELECT dest1.* FROM dest1; +SELECT '|', trim(dest1.c1), '|', rtrim(dest1.c1), '|', ltrim(dest1.c1), '|' FROM dest1; diff --git a/ql/src/test/queries/clientpositive/udf3.q b/ql/src/test/queries/clientpositive/udf3.q new file mode 100644 index 000000000..6e62235c2 --- /dev/null +++ b/ql/src/test/queries/clientpositive/udf3.q @@ -0,0 +1,10 @@ +CREATE TABLE dest1(c1 STRING, c2 STRING, c3 STRING, c4 STRING, c5 STRING); + +EXPLAIN +FROM src INSERT OVERWRITE TABLE dest1 SELECT count(CAST('' AS INT)), sum(CAST('' AS INT)), avg(CAST('' AS INT)), +min(CAST('' AS INT)), max(CAST('' AS INT)); + +FROM src INSERT OVERWRITE TABLE dest1 SELECT count(CAST('' AS INT)), sum(CAST('' AS INT)), avg(CAST('' AS INT)), +min(CAST('' AS INT)), max(CAST('' AS INT)); + +SELECT dest1.* FROM dest1; diff --git a/ql/src/test/queries/clientpositive/udf4.q b/ql/src/test/queries/clientpositive/udf4.q new file mode 100644 index 000000000..b0139dfe2 --- /dev/null +++ b/ql/src/test/queries/clientpositive/udf4.q @@ -0,0 +1,8 @@ +CREATE TABLE dest1(c1 STRING); + +FROM src INSERT OVERWRITE TABLE dest1 SELECT ' abc ' WHERE src.key = 86; + +EXPLAIN +SELECT round(1.0), round(1.5), round(-1.5), floor(1.0), floor(1.5), floor(-1.5), ceil(1.0), ceil(1.5), ceil(-1.5), ceiling(1.0), rand(3), +3, -3, 1++2, 1+-2, ~1 FROM dest1; + +SELECT round(1.0), round(1.5), round(-1.5), floor(1.0), floor(1.5), floor(-1.5), ceil(1.0), ceil(1.5), ceil(-1.5), ceiling(1.0), rand(3), +3, -3, 1++2, 1+-2, ~1 FROM dest1; diff --git a/ql/src/test/queries/negative/invalid_dot.q b/ql/src/test/queries/negative/invalid_dot.q new file mode 100644 index 000000000..36b9bd2a3 --- /dev/null +++ b/ql/src/test/queries/negative/invalid_dot.q @@ -0,0 +1,2 @@ +FROM src +INSERT OVERWRITE TABLE dest1 SELECT src.value.member WHERE src.key < 100 diff --git a/ql/src/test/queries/negative/invalid_index.q b/ql/src/test/queries/negative/invalid_index.q new file mode 100644 index 000000000..146bc5dc9 --- /dev/null +++ b/ql/src/test/queries/negative/invalid_index.q @@ -0,0 +1,2 @@ +FROM src +INSERT OVERWRITE TABLE dest1 SELECT src.key[0], src.value diff --git a/ql/src/test/queries/negative/invalid_list_index.q b/ql/src/test/queries/negative/invalid_list_index.q new file mode 100644 index 000000000..c40f079f6 --- /dev/null +++ b/ql/src/test/queries/negative/invalid_list_index.q @@ -0,0 +1,2 @@ +FROM src_thrift +INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint[0], src_thrift.lstring['abc'] diff --git a/ql/src/test/queries/negative/invalid_list_index2.q b/ql/src/test/queries/negative/invalid_list_index2.q new file mode 100644 index 000000000..99d0b3d41 --- /dev/null +++ b/ql/src/test/queries/negative/invalid_list_index2.q @@ -0,0 +1,2 @@ +FROM src_thrift +INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint[0], src_thrift.lstring[1 + 2] diff --git a/ql/src/test/queries/negative/invalid_map_index.q b/ql/src/test/queries/negative/invalid_map_index.q new file mode 100644 index 000000000..c2b9eab61 --- /dev/null +++ b/ql/src/test/queries/negative/invalid_map_index.q @@ -0,0 +1,2 @@ +FROM src_thrift +INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint[0], src_thrift.mstringstring[0] diff --git a/ql/src/test/queries/negative/invalid_map_index2.q b/ql/src/test/queries/negative/invalid_map_index2.q new file mode 100644 index 000000000..5828f0709 --- /dev/null +++ b/ql/src/test/queries/negative/invalid_map_index2.q @@ -0,0 +1,2 @@ +FROM src_thrift +INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint[0], src_thrift.mstringstring[concat('abc', 
'abc')] diff --git a/ql/src/test/queries/positive/case_sensitivity.q b/ql/src/test/queries/positive/case_sensitivity.q new file mode 100644 index 000000000..d7f737150 --- /dev/null +++ b/ql/src/test/queries/positive/case_sensitivity.q @@ -0,0 +1,2 @@ +FROM SRC_THRIFT +INSERT OVERWRITE TABLE dest1 SELECT src_Thrift.LINT[1], src_thrift.lintstring[0].MYSTRING where src_thrift.liNT[0] > 0 diff --git a/ql/src/test/queries/positive/input_testxpath.q b/ql/src/test/queries/positive/input_testxpath.q index 35b53ec9b..b8abe04b2 100755 --- a/ql/src/test/queries/positive/input_testxpath.q +++ b/ql/src/test/queries/positive/input_testxpath.q @@ -1,2 +1,2 @@ FROM src_thrift -INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint[1], src_thrift.lintstring[0].mystring +INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint[1], src_thrift.lintstring[0].mystring, src_thrift.mstringstring['key_2'] diff --git a/ql/src/test/queries/positive/input_testxpath2.q b/ql/src/test/queries/positive/input_testxpath2.q new file mode 100644 index 000000000..c5380bd24 --- /dev/null +++ b/ql/src/test/queries/positive/input_testxpath2.q @@ -0,0 +1,2 @@ +FROM src_thrift +INSERT OVERWRITE TABLE dest1 SELECT size(src_thrift.lint), size(src_thrift.lintstring), size(src_thrift.mstringstring) where src_thrift.lint IS NOT NULL AND NOT (src_thrift.mstringstring IS NULL) diff --git a/ql/src/test/queries/positive/udf4.q b/ql/src/test/queries/positive/udf4.q new file mode 100644 index 000000000..b6a7fd992 --- /dev/null +++ b/ql/src/test/queries/positive/udf4.q @@ -0,0 +1 @@ +SELECT round(1.0), round(1.5), round(-1.5), floor(1.0), floor(1.5), floor(-1.5), ceil(1.0), ceil(1.5), ceil(-1.5), ceiling(1.0), rand(3), +3, -3, 1++2, 1+-2, ~1 FROM dest1 diff --git a/ql/src/test/results/clientnegative/strict_pruning.q.out b/ql/src/test/results/clientnegative/strict_pruning.q.out new file mode 100644 index 000000000..ab6898c48 --- /dev/null +++ b/ql/src/test/results/clientnegative/strict_pruning.q.out @@ -0,0 +1 @@ +FAILED: Error in semantic analysis: line 2:7 No Partition Predicate Found 1: for Alias srcpart Table srcpart diff --git a/ql/src/test/results/clientpositive/case_sensitivity.q.out b/ql/src/test/results/clientpositive/case_sensitivity.q.out new file mode 100644 index 000000000..fce2e904e --- /dev/null +++ b/ql/src/test/results/clientpositive/case_sensitivity.q.out @@ -0,0 +1,49 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF SRC_THRIFT)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR ([ (TOK_COLREF src_Thrift LINT) 1)) (TOK_SELEXPR (. 
([ (TOK_COLREF src_thrift lintstring) 0) MYSTRING))) (TOK_WHERE (> ([ (TOK_COLREF src_thrift liNT) 0) 0)))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src_thrift + Filter Operator + predicate: + expr: (lint[0] > 0) + type: Boolean + Select Operator + expressions: + expr: lint[1] + type: int + expr: lintstring[0].MYSTRING + type: string + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + + Stage: Stage-0 + Move Operator + tables: + replace: + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + + +2 1 +4 8 +6 27 +8 64 +10 125 +12 216 +14 343 +16 512 +18 729 diff --git a/ql/src/test/results/clientpositive/cast1.q.out b/ql/src/test/results/clientpositive/cast1.q.out index 161f3361b..b2b81b5ef 100644 --- a/ql/src/test/results/clientpositive/cast1.q.out +++ b/ql/src/test/results/clientpositive/cast1.q.out @@ -32,21 +32,20 @@ STAGE PLANS: type: int File Output Operator table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 Stage: Stage-0 Move Operator tables: + replace: table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - replace: + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 5 5.0 5.0 5.0 5 false 1 - diff --git a/ql/src/test/results/clientpositive/groupby1.q.out b/ql/src/test/results/clientpositive/groupby1.q.out index 2c5210cac..cd07a4a27 100644 --- a/ql/src/test/results/clientpositive/groupby1.q.out +++ b/ql/src/test/results/clientpositive/groupby1.q.out @@ -9,46 +9,51 @@ STAGE DEPENDENCIES: STAGE PLANS: Stage: Stage-1 Map Reduce - Reduce Operator Tree: - Group By Operator - keys: - expr: VALUE.2 - type: string - mode: partial1 - - expr: sum(VALUE.3) - File Output Operator - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat Alias -> Map Operator Tree: src Reduce Output Operator - tag: -1 key expressions: - expr: 0 - type: int - value expressions: - expr: key - type: string - expr: value - type: string expr: key type: string + # partition fields: -1 + tag: -1 + value expressions: expr: substr(value, 4) type: string - # partition fields: -1 + Reduce Operator Tree: + Group By Operator + + expr: sum(VALUE.0) + keys: + expr: KEY.0 + type: string + mode: partial1 + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat Stage: Stage-2 Map Reduce + Alias -> Map Operator Tree: + /tmp/hive-njain/748679827/1407352694.10001 + Reduce Output Operator + key expressions: + expr: 0 + type: string + # partition fields: 1 + tag: -1 + value expressions: + expr: 1 + type: string Reduce Operator Tree: Group By Operator + + expr: sum(VALUE.0) keys: expr: KEY.0 type: string mode: partial2 - - expr: sum(VALUE.0) Select 
Operator expressions: expr: 0 @@ -57,31 +62,20 @@ STAGE PLANS: type: string File Output Operator table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - Alias -> Map Operator Tree: - /tmp/hive-zshao/178504461.10001 - Reduce Output Operator - tag: -1 - key expressions: - expr: 0 - type: string - value expressions: - expr: 1 - type: string - # partition fields: 1 + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 Stage: Stage-0 Move Operator tables: + replace: table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - replace: + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 0 0.0 @@ -393,4 +387,3 @@ STAGE PLANS: 96 96.0 97 194.0 98 196.0 - diff --git a/ql/src/test/results/clientpositive/groupby1_limit.q.out b/ql/src/test/results/clientpositive/groupby1_limit.q.out new file mode 100644 index 000000000..c186d146a --- /dev/null +++ b/ql/src/test/results/clientpositive/groupby1_limit.q.out @@ -0,0 +1,86 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src key)) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION substr (TOK_COLREF src value) 4)))) (TOK_GROUPBY (TOK_COLREF src key)) (TOK_LIMIT 5))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 depends on stages: Stage-2 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src + Reduce Output Operator + key expressions: + expr: key + type: string + # partition fields: -1 + tag: -1 + value expressions: + expr: substr(value, 4) + type: string + Reduce Operator Tree: + Group By Operator + + expr: sum(VALUE.0) + keys: + expr: KEY.0 + type: string + mode: partial1 + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + Stage: Stage-2 + Map Reduce + Alias -> Map Operator Tree: + /tmp/hive-njain/7427260/341902671.10001 + Reduce Output Operator + key expressions: + expr: 0 + type: string + # partition fields: 1 + tag: -1 + value expressions: + expr: 1 + type: string + Reduce Operator Tree: + Group By Operator + + expr: sum(VALUE.0) + keys: + expr: KEY.0 + type: string + mode: partial2 + Select Operator + expressions: + expr: 0 + type: string + expr: 1 + type: string + Limit + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + + Stage: Stage-0 + Move Operator + tables: + replace: + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + + +0 0.0 +10 10.0 +100 200.0 +103 206.0 +104 208.0 diff --git a/ql/src/test/results/clientpositive/groupby2.q.out b/ql/src/test/results/clientpositive/groupby2.q.out index 7dfef1ced..a7f07cbf8 100644 --- a/ql/src/test/results/clientpositive/groupby2.q.out +++ b/ql/src/test/results/clientpositive/groupby2.q.out @@ -9,46 +9,54 @@ STAGE DEPENDENCIES: STAGE PLANS: Stage: Stage-1 Map Reduce - Reduce Operator Tree: - Group By 
Operator - keys: - expr: VALUE.2 - type: string - mode: partial1 - - expr: count(DISTINCT KEY.0) - expr: sum(KEY.0) - File Output Operator - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat Alias -> Map Operator Tree: src Reduce Output Operator - tag: -1 key expressions: - expr: substr(value, 4) - type: string - value expressions: - expr: key - type: string - expr: value - type: string expr: substr(key, 0, 1) type: string - # partition fields: 1 + expr: substr(value, 4) + type: string + # partition fields: 2147483647 + tag: -1 + Reduce Operator Tree: + Group By Operator + + expr: count(DISTINCT KEY.1) + expr: sum(KEY.1) + keys: + expr: KEY.0 + type: string + mode: partial1 + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat Stage: Stage-2 Map Reduce + Alias -> Map Operator Tree: + /tmp/hive-njain/307368091/808162418.10001 + Reduce Output Operator + key expressions: + expr: 0 + type: string + # partition fields: 1 + tag: -1 + value expressions: + expr: 1 + type: string + expr: 2 + type: string Reduce Operator Tree: Group By Operator + + expr: count(VALUE.0) + expr: sum(VALUE.1) keys: expr: KEY.0 type: string mode: partial2 - - expr: count(VALUE.0) - expr: sum(VALUE.1) Select Operator expressions: expr: 0 @@ -59,33 +67,20 @@ STAGE PLANS: type: string File Output Operator table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - Alias -> Map Operator Tree: - /tmp/hive-zshao/29356866.10001 - Reduce Output Operator - tag: -1 - key expressions: - expr: 0 - type: string - value expressions: - expr: 1 - type: string - expr: 2 - type: string - # partition fields: 1 + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 Stage: Stage-0 Move Operator tables: + replace: table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - replace: + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 0 1 00.0 @@ -98,4 +93,3 @@ STAGE PLANS: 7 6 7735.0 8 8 8762.0 9 7 91047.0 - diff --git a/ql/src/test/results/clientpositive/groupby2_limit.q.out b/ql/src/test/results/clientpositive/groupby2_limit.q.out new file mode 100644 index 000000000..272c841de --- /dev/null +++ b/ql/src/test/results/clientpositive/groupby2_limit.q.out @@ -0,0 +1,78 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src key)) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION substr (TOK_COLREF src value) 4)))) (TOK_GROUPBY (TOK_COLREF src key)) (TOK_LIMIT 5))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src + Reduce Output Operator + key expressions: + expr: key + type: string + # partition fields: -1 + tag: -1 + value expressions: + expr: substr(value, 4) + type: string + Reduce Operator Tree: + Group By Operator + + expr: sum(VALUE.0) + keys: + expr: KEY.0 + type: string + mode: partial1 + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: 
org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + Stage: Stage-2 + Map Reduce + Alias -> Map Operator Tree: + /tmp/hive-njain/606295988/175965730.10002 + Reduce Output Operator + key expressions: + expr: 0 + type: string + # partition fields: 1 + tag: -1 + value expressions: + expr: 1 + type: string + Reduce Operator Tree: + Group By Operator + + expr: sum(VALUE.0) + keys: + expr: KEY.0 + type: string + mode: partial2 + Select Operator + expressions: + expr: 0 + type: string + expr: 1 + type: string + Limit + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: 5 + + +0 0.0 +10 10.0 +100 200.0 +103 206.0 +104 208.0 diff --git a/ql/src/test/results/clientpositive/groupby3.q.out b/ql/src/test/results/clientpositive/groupby3.q.out index 4370bcc14..744b950a2 100644 --- a/ql/src/test/results/clientpositive/groupby3.q.out +++ b/ql/src/test/results/clientpositive/groupby3.q.out @@ -9,44 +9,55 @@ STAGE DEPENDENCIES: STAGE PLANS: Stage: Stage-1 Map Reduce - Reduce Operator Tree: - Group By Operator - mode: partial1 - - expr: avg(DISTINCT KEY.0) - expr: sum(KEY.0) - expr: avg(KEY.0) - expr: min(KEY.0) - expr: max(KEY.0) - File Output Operator - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat Alias -> Map Operator Tree: src Reduce Output Operator - tag: -1 key expressions: expr: substr(value, 4) type: string - value expressions: - expr: key - type: string - expr: value - type: string - # partition fields: 1 + # partition fields: 2147483647 + tag: -1 + Reduce Operator Tree: + Group By Operator + + expr: avg(DISTINCT KEY.0) + expr: sum(KEY.0) + expr: avg(KEY.0) + expr: min(KEY.0) + expr: max(KEY.0) + mode: partial1 + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat Stage: Stage-2 Map Reduce + Alias -> Map Operator Tree: + /tmp/hive-njain/629715569/118113569.10001 + Reduce Output Operator + # partition fields: 0 + tag: -1 + value expressions: + expr: 0 + type: string + expr: 1 + type: string + expr: 2 + type: string + expr: 3 + type: string + expr: 4 + type: string Reduce Operator Tree: Group By Operator - mode: partial2 expr: avg(VALUE.0) expr: sum(VALUE.1) expr: avg(VALUE.2) expr: min(VALUE.3) expr: max(VALUE.4) + mode: partial2 Select Operator expressions: expr: 1 @@ -61,37 +72,20 @@ STAGE PLANS: type: string File Output Operator table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - Alias -> Map Operator Tree: - /tmp/hive-zshao/52918796.10001 - Reduce Output Operator - tag: -1 - value expressions: - expr: 0 - type: string - expr: 1 - type: string - expr: 2 - type: string - expr: 3 - type: string - expr: 4 - type: string - # partition fields: 0 + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 Stage: Stage-0 Move Operator tables: + replace: table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - replace: + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 130091.0 260.182 256.10355987055016 498.0 0.0 - diff --git a/ql/src/test/results/clientpositive/groupby4.q.out 
b/ql/src/test/results/clientpositive/groupby4.q.out index d6b4baf25..28a060553 100644 --- a/ql/src/test/results/clientpositive/groupby4.q.out +++ b/ql/src/test/results/clientpositive/groupby4.q.out @@ -9,34 +9,35 @@ STAGE DEPENDENCIES: STAGE PLANS: Stage: Stage-1 Map Reduce - Reduce Operator Tree: - Group By Operator - keys: - expr: VALUE.2 - type: string - mode: partial1 - File Output Operator - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat Alias -> Map Operator Tree: src Reduce Output Operator - tag: -1 key expressions: - expr: 0 - type: int - value expressions: - expr: key - type: string - expr: value - type: string expr: substr(key, 0, 1) type: string # partition fields: -1 + tag: -1 + Reduce Operator Tree: + Group By Operator + keys: + expr: KEY.0 + type: string + mode: partial1 + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat Stage: Stage-2 Map Reduce + Alias -> Map Operator Tree: + /tmp/hive-njain/1561965178/525265780.10001 + Reduce Output Operator + key expressions: + expr: 0 + type: string + # partition fields: 1 + tag: -1 Reduce Operator Tree: Group By Operator keys: @@ -49,28 +50,20 @@ STAGE PLANS: type: string File Output Operator table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - Alias -> Map Operator Tree: - /tmp/hive-zshao/64182502.10001 - Reduce Output Operator - tag: -1 - key expressions: - expr: 0 - type: string - # partition fields: 1 + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 Stage: Stage-0 Move Operator tables: + replace: table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - replace: + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 0 @@ -83,4 +76,3 @@ STAGE PLANS: 7 8 9 - diff --git a/ql/src/test/results/clientpositive/groupby5.q.out b/ql/src/test/results/clientpositive/groupby5.q.out index b19ebd207..b59319d01 100644 --- a/ql/src/test/results/clientpositive/groupby5.q.out +++ b/ql/src/test/results/clientpositive/groupby5.q.out @@ -9,46 +9,51 @@ STAGE DEPENDENCIES: STAGE PLANS: Stage: Stage-1 Map Reduce - Reduce Operator Tree: - Group By Operator - keys: - expr: VALUE.2 - type: string - mode: partial1 - - expr: sum(VALUE.3) - File Output Operator - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat Alias -> Map Operator Tree: src Reduce Output Operator - tag: -1 key expressions: - expr: 0 - type: int - value expressions: - expr: key - type: string - expr: value - type: string expr: key type: string + # partition fields: -1 + tag: -1 + value expressions: expr: substr(value, 4) type: string - # partition fields: -1 + Reduce Operator Tree: + Group By Operator + + expr: sum(VALUE.0) + keys: + expr: KEY.0 + type: string + mode: partial1 + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat Stage: Stage-2 Map Reduce + Alias -> Map Operator Tree: + /tmp/hive-njain/888102295/1013886705.10001 + Reduce Output Operator + key expressions: + expr: 0 + type: string + # partition fields: 1 + tag: -1 
+ value expressions: + expr: 1 + type: string Reduce Operator Tree: Group By Operator + + expr: sum(VALUE.0) keys: expr: KEY.0 type: string mode: partial2 - - expr: sum(VALUE.0) Select Operator expressions: expr: 0 @@ -57,31 +62,20 @@ STAGE PLANS: type: string File Output Operator table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - Alias -> Map Operator Tree: - /tmp/hive-zshao/317976905.10001 - Reduce Output Operator - tag: -1 - key expressions: - expr: 0 - type: string - value expressions: - expr: 1 - type: string - # partition fields: 1 + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 Stage: Stage-0 Move Operator tables: + replace: table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - replace: + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 0 0.0 @@ -393,4 +387,3 @@ STAGE PLANS: 96 96.0 97 194.0 98 196.0 - diff --git a/ql/src/test/results/clientpositive/groupby6.q.out b/ql/src/test/results/clientpositive/groupby6.q.out index ef9d4ead0..a74c5daf6 100644 --- a/ql/src/test/results/clientpositive/groupby6.q.out +++ b/ql/src/test/results/clientpositive/groupby6.q.out @@ -9,34 +9,35 @@ STAGE DEPENDENCIES: STAGE PLANS: Stage: Stage-1 Map Reduce - Reduce Operator Tree: - Group By Operator - keys: - expr: VALUE.2 - type: string - mode: partial1 - File Output Operator - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat Alias -> Map Operator Tree: src Reduce Output Operator - tag: -1 key expressions: - expr: 0 - type: int - value expressions: - expr: key - type: string - expr: value - type: string expr: substr(value, 4, 1) type: string # partition fields: -1 + tag: -1 + Reduce Operator Tree: + Group By Operator + keys: + expr: KEY.0 + type: string + mode: partial1 + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat Stage: Stage-2 Map Reduce + Alias -> Map Operator Tree: + /tmp/hive-njain/256745338/35530060.10001 + Reduce Output Operator + key expressions: + expr: 0 + type: string + # partition fields: 1 + tag: -1 Reduce Operator Tree: Group By Operator keys: @@ -49,28 +50,20 @@ STAGE PLANS: type: string File Output Operator table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - Alias -> Map Operator Tree: - /tmp/hive-zshao/286498248.10001 - Reduce Output Operator - tag: -1 - key expressions: - expr: 0 - type: string - # partition fields: 1 + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 Stage: Stage-0 Move Operator tables: + replace: table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - replace: + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 0 @@ -83,4 +76,3 @@ STAGE PLANS: 7 8 9 - diff --git a/ql/src/test/results/clientpositive/groupby7.q.out b/ql/src/test/results/clientpositive/groupby7.q.out index 852eaad19..1e61dc12b 100644 --- a/ql/src/test/results/clientpositive/groupby7.q.out +++ 
b/ql/src/test/results/clientpositive/groupby7.q.out @@ -307,7 +307,6 @@ 96 96.0 97 194.0 98 196.0 - 0 0.0 10 10.0 100 200.0 @@ -617,4 +616,3 @@ 96 96.0 97 194.0 98 196.0 - diff --git a/ql/src/test/results/clientpositive/groupby8.q.out b/ql/src/test/results/clientpositive/groupby8.q.out index ace6b166f..1af875072 100644 --- a/ql/src/test/results/clientpositive/groupby8.q.out +++ b/ql/src/test/results/clientpositive/groupby8.q.out @@ -307,7 +307,6 @@ 96 1 97 1 98 1 - 0 1 10 1 100 1 @@ -617,4 +616,3 @@ 96 1 97 1 98 1 - diff --git a/ql/src/test/results/clientpositive/input.q.out b/ql/src/test/results/clientpositive/input.q.out new file mode 100644 index 000000000..a6269e630 --- /dev/null +++ b/ql/src/test/results/clientpositive/input.q.out @@ -0,0 +1,512 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF x))))) + +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: -1 + + +238 val_238 +86 val_86 +311 val_311 +27 val_27 +165 val_165 +409 val_409 +255 val_255 +278 val_278 +98 val_98 +484 val_484 +265 val_265 +193 val_193 +401 val_401 +150 val_150 +273 val_273 +224 val_224 +369 val_369 +66 val_66 +128 val_128 +213 val_213 +146 val_146 +406 val_406 +429 val_429 +374 val_374 +152 val_152 +469 val_469 +145 val_145 +495 val_495 +37 val_37 +327 val_327 +281 val_281 +277 val_277 +209 val_209 +15 val_15 +82 val_82 +403 val_403 +166 val_166 +417 val_417 +430 val_430 +252 val_252 +292 val_292 +219 val_219 +287 val_287 +153 val_153 +193 val_193 +338 val_338 +446 val_446 +459 val_459 +394 val_394 +237 val_237 +482 val_482 +174 val_174 +413 val_413 +494 val_494 +207 val_207 +199 val_199 +466 val_466 +208 val_208 +174 val_174 +399 val_399 +396 val_396 +247 val_247 +417 val_417 +489 val_489 +162 val_162 +377 val_377 +397 val_397 +309 val_309 +365 val_365 +266 val_266 +439 val_439 +342 val_342 +367 val_367 +325 val_325 +167 val_167 +195 val_195 +475 val_475 +17 val_17 +113 val_113 +155 val_155 +203 val_203 +339 val_339 +0 val_0 +455 val_455 +128 val_128 +311 val_311 +316 val_316 +57 val_57 +302 val_302 +205 val_205 +149 val_149 +438 val_438 +345 val_345 +129 val_129 +170 val_170 +20 val_20 +489 val_489 +157 val_157 +378 val_378 +221 val_221 +92 val_92 +111 val_111 +47 val_47 +72 val_72 +4 val_4 +280 val_280 +35 val_35 +427 val_427 +277 val_277 +208 val_208 +356 val_356 +399 val_399 +169 val_169 +382 val_382 +498 val_498 +125 val_125 +386 val_386 +437 val_437 +469 val_469 +192 val_192 +286 val_286 +187 val_187 +176 val_176 +54 val_54 +459 val_459 +51 val_51 +138 val_138 +103 val_103 +239 val_239 +213 val_213 +216 val_216 +430 val_430 +278 val_278 +176 val_176 +289 val_289 +221 val_221 +65 val_65 +318 val_318 +332 val_332 +311 val_311 +275 val_275 +137 val_137 +241 val_241 +83 val_83 +333 val_333 +180 val_180 +284 val_284 +12 val_12 +230 val_230 +181 val_181 +67 val_67 +260 val_260 +404 val_404 +384 val_384 +489 val_489 +353 val_353 +373 val_373 +272 val_272 +138 val_138 +217 val_217 +84 val_84 +348 val_348 +466 val_466 +58 val_58 +8 val_8 +411 val_411 +230 val_230 +208 val_208 +348 val_348 +24 val_24 +463 val_463 +431 val_431 +179 val_179 +172 val_172 +42 val_42 +129 val_129 +158 val_158 +119 val_119 +496 val_496 +0 val_0 +322 val_322 +197 val_197 +468 val_468 +393 val_393 +454 val_454 +100 val_100 +298 val_298 +199 val_199 +191 val_191 +418 val_418 +96 val_96 +26 val_26 +165 val_165 +327 val_327 +230 val_230 +205 val_205 +120 val_120 +131 val_131 +51 
val_51 +404 val_404 +43 val_43 +436 val_436 +156 val_156 +469 val_469 +468 val_468 +308 val_308 +95 val_95 +196 val_196 +288 val_288 +481 val_481 +457 val_457 +98 val_98 +282 val_282 +197 val_197 +187 val_187 +318 val_318 +318 val_318 +409 val_409 +470 val_470 +137 val_137 +369 val_369 +316 val_316 +169 val_169 +413 val_413 +85 val_85 +77 val_77 +0 val_0 +490 val_490 +87 val_87 +364 val_364 +179 val_179 +118 val_118 +134 val_134 +395 val_395 +282 val_282 +138 val_138 +238 val_238 +419 val_419 +15 val_15 +118 val_118 +72 val_72 +90 val_90 +307 val_307 +19 val_19 +435 val_435 +10 val_10 +277 val_277 +273 val_273 +306 val_306 +224 val_224 +309 val_309 +389 val_389 +327 val_327 +242 val_242 +369 val_369 +392 val_392 +272 val_272 +331 val_331 +401 val_401 +242 val_242 +452 val_452 +177 val_177 +226 val_226 +5 val_5 +497 val_497 +402 val_402 +396 val_396 +317 val_317 +395 val_395 +58 val_58 +35 val_35 +336 val_336 +95 val_95 +11 val_11 +168 val_168 +34 val_34 +229 val_229 +233 val_233 +143 val_143 +472 val_472 +322 val_322 +498 val_498 +160 val_160 +195 val_195 +42 val_42 +321 val_321 +430 val_430 +119 val_119 +489 val_489 +458 val_458 +78 val_78 +76 val_76 +41 val_41 +223 val_223 +492 val_492 +149 val_149 +449 val_449 +218 val_218 +228 val_228 +138 val_138 +453 val_453 +30 val_30 +209 val_209 +64 val_64 +468 val_468 +76 val_76 +74 val_74 +342 val_342 +69 val_69 +230 val_230 +33 val_33 +368 val_368 +103 val_103 +296 val_296 +113 val_113 +216 val_216 +367 val_367 +344 val_344 +167 val_167 +274 val_274 +219 val_219 +239 val_239 +485 val_485 +116 val_116 +223 val_223 +256 val_256 +263 val_263 +70 val_70 +487 val_487 +480 val_480 +401 val_401 +288 val_288 +191 val_191 +5 val_5 +244 val_244 +438 val_438 +128 val_128 +467 val_467 +432 val_432 +202 val_202 +316 val_316 +229 val_229 +469 val_469 +463 val_463 +280 val_280 +2 val_2 +35 val_35 +283 val_283 +331 val_331 +235 val_235 +80 val_80 +44 val_44 +193 val_193 +321 val_321 +335 val_335 +104 val_104 +466 val_466 +366 val_366 +175 val_175 +403 val_403 +483 val_483 +53 val_53 +105 val_105 +257 val_257 +406 val_406 +409 val_409 +190 val_190 +406 val_406 +401 val_401 +114 val_114 +258 val_258 +90 val_90 +203 val_203 +262 val_262 +348 val_348 +424 val_424 +12 val_12 +396 val_396 +201 val_201 +217 val_217 +164 val_164 +431 val_431 +454 val_454 +478 val_478 +298 val_298 +125 val_125 +431 val_431 +164 val_164 +424 val_424 +187 val_187 +382 val_382 +5 val_5 +70 val_70 +397 val_397 +480 val_480 +291 val_291 +24 val_24 +351 val_351 +255 val_255 +104 val_104 +70 val_70 +163 val_163 +438 val_438 +119 val_119 +414 val_414 +200 val_200 +491 val_491 +237 val_237 +439 val_439 +360 val_360 +248 val_248 +479 val_479 +305 val_305 +417 val_417 +199 val_199 +444 val_444 +120 val_120 +429 val_429 +169 val_169 +443 val_443 +323 val_323 +325 val_325 +277 val_277 +230 val_230 +478 val_478 +178 val_178 +468 val_468 +310 val_310 +317 val_317 +333 val_333 +493 val_493 +460 val_460 +207 val_207 +249 val_249 +265 val_265 +480 val_480 +83 val_83 +136 val_136 +353 val_353 +172 val_172 +214 val_214 +462 val_462 +233 val_233 +406 val_406 +133 val_133 +175 val_175 +189 val_189 +454 val_454 +375 val_375 +401 val_401 +421 val_421 +407 val_407 +384 val_384 +256 val_256 +26 val_26 +134 val_134 +67 val_67 +384 val_384 +379 val_379 +18 val_18 +462 val_462 +492 val_492 +100 val_100 +298 val_298 +9 val_9 +341 val_341 +498 val_498 +146 val_146 +458 val_458 +362 val_362 +186 val_186 +285 val_285 +348 val_348 +167 val_167 +18 val_18 +273 val_273 +183 val_183 +281 val_281 +344 val_344 +97 val_97 
+469 val_469 +315 val_315 +84 val_84 +28 val_28 +37 val_37 +448 val_448 +152 val_152 +348 val_348 +307 val_307 +194 val_194 +414 val_414 +477 val_477 +222 val_222 +126 val_126 +90 val_90 +169 val_169 +403 val_403 +400 val_400 +200 val_200 +97 val_97 diff --git a/ql/src/test/results/clientpositive/input1.q.out b/ql/src/test/results/clientpositive/input1.q.out index 013ca194c..69eac8f6b 100644 --- a/ql/src/test/results/clientpositive/input1.q.out +++ b/ql/src/test/results/clientpositive/input1.q.out @@ -1,5 +1,5 @@ ABSTRACT SYNTAX TREE: - (TOK_DESCTABLE TEST1) + (TOK_DESCTABLE (TOK_TAB TEST1)) STAGE DEPENDENCIES: Stage-0 is a root stage diff --git a/ql/src/test/results/clientpositive/input10.q.out b/ql/src/test/results/clientpositive/input10.q.out index b172ef0b2..b9ecf7e0f 100644 --- a/ql/src/test/results/clientpositive/input10.q.out +++ b/ql/src/test/results/clientpositive/input10.q.out @@ -1,5 +1,5 @@ ABSTRACT SYNTAX TREE: - (TOK_DESCTABLE TEST10) + (TOK_DESCTABLE (TOK_TAB TEST10)) STAGE DEPENDENCIES: Stage-0 is a root stage diff --git a/ql/src/test/results/clientpositive/input11.q.out b/ql/src/test/results/clientpositive/input11.q.out index e255d9d76..995b10d7f 100644 --- a/ql/src/test/results/clientpositive/input11.q.out +++ b/ql/src/test/results/clientpositive/input11.q.out @@ -22,20 +22,20 @@ STAGE PLANS: type: string File Output Operator table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 Stage: Stage-0 Move Operator tables: + replace: table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - replace: + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 86 val_86 @@ -122,4 +122,3 @@ STAGE PLANS: 37 val_37 90 val_90 97 val_97 - diff --git a/ql/src/test/results/clientpositive/input11_limit.q.out b/ql/src/test/results/clientpositive/input11_limit.q.out new file mode 100644 index 000000000..e84df8344 --- /dev/null +++ b/ql/src/test/results/clientpositive/input11_limit.q.out @@ -0,0 +1,51 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src key)) (TOK_SELEXPR (TOK_COLREF src value))) (TOK_WHERE (< (TOK_COLREF src key) 100)) (TOK_LIMIT 10))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src + Filter Operator + predicate: + expr: (key < 100) + type: Boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + Limit + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + + Stage: Stage-0 + Move Operator + tables: + replace: + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + + +86 val_86 +27 val_27 +98 val_98 +66 val_66 +37 val_37 +15 val_15 +82 val_82 +17 val_17 +0 val_0 +57 val_57 diff --git a/ql/src/test/results/clientpositive/input12.q.out 
b/ql/src/test/results/clientpositive/input12.q.out index 1d974668a..21282d472 100644 --- a/ql/src/test/results/clientpositive/input12.q.out +++ b/ql/src/test/results/clientpositive/input12.q.out @@ -22,10 +22,10 @@ STAGE PLANS: type: string File Output Operator table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 Filter Operator predicate: expr: ((key >= 100) and (key < 200)) @@ -38,10 +38,10 @@ STAGE PLANS: type: string File Output Operator table: - name: dest2 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest2 Filter Operator predicate: expr: (key >= 200) @@ -52,35 +52,35 @@ STAGE PLANS: type: string File Output Operator table: - name: dest3 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest3 Stage: Stage-0 Move Operator tables: - table: - name: dest1 - serde: simple_meta - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat replace: table: - name: dest2 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 replace: table: - name: dest3 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest2 partition: ds 2008-04-08 hr 12 replace: + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest3 86 val_86 @@ -167,7 +167,6 @@ STAGE PLANS: 37 val_37 90 val_90 97 val_97 - 165 val_165 193 val_193 150 val_150 @@ -273,7 +272,6 @@ STAGE PLANS: 194 val_194 126 val_126 169 val_169 - 238 NULL 2008-04-08 12 311 NULL 2008-04-08 12 409 NULL 2008-04-08 12 @@ -585,4 +583,3 @@ STAGE PLANS: 403 NULL 2008-04-08 12 400 NULL 2008-04-08 12 200 NULL 2008-04-08 12 - diff --git a/ql/src/test/results/clientpositive/input13.q.out b/ql/src/test/results/clientpositive/input13.q.out index e5c13fd8a..02ed79155 100644 --- a/ql/src/test/results/clientpositive/input13.q.out +++ b/ql/src/test/results/clientpositive/input13.q.out @@ -22,10 +22,10 @@ STAGE PLANS: type: string File Output Operator table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 Filter Operator predicate: expr: ((key >= 100) and (key < 200)) @@ -38,10 +38,10 @@ STAGE PLANS: type: string File Output Operator table: - name: dest2 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest2 Filter Operator 
predicate: expr: ((key >= 200) and (key < 300)) @@ -52,10 +52,10 @@ STAGE PLANS: type: string File Output Operator table: - name: dest3 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest3 Filter Operator predicate: expr: (key >= 300) @@ -71,31 +71,31 @@ STAGE PLANS: Stage: Stage-0 Move Operator + files: + hdfs directory: + destination: ../../../../build/contrib/hive/ql/test/data/warehouse/dest4.out tables: - table: - name: dest1 - serde: simple_meta - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat replace: table: - name: dest2 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 replace: table: - name: dest3 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest2 partition: ds 2008-04-08 hr 12 replace: - files: - destination: ../../../../build/contrib/hive/ql/test/data/warehouse/dest4.out - hdfs directory: + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest3 86 val_86 @@ -182,7 +182,6 @@ STAGE PLANS: 37 val_37 90 val_90 97 val_97 - 165 val_165 193 val_193 150 val_150 @@ -288,7 +287,6 @@ STAGE PLANS: 194 val_194 126 val_126 169 val_169 - 238 NULL 2008-04-08 12 255 NULL 2008-04-08 12 278 NULL 2008-04-08 12 @@ -392,7 +390,6 @@ STAGE PLANS: 281 NULL 2008-04-08 12 222 NULL 2008-04-08 12 200 NULL 2008-04-08 12 - val_311 val_409 val_484 diff --git a/ql/src/test/results/clientpositive/input14.q.out b/ql/src/test/results/clientpositive/input14.q.out index 588e2fa5a..e51317e72 100644 --- a/ql/src/test/results/clientpositive/input14.q.out +++ b/ql/src/test/results/clientpositive/input14.q.out @@ -8,24 +8,6 @@ STAGE DEPENDENCIES: STAGE PLANS: Stage: Stage-1 Map Reduce - Reduce Operator Tree: - Extract - Filter Operator - predicate: - expr: (0 < 100) - type: Boolean - Select Operator - expressions: - expr: 0 - type: string - expr: 1 - type: string - File Output Operator - table: - name: dest1 - serde: simple_meta - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat Alias -> Map Operator Tree: tmap:src Select Operator @@ -40,26 +22,44 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat Reduce Output Operator - tag: -1 key expressions: expr: tkey type: string + # partition fields: 1 + tag: -1 value expressions: expr: tkey type: string expr: tvalue type: string - # partition fields: 1 + Reduce Operator Tree: + Extract + Filter Operator + predicate: + expr: (0 < 100) + type: Boolean + Select Operator + expressions: + expr: 0 + type: string + expr: 1 + type: string + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + 
name: dest1 Stage: Stage-0 Move Operator tables: + replace: table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - replace: + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 0 val_0 @@ -146,4 +146,3 @@ STAGE PLANS: 97 val_97 98 val_98 98 val_98 - diff --git a/ql/src/test/results/clientpositive/input14_limit.q.out b/ql/src/test/results/clientpositive/input14_limit.q.out new file mode 100644 index 000000000..26ca30060 --- /dev/null +++ b/ql/src/test/results/clientpositive/input14_limit.q.out @@ -0,0 +1,95 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_COLLIST (TOK_COLREF src key) (TOK_COLREF src value)) (TOK_ALIASLIST tkey tvalue) '/bin/cat'))) (TOK_CLUSTERBY tkey) (TOK_LIMIT 20))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF tmap tkey)) (TOK_SELEXPR (TOK_COLREF tmap tvalue))) (TOK_WHERE (< (TOK_COLREF tmap tkey) 100)))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 depends on stages: Stage-2 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + tmap:src + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + Transform Operator + command: /bin/cat + output info: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + Reduce Output Operator + key expressions: + expr: tkey + type: string + # partition fields: 1 + tag: -1 + value expressions: + expr: tkey + type: string + expr: tvalue + type: string + Reduce Operator Tree: + Extract + Limit + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + Stage: Stage-2 + Map Reduce + Alias -> Map Operator Tree: + /tmp/hive-njain/945772770/1814909502.10001 + Reduce Output Operator + key expressions: + expr: 0 + type: string + # partition fields: 1 + tag: -1 + value expressions: + expr: 0 + type: string + expr: 1 + type: string + # Reducers: 1 + Reduce Operator Tree: + Extract + Limit + Filter Operator + predicate: + expr: (0 < 100) + type: Boolean + Select Operator + expressions: + expr: 0 + type: string + expr: 1 + type: string + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + + Stage: Stage-0 + Move Operator + tables: + replace: + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + + +0 val_0 +0 val_0 +0 val_0 +10 val_10 +11 val_11 diff --git a/ql/src/test/results/clientpositive/input15.q.out b/ql/src/test/results/clientpositive/input15.q.out index a71c107ff..973dcce48 100644 --- a/ql/src/test/results/clientpositive/input15.q.out +++ b/ql/src/test/results/clientpositive/input15.q.out @@ -8,12 +8,12 @@ STAGE PLANS: Stage: Stage-0 Create Table Operator: Create Table - isExternal: - name: TEST15 - # buckets: -1 + columns: key int, value string field delimiter: + # buckets: -1 + 
name: TEST15 + isExternal: isSequenceFile: - columns: key int, value string key int diff --git a/ql/src/test/results/clientpositive/input16.q.out b/ql/src/test/results/clientpositive/input16.q.out new file mode 100644 index 000000000..103b537db --- /dev/null +++ b/ql/src/test/results/clientpositive/input16.q.out @@ -0,0 +1,500 @@ +val_238 238 +val_86 86 +val_311 311 +val_27 27 +val_165 165 +val_409 409 +val_255 255 +val_278 278 +val_98 98 +val_484 484 +val_265 265 +val_193 193 +val_401 401 +val_150 150 +val_273 273 +val_224 224 +val_369 369 +val_66 66 +val_128 128 +val_213 213 +val_146 146 +val_406 406 +val_429 429 +val_374 374 +val_152 152 +val_469 469 +val_145 145 +val_495 495 +val_37 37 +val_327 327 +val_281 281 +val_277 277 +val_209 209 +val_15 15 +val_82 82 +val_403 403 +val_166 166 +val_417 417 +val_430 430 +val_252 252 +val_292 292 +val_219 219 +val_287 287 +val_153 153 +val_193 193 +val_338 338 +val_446 446 +val_459 459 +val_394 394 +val_237 237 +val_482 482 +val_174 174 +val_413 413 +val_494 494 +val_207 207 +val_199 199 +val_466 466 +val_208 208 +val_174 174 +val_399 399 +val_396 396 +val_247 247 +val_417 417 +val_489 489 +val_162 162 +val_377 377 +val_397 397 +val_309 309 +val_365 365 +val_266 266 +val_439 439 +val_342 342 +val_367 367 +val_325 325 +val_167 167 +val_195 195 +val_475 475 +val_17 17 +val_113 113 +val_155 155 +val_203 203 +val_339 339 +val_0 0 +val_455 455 +val_128 128 +val_311 311 +val_316 316 +val_57 57 +val_302 302 +val_205 205 +val_149 149 +val_438 438 +val_345 345 +val_129 129 +val_170 170 +val_20 20 +val_489 489 +val_157 157 +val_378 378 +val_221 221 +val_92 92 +val_111 111 +val_47 47 +val_72 72 +val_4 4 +val_280 280 +val_35 35 +val_427 427 +val_277 277 +val_208 208 +val_356 356 +val_399 399 +val_169 169 +val_382 382 +val_498 498 +val_125 125 +val_386 386 +val_437 437 +val_469 469 +val_192 192 +val_286 286 +val_187 187 +val_176 176 +val_54 54 +val_459 459 +val_51 51 +val_138 138 +val_103 103 +val_239 239 +val_213 213 +val_216 216 +val_430 430 +val_278 278 +val_176 176 +val_289 289 +val_221 221 +val_65 65 +val_318 318 +val_332 332 +val_311 311 +val_275 275 +val_137 137 +val_241 241 +val_83 83 +val_333 333 +val_180 180 +val_284 284 +val_12 12 +val_230 230 +val_181 181 +val_67 67 +val_260 260 +val_404 404 +val_384 384 +val_489 489 +val_353 353 +val_373 373 +val_272 272 +val_138 138 +val_217 217 +val_84 84 +val_348 348 +val_466 466 +val_58 58 +val_8 8 +val_411 411 +val_230 230 +val_208 208 +val_348 348 +val_24 24 +val_463 463 +val_431 431 +val_179 179 +val_172 172 +val_42 42 +val_129 129 +val_158 158 +val_119 119 +val_496 496 +val_0 0 +val_322 322 +val_197 197 +val_468 468 +val_393 393 +val_454 454 +val_100 100 +val_298 298 +val_199 199 +val_191 191 +val_418 418 +val_96 96 +val_26 26 +val_165 165 +val_327 327 +val_230 230 +val_205 205 +val_120 120 +val_131 131 +val_51 51 +val_404 404 +val_43 43 +val_436 436 +val_156 156 +val_469 469 +val_468 468 +val_308 308 +val_95 95 +val_196 196 +val_288 288 +val_481 481 +val_457 457 +val_98 98 +val_282 282 +val_197 197 +val_187 187 +val_318 318 +val_318 318 +val_409 409 +val_470 470 +val_137 137 +val_369 369 +val_316 316 +val_169 169 +val_413 413 +val_85 85 +val_77 77 +val_0 0 +val_490 490 +val_87 87 +val_364 364 +val_179 179 +val_118 118 +val_134 134 +val_395 395 +val_282 282 +val_138 138 +val_238 238 +val_419 419 +val_15 15 +val_118 118 +val_72 72 +val_90 90 +val_307 307 +val_19 19 +val_435 435 +val_10 10 +val_277 277 +val_273 273 +val_306 306 +val_224 224 +val_309 309 +val_389 389 +val_327 327 +val_242 242 +val_369 369 
+val_392 392 +val_272 272 +val_331 331 +val_401 401 +val_242 242 +val_452 452 +val_177 177 +val_226 226 +val_5 5 +val_497 497 +val_402 402 +val_396 396 +val_317 317 +val_395 395 +val_58 58 +val_35 35 +val_336 336 +val_95 95 +val_11 11 +val_168 168 +val_34 34 +val_229 229 +val_233 233 +val_143 143 +val_472 472 +val_322 322 +val_498 498 +val_160 160 +val_195 195 +val_42 42 +val_321 321 +val_430 430 +val_119 119 +val_489 489 +val_458 458 +val_78 78 +val_76 76 +val_41 41 +val_223 223 +val_492 492 +val_149 149 +val_449 449 +val_218 218 +val_228 228 +val_138 138 +val_453 453 +val_30 30 +val_209 209 +val_64 64 +val_468 468 +val_76 76 +val_74 74 +val_342 342 +val_69 69 +val_230 230 +val_33 33 +val_368 368 +val_103 103 +val_296 296 +val_113 113 +val_216 216 +val_367 367 +val_344 344 +val_167 167 +val_274 274 +val_219 219 +val_239 239 +val_485 485 +val_116 116 +val_223 223 +val_256 256 +val_263 263 +val_70 70 +val_487 487 +val_480 480 +val_401 401 +val_288 288 +val_191 191 +val_5 5 +val_244 244 +val_438 438 +val_128 128 +val_467 467 +val_432 432 +val_202 202 +val_316 316 +val_229 229 +val_469 469 +val_463 463 +val_280 280 +val_2 2 +val_35 35 +val_283 283 +val_331 331 +val_235 235 +val_80 80 +val_44 44 +val_193 193 +val_321 321 +val_335 335 +val_104 104 +val_466 466 +val_366 366 +val_175 175 +val_403 403 +val_483 483 +val_53 53 +val_105 105 +val_257 257 +val_406 406 +val_409 409 +val_190 190 +val_406 406 +val_401 401 +val_114 114 +val_258 258 +val_90 90 +val_203 203 +val_262 262 +val_348 348 +val_424 424 +val_12 12 +val_396 396 +val_201 201 +val_217 217 +val_164 164 +val_431 431 +val_454 454 +val_478 478 +val_298 298 +val_125 125 +val_431 431 +val_164 164 +val_424 424 +val_187 187 +val_382 382 +val_5 5 +val_70 70 +val_397 397 +val_480 480 +val_291 291 +val_24 24 +val_351 351 +val_255 255 +val_104 104 +val_70 70 +val_163 163 +val_438 438 +val_119 119 +val_414 414 +val_200 200 +val_491 491 +val_237 237 +val_439 439 +val_360 360 +val_248 248 +val_479 479 +val_305 305 +val_417 417 +val_199 199 +val_444 444 +val_120 120 +val_429 429 +val_169 169 +val_443 443 +val_323 323 +val_325 325 +val_277 277 +val_230 230 +val_478 478 +val_178 178 +val_468 468 +val_310 310 +val_317 317 +val_333 333 +val_493 493 +val_460 460 +val_207 207 +val_249 249 +val_265 265 +val_480 480 +val_83 83 +val_136 136 +val_353 353 +val_172 172 +val_214 214 +val_462 462 +val_233 233 +val_406 406 +val_133 133 +val_175 175 +val_189 189 +val_454 454 +val_375 375 +val_401 401 +val_421 421 +val_407 407 +val_384 384 +val_256 256 +val_26 26 +val_134 134 +val_67 67 +val_384 384 +val_379 379 +val_18 18 +val_462 462 +val_492 492 +val_100 100 +val_298 298 +val_9 9 +val_341 341 +val_498 498 +val_146 146 +val_458 458 +val_362 362 +val_186 186 +val_285 285 +val_348 348 +val_167 167 +val_18 18 +val_273 273 +val_183 183 +val_281 281 +val_344 344 +val_97 97 +val_469 469 +val_315 315 +val_84 84 +val_28 28 +val_37 37 +val_448 448 +val_152 152 +val_348 348 +val_307 307 +val_194 194 +val_414 414 +val_477 477 +val_222 222 +val_126 126 +val_90 90 +val_169 169 +val_403 403 +val_400 400 +val_200 200 +val_97 97 diff --git a/ql/src/test/results/clientpositive/input16_cc.q.out b/ql/src/test/results/clientpositive/input16_cc.q.out new file mode 100644 index 000000000..103b537db --- /dev/null +++ b/ql/src/test/results/clientpositive/input16_cc.q.out @@ -0,0 +1,500 @@ +val_238 238 +val_86 86 +val_311 311 +val_27 27 +val_165 165 +val_409 409 +val_255 255 +val_278 278 +val_98 98 +val_484 484 +val_265 265 +val_193 193 +val_401 401 +val_150 150 +val_273 273 +val_224 224 
+val_369 369 +val_66 66 +val_128 128 +val_213 213 +val_146 146 +val_406 406 +val_429 429 +val_374 374 +val_152 152 +val_469 469 +val_145 145 +val_495 495 +val_37 37 +val_327 327 +val_281 281 +val_277 277 +val_209 209 +val_15 15 +val_82 82 +val_403 403 +val_166 166 +val_417 417 +val_430 430 +val_252 252 +val_292 292 +val_219 219 +val_287 287 +val_153 153 +val_193 193 +val_338 338 +val_446 446 +val_459 459 +val_394 394 +val_237 237 +val_482 482 +val_174 174 +val_413 413 +val_494 494 +val_207 207 +val_199 199 +val_466 466 +val_208 208 +val_174 174 +val_399 399 +val_396 396 +val_247 247 +val_417 417 +val_489 489 +val_162 162 +val_377 377 +val_397 397 +val_309 309 +val_365 365 +val_266 266 +val_439 439 +val_342 342 +val_367 367 +val_325 325 +val_167 167 +val_195 195 +val_475 475 +val_17 17 +val_113 113 +val_155 155 +val_203 203 +val_339 339 +val_0 0 +val_455 455 +val_128 128 +val_311 311 +val_316 316 +val_57 57 +val_302 302 +val_205 205 +val_149 149 +val_438 438 +val_345 345 +val_129 129 +val_170 170 +val_20 20 +val_489 489 +val_157 157 +val_378 378 +val_221 221 +val_92 92 +val_111 111 +val_47 47 +val_72 72 +val_4 4 +val_280 280 +val_35 35 +val_427 427 +val_277 277 +val_208 208 +val_356 356 +val_399 399 +val_169 169 +val_382 382 +val_498 498 +val_125 125 +val_386 386 +val_437 437 +val_469 469 +val_192 192 +val_286 286 +val_187 187 +val_176 176 +val_54 54 +val_459 459 +val_51 51 +val_138 138 +val_103 103 +val_239 239 +val_213 213 +val_216 216 +val_430 430 +val_278 278 +val_176 176 +val_289 289 +val_221 221 +val_65 65 +val_318 318 +val_332 332 +val_311 311 +val_275 275 +val_137 137 +val_241 241 +val_83 83 +val_333 333 +val_180 180 +val_284 284 +val_12 12 +val_230 230 +val_181 181 +val_67 67 +val_260 260 +val_404 404 +val_384 384 +val_489 489 +val_353 353 +val_373 373 +val_272 272 +val_138 138 +val_217 217 +val_84 84 +val_348 348 +val_466 466 +val_58 58 +val_8 8 +val_411 411 +val_230 230 +val_208 208 +val_348 348 +val_24 24 +val_463 463 +val_431 431 +val_179 179 +val_172 172 +val_42 42 +val_129 129 +val_158 158 +val_119 119 +val_496 496 +val_0 0 +val_322 322 +val_197 197 +val_468 468 +val_393 393 +val_454 454 +val_100 100 +val_298 298 +val_199 199 +val_191 191 +val_418 418 +val_96 96 +val_26 26 +val_165 165 +val_327 327 +val_230 230 +val_205 205 +val_120 120 +val_131 131 +val_51 51 +val_404 404 +val_43 43 +val_436 436 +val_156 156 +val_469 469 +val_468 468 +val_308 308 +val_95 95 +val_196 196 +val_288 288 +val_481 481 +val_457 457 +val_98 98 +val_282 282 +val_197 197 +val_187 187 +val_318 318 +val_318 318 +val_409 409 +val_470 470 +val_137 137 +val_369 369 +val_316 316 +val_169 169 +val_413 413 +val_85 85 +val_77 77 +val_0 0 +val_490 490 +val_87 87 +val_364 364 +val_179 179 +val_118 118 +val_134 134 +val_395 395 +val_282 282 +val_138 138 +val_238 238 +val_419 419 +val_15 15 +val_118 118 +val_72 72 +val_90 90 +val_307 307 +val_19 19 +val_435 435 +val_10 10 +val_277 277 +val_273 273 +val_306 306 +val_224 224 +val_309 309 +val_389 389 +val_327 327 +val_242 242 +val_369 369 +val_392 392 +val_272 272 +val_331 331 +val_401 401 +val_242 242 +val_452 452 +val_177 177 +val_226 226 +val_5 5 +val_497 497 +val_402 402 +val_396 396 +val_317 317 +val_395 395 +val_58 58 +val_35 35 +val_336 336 +val_95 95 +val_11 11 +val_168 168 +val_34 34 +val_229 229 +val_233 233 +val_143 143 +val_472 472 +val_322 322 +val_498 498 +val_160 160 +val_195 195 +val_42 42 +val_321 321 +val_430 430 +val_119 119 +val_489 489 +val_458 458 +val_78 78 +val_76 76 +val_41 41 +val_223 223 +val_492 492 +val_149 149 +val_449 449 +val_218 218 
+val_228 228 +val_138 138 +val_453 453 +val_30 30 +val_209 209 +val_64 64 +val_468 468 +val_76 76 +val_74 74 +val_342 342 +val_69 69 +val_230 230 +val_33 33 +val_368 368 +val_103 103 +val_296 296 +val_113 113 +val_216 216 +val_367 367 +val_344 344 +val_167 167 +val_274 274 +val_219 219 +val_239 239 +val_485 485 +val_116 116 +val_223 223 +val_256 256 +val_263 263 +val_70 70 +val_487 487 +val_480 480 +val_401 401 +val_288 288 +val_191 191 +val_5 5 +val_244 244 +val_438 438 +val_128 128 +val_467 467 +val_432 432 +val_202 202 +val_316 316 +val_229 229 +val_469 469 +val_463 463 +val_280 280 +val_2 2 +val_35 35 +val_283 283 +val_331 331 +val_235 235 +val_80 80 +val_44 44 +val_193 193 +val_321 321 +val_335 335 +val_104 104 +val_466 466 +val_366 366 +val_175 175 +val_403 403 +val_483 483 +val_53 53 +val_105 105 +val_257 257 +val_406 406 +val_409 409 +val_190 190 +val_406 406 +val_401 401 +val_114 114 +val_258 258 +val_90 90 +val_203 203 +val_262 262 +val_348 348 +val_424 424 +val_12 12 +val_396 396 +val_201 201 +val_217 217 +val_164 164 +val_431 431 +val_454 454 +val_478 478 +val_298 298 +val_125 125 +val_431 431 +val_164 164 +val_424 424 +val_187 187 +val_382 382 +val_5 5 +val_70 70 +val_397 397 +val_480 480 +val_291 291 +val_24 24 +val_351 351 +val_255 255 +val_104 104 +val_70 70 +val_163 163 +val_438 438 +val_119 119 +val_414 414 +val_200 200 +val_491 491 +val_237 237 +val_439 439 +val_360 360 +val_248 248 +val_479 479 +val_305 305 +val_417 417 +val_199 199 +val_444 444 +val_120 120 +val_429 429 +val_169 169 +val_443 443 +val_323 323 +val_325 325 +val_277 277 +val_230 230 +val_478 478 +val_178 178 +val_468 468 +val_310 310 +val_317 317 +val_333 333 +val_493 493 +val_460 460 +val_207 207 +val_249 249 +val_265 265 +val_480 480 +val_83 83 +val_136 136 +val_353 353 +val_172 172 +val_214 214 +val_462 462 +val_233 233 +val_406 406 +val_133 133 +val_175 175 +val_189 189 +val_454 454 +val_375 375 +val_401 401 +val_421 421 +val_407 407 +val_384 384 +val_256 256 +val_26 26 +val_134 134 +val_67 67 +val_384 384 +val_379 379 +val_18 18 +val_462 462 +val_492 492 +val_100 100 +val_298 298 +val_9 9 +val_341 341 +val_498 498 +val_146 146 +val_458 458 +val_362 362 +val_186 186 +val_285 285 +val_348 348 +val_167 167 +val_18 18 +val_273 273 +val_183 183 +val_281 281 +val_344 344 +val_97 97 +val_469 469 +val_315 315 +val_84 84 +val_28 28 +val_37 37 +val_448 448 +val_152 152 +val_348 348 +val_307 307 +val_194 194 +val_414 414 +val_477 477 +val_222 222 +val_126 126 +val_90 90 +val_169 169 +val_403 403 +val_400 400 +val_200 200 +val_97 97 diff --git a/ql/src/test/results/clientpositive/input1_limit.q.out b/ql/src/test/results/clientpositive/input1_limit.q.out new file mode 100644 index 000000000..5cf0de904 --- /dev/null +++ b/ql/src/test/results/clientpositive/input1_limit.q.out @@ -0,0 +1,79 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src key)) (TOK_SELEXPR (TOK_COLREF src value))) (TOK_WHERE (< (TOK_COLREF src key) 100)) (TOK_LIMIT 10)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest2)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src key)) (TOK_SELEXPR (TOK_COLREF src value))) (TOK_WHERE (< (TOK_COLREF src key) 100)) (TOK_LIMIT 5))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src + Filter Operator + predicate: + expr: (key < 100) + type: Boolean + Select Operator + expressions: + expr: key + type: string + 
expr: value + type: string + Limit + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + Filter Operator + predicate: + expr: (key < 100) + type: Boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + Limit + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest2 + + Stage: Stage-0 + Move Operator + tables: + replace: + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + replace: + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest2 + + +86 val_86 +27 val_27 +98 val_98 +66 val_66 +37 val_37 +15 val_15 +82 val_82 +17 val_17 +0 val_0 +57 val_57 +86 val_86 +27 val_27 +98 val_98 +66 val_66 +37 val_37 diff --git a/ql/src/test/results/clientpositive/input2_limit.q.out b/ql/src/test/results/clientpositive/input2_limit.q.out new file mode 100644 index 000000000..443dc4f4b --- /dev/null +++ b/ql/src/test/results/clientpositive/input2_limit.q.out @@ -0,0 +1,38 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF x))) (TOK_WHERE (< (TOK_COLREF x key) 300)) (TOK_LIMIT 5))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + x + Filter Operator + predicate: + expr: (key < 300) + type: Boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + Limit + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: 5 + + +238 val_238 +86 val_86 +27 val_27 +165 val_165 +255 val_255 diff --git a/ql/src/test/results/clientpositive/input3.q.out b/ql/src/test/results/clientpositive/input3.q.out index 90620b22f..d366c9ec3 100644 --- a/ql/src/test/results/clientpositive/input3.q.out +++ b/ql/src/test/results/clientpositive/input3.q.out @@ -14,9 +14,9 @@ STAGE PLANS: Stage: Stage-0 Alter Table Operator: Alter Table - old name: TEST3b type: add columns - new columns: a array, b float, c map, X float + new columns: X float + old name: TEST3b a array @@ -33,9 +33,9 @@ STAGE PLANS: Stage: Stage-0 Alter Table Operator: Alter Table - old name: TEST3b - new name: TEST3c type: rename + new name: TEST3c + old name: TEST3b a array @@ -43,3 +43,22 @@ b float c map x float src src1 src_sequencefile src_thrift srcbucket srcpart test3a test3c +ABSTRACT SYNTAX TREE: + (TOK_ALTERTABLE_REPLACECOLS TEST3c (TOK_TABCOLLIST (TOK_TABCOL R1 TOK_INT) (TOK_TABCOL R2 TOK_FLOAT))) + +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Alter Table Operator: + Alter Table + type: replace columns + new columns: R1 int, R2 float + old name: TEST3c + + +r1 int +r2 float +Detailed Table 
Information: +Table(tableName:test3c,dbName:default,owner:njain,createTime:1224285029,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:r1,type:int,comment:null), FieldSchema(name:r2,type:float,comment:null)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/test3b,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{last_modified_by=njain,last_modified_time=1224285029,SORTBUCKETCOLSPREFIX=TRUE}) diff --git a/ql/src/test/results/clientpositive/input4.q.out b/ql/src/test/results/clientpositive/input4.q.out index 4f1d65c66..21d07fafe 100644 --- a/ql/src/test/results/clientpositive/input4.q.out +++ b/ql/src/test/results/clientpositive/input4.q.out @@ -8,18 +8,18 @@ STAGE DEPENDENCIES: STAGE PLANS: Stage: Stage-0 Copy - source: file:/data/zshao/trunk/VENDOR/hadoop-0.17/src/contrib/hive/data/files/kv1.txt - destination: file:/tmp/hive-zshao/-1658380790 + source: file:/home/njain/workspace/hadoop-0.17/src/contrib/hive/data/files/kv1.txt + destination: file:/tmp/hive-njain/-478939701 Stage: Stage-1 Move Operator tables: + replace: table: - name: input4 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - replace: + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: input4 val_238 238 @@ -522,4 +522,3 @@ val_403 403 val_400 400 val_200 200 val_97 97 - diff --git a/ql/src/test/results/clientpositive/input4_cb_delim.q.out b/ql/src/test/results/clientpositive/input4_cb_delim.q.out new file mode 100644 index 000000000..103b537db --- /dev/null +++ b/ql/src/test/results/clientpositive/input4_cb_delim.q.out @@ -0,0 +1,500 @@ +val_238 238 +val_86 86 +val_311 311 +val_27 27 +val_165 165 +val_409 409 +val_255 255 +val_278 278 +val_98 98 +val_484 484 +val_265 265 +val_193 193 +val_401 401 +val_150 150 +val_273 273 +val_224 224 +val_369 369 +val_66 66 +val_128 128 +val_213 213 +val_146 146 +val_406 406 +val_429 429 +val_374 374 +val_152 152 +val_469 469 +val_145 145 +val_495 495 +val_37 37 +val_327 327 +val_281 281 +val_277 277 +val_209 209 +val_15 15 +val_82 82 +val_403 403 +val_166 166 +val_417 417 +val_430 430 +val_252 252 +val_292 292 +val_219 219 +val_287 287 +val_153 153 +val_193 193 +val_338 338 +val_446 446 +val_459 459 +val_394 394 +val_237 237 +val_482 482 +val_174 174 +val_413 413 +val_494 494 +val_207 207 +val_199 199 +val_466 466 +val_208 208 +val_174 174 +val_399 399 +val_396 396 +val_247 247 +val_417 417 +val_489 489 +val_162 162 +val_377 377 +val_397 397 +val_309 309 +val_365 365 +val_266 266 +val_439 439 +val_342 342 +val_367 367 +val_325 325 +val_167 167 +val_195 195 +val_475 475 +val_17 17 +val_113 113 +val_155 155 +val_203 203 +val_339 339 +val_0 0 +val_455 455 +val_128 128 +val_311 311 +val_316 316 +val_57 57 +val_302 302 +val_205 205 +val_149 149 +val_438 438 +val_345 345 +val_129 129 +val_170 170 +val_20 20 +val_489 489 +val_157 157 +val_378 378 +val_221 221 +val_92 92 +val_111 111 +val_47 47 +val_72 72 +val_4 4 +val_280 280 +val_35 35 +val_427 427 +val_277 277 +val_208 208 +val_356 356 +val_399 399 +val_169 169 +val_382 382 +val_498 498 +val_125 125 +val_386 386 +val_437 437 +val_469 469 +val_192 192 
+val_286 286 +val_187 187 +val_176 176 +val_54 54 +val_459 459 +val_51 51 +val_138 138 +val_103 103 +val_239 239 +val_213 213 +val_216 216 +val_430 430 +val_278 278 +val_176 176 +val_289 289 +val_221 221 +val_65 65 +val_318 318 +val_332 332 +val_311 311 +val_275 275 +val_137 137 +val_241 241 +val_83 83 +val_333 333 +val_180 180 +val_284 284 +val_12 12 +val_230 230 +val_181 181 +val_67 67 +val_260 260 +val_404 404 +val_384 384 +val_489 489 +val_353 353 +val_373 373 +val_272 272 +val_138 138 +val_217 217 +val_84 84 +val_348 348 +val_466 466 +val_58 58 +val_8 8 +val_411 411 +val_230 230 +val_208 208 +val_348 348 +val_24 24 +val_463 463 +val_431 431 +val_179 179 +val_172 172 +val_42 42 +val_129 129 +val_158 158 +val_119 119 +val_496 496 +val_0 0 +val_322 322 +val_197 197 +val_468 468 +val_393 393 +val_454 454 +val_100 100 +val_298 298 +val_199 199 +val_191 191 +val_418 418 +val_96 96 +val_26 26 +val_165 165 +val_327 327 +val_230 230 +val_205 205 +val_120 120 +val_131 131 +val_51 51 +val_404 404 +val_43 43 +val_436 436 +val_156 156 +val_469 469 +val_468 468 +val_308 308 +val_95 95 +val_196 196 +val_288 288 +val_481 481 +val_457 457 +val_98 98 +val_282 282 +val_197 197 +val_187 187 +val_318 318 +val_318 318 +val_409 409 +val_470 470 +val_137 137 +val_369 369 +val_316 316 +val_169 169 +val_413 413 +val_85 85 +val_77 77 +val_0 0 +val_490 490 +val_87 87 +val_364 364 +val_179 179 +val_118 118 +val_134 134 +val_395 395 +val_282 282 +val_138 138 +val_238 238 +val_419 419 +val_15 15 +val_118 118 +val_72 72 +val_90 90 +val_307 307 +val_19 19 +val_435 435 +val_10 10 +val_277 277 +val_273 273 +val_306 306 +val_224 224 +val_309 309 +val_389 389 +val_327 327 +val_242 242 +val_369 369 +val_392 392 +val_272 272 +val_331 331 +val_401 401 +val_242 242 +val_452 452 +val_177 177 +val_226 226 +val_5 5 +val_497 497 +val_402 402 +val_396 396 +val_317 317 +val_395 395 +val_58 58 +val_35 35 +val_336 336 +val_95 95 +val_11 11 +val_168 168 +val_34 34 +val_229 229 +val_233 233 +val_143 143 +val_472 472 +val_322 322 +val_498 498 +val_160 160 +val_195 195 +val_42 42 +val_321 321 +val_430 430 +val_119 119 +val_489 489 +val_458 458 +val_78 78 +val_76 76 +val_41 41 +val_223 223 +val_492 492 +val_149 149 +val_449 449 +val_218 218 +val_228 228 +val_138 138 +val_453 453 +val_30 30 +val_209 209 +val_64 64 +val_468 468 +val_76 76 +val_74 74 +val_342 342 +val_69 69 +val_230 230 +val_33 33 +val_368 368 +val_103 103 +val_296 296 +val_113 113 +val_216 216 +val_367 367 +val_344 344 +val_167 167 +val_274 274 +val_219 219 +val_239 239 +val_485 485 +val_116 116 +val_223 223 +val_256 256 +val_263 263 +val_70 70 +val_487 487 +val_480 480 +val_401 401 +val_288 288 +val_191 191 +val_5 5 +val_244 244 +val_438 438 +val_128 128 +val_467 467 +val_432 432 +val_202 202 +val_316 316 +val_229 229 +val_469 469 +val_463 463 +val_280 280 +val_2 2 +val_35 35 +val_283 283 +val_331 331 +val_235 235 +val_80 80 +val_44 44 +val_193 193 +val_321 321 +val_335 335 +val_104 104 +val_466 466 +val_366 366 +val_175 175 +val_403 403 +val_483 483 +val_53 53 +val_105 105 +val_257 257 +val_406 406 +val_409 409 +val_190 190 +val_406 406 +val_401 401 +val_114 114 +val_258 258 +val_90 90 +val_203 203 +val_262 262 +val_348 348 +val_424 424 +val_12 12 +val_396 396 +val_201 201 +val_217 217 +val_164 164 +val_431 431 +val_454 454 +val_478 478 +val_298 298 +val_125 125 +val_431 431 +val_164 164 +val_424 424 +val_187 187 +val_382 382 +val_5 5 +val_70 70 +val_397 397 +val_480 480 +val_291 291 +val_24 24 +val_351 351 +val_255 255 +val_104 104 +val_70 70 +val_163 163 +val_438 438 
+val_119 119 +val_414 414 +val_200 200 +val_491 491 +val_237 237 +val_439 439 +val_360 360 +val_248 248 +val_479 479 +val_305 305 +val_417 417 +val_199 199 +val_444 444 +val_120 120 +val_429 429 +val_169 169 +val_443 443 +val_323 323 +val_325 325 +val_277 277 +val_230 230 +val_478 478 +val_178 178 +val_468 468 +val_310 310 +val_317 317 +val_333 333 +val_493 493 +val_460 460 +val_207 207 +val_249 249 +val_265 265 +val_480 480 +val_83 83 +val_136 136 +val_353 353 +val_172 172 +val_214 214 +val_462 462 +val_233 233 +val_406 406 +val_133 133 +val_175 175 +val_189 189 +val_454 454 +val_375 375 +val_401 401 +val_421 421 +val_407 407 +val_384 384 +val_256 256 +val_26 26 +val_134 134 +val_67 67 +val_384 384 +val_379 379 +val_18 18 +val_462 462 +val_492 492 +val_100 100 +val_298 298 +val_9 9 +val_341 341 +val_498 498 +val_146 146 +val_458 458 +val_362 362 +val_186 186 +val_285 285 +val_348 348 +val_167 167 +val_18 18 +val_273 273 +val_183 183 +val_281 281 +val_344 344 +val_97 97 +val_469 469 +val_315 315 +val_84 84 +val_28 28 +val_37 37 +val_448 448 +val_152 152 +val_348 348 +val_307 307 +val_194 194 +val_414 414 +val_477 477 +val_222 222 +val_126 126 +val_90 90 +val_169 169 +val_403 403 +val_400 400 +val_200 200 +val_97 97 diff --git a/ql/src/test/results/clientpositive/input5.q.out b/ql/src/test/results/clientpositive/input5.q.out index bbd2f6b96..f4d95f06c 100644 --- a/ql/src/test/results/clientpositive/input5.q.out +++ b/ql/src/test/results/clientpositive/input5.q.out @@ -8,20 +8,6 @@ STAGE DEPENDENCIES: STAGE PLANS: Stage: Stage-1 Map Reduce - Reduce Operator Tree: - Extract - Select Operator - expressions: - expr: 0 - type: string - expr: 1 - type: string - File Output Operator - table: - name: dest1 - serde: simple_meta - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat Alias -> Map Operator Tree: tmap:src_thrift Select Operator @@ -36,26 +22,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat Reduce Output Operator - tag: -1 key expressions: expr: tkey type: string + # partition fields: 1 + tag: -1 value expressions: expr: tkey type: string expr: tvalue type: string - # partition fields: 1 + Reduce Operator Tree: + Extract + Select Operator + expressions: + expr: 0 + type: string + expr: 1 + type: string + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 Stage: Stage-0 Move Operator tables: + replace: table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - replace: + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 [0,0,0] [{"myint":0,"mystring":"0"}] @@ -68,4 +68,3 @@ STAGE PLANS: [7,14,21] [{"myint":49,"mystring":"343"}] [8,16,24] [{"myint":64,"mystring":"512"}] [9,18,27] [{"myint":81,"mystring":"729"}] - diff --git a/ql/src/test/results/clientpositive/input6.q.out b/ql/src/test/results/clientpositive/input6.q.out index 724d98da6..bc233db1d 100644 --- a/ql/src/test/results/clientpositive/input6.q.out +++ b/ql/src/test/results/clientpositive/input6.q.out @@ -22,19 +22,19 @@ STAGE PLANS: type: string File Output Operator table: - name: dest1 - serde: simple_meta input format: 
org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 Stage: Stage-0 Move Operator tables: + replace: table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - replace: + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 diff --git a/ql/src/test/results/clientpositive/input7.q.out b/ql/src/test/results/clientpositive/input7.q.out index 0c1c6f842..08f63f11e 100644 --- a/ql/src/test/results/clientpositive/input7.q.out +++ b/ql/src/test/results/clientpositive/input7.q.out @@ -18,34 +18,34 @@ STAGE PLANS: type: string File Output Operator table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 Stage: Stage-0 Move Operator tables: + replace: table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - replace: + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 NULL 238 -NULL NULL +NULL NULL 311 -NULL NULL -NULL NULL -NULL NULL +NULL +NULL +NULL NULL 255 NULL 278 NULL 98 -NULL NULL -NULL NULL -NULL NULL +NULL +NULL +NULL NULL 401 NULL 150 NULL 273 @@ -56,7 +56,6 @@ NULL 128 NULL 213 NULL 146 NULL 406 -NULL NULL -NULL NULL -NULL NULL - +NULL +NULL +NULL diff --git a/ql/src/test/results/clientpositive/input8.q.out b/ql/src/test/results/clientpositive/input8.q.out index ea2d8353c..ab7085211 100644 --- a/ql/src/test/results/clientpositive/input8.q.out +++ b/ql/src/test/results/clientpositive/input8.q.out @@ -20,20 +20,20 @@ STAGE PLANS: type: Byte File Output Operator table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 Stage: Stage-0 Move Operator tables: + replace: table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - replace: + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 NULL NULL NULL @@ -61,4 +61,3 @@ NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL - diff --git a/ql/src/test/results/clientpositive/input9.q.out b/ql/src/test/results/clientpositive/input9.q.out index 3401258f3..b7d4a4944 100644 --- a/ql/src/test/results/clientpositive/input9.q.out +++ b/ql/src/test/results/clientpositive/input9.q.out @@ -22,19 +22,19 @@ STAGE PLANS: type: string File Output Operator table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 Stage: Stage-0 Move Operator tables: + replace: table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - replace: + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 diff --git 
a/ql/src/test/results/clientpositive/input_dfs.q.out b/ql/src/test/results/clientpositive/input_dfs.q.out new file mode 100644 index 000000000..9825414ec --- /dev/null +++ b/ql/src/test/results/clientpositive/input_dfs.q.out @@ -0,0 +1,500 @@ +238val_238 +86val_86 +311val_311 +27val_27 +165val_165 +409val_409 +255val_255 +278val_278 +98val_98 +484val_484 +265val_265 +193val_193 +401val_401 +150val_150 +273val_273 +224val_224 +369val_369 +66val_66 +128val_128 +213val_213 +146val_146 +406val_406 +429val_429 +374val_374 +152val_152 +469val_469 +145val_145 +495val_495 +37val_37 +327val_327 +281val_281 +277val_277 +209val_209 +15val_15 +82val_82 +403val_403 +166val_166 +417val_417 +430val_430 +252val_252 +292val_292 +219val_219 +287val_287 +153val_153 +193val_193 +338val_338 +446val_446 +459val_459 +394val_394 +237val_237 +482val_482 +174val_174 +413val_413 +494val_494 +207val_207 +199val_199 +466val_466 +208val_208 +174val_174 +399val_399 +396val_396 +247val_247 +417val_417 +489val_489 +162val_162 +377val_377 +397val_397 +309val_309 +365val_365 +266val_266 +439val_439 +342val_342 +367val_367 +325val_325 +167val_167 +195val_195 +475val_475 +17val_17 +113val_113 +155val_155 +203val_203 +339val_339 +0val_0 +455val_455 +128val_128 +311val_311 +316val_316 +57val_57 +302val_302 +205val_205 +149val_149 +438val_438 +345val_345 +129val_129 +170val_170 +20val_20 +489val_489 +157val_157 +378val_378 +221val_221 +92val_92 +111val_111 +47val_47 +72val_72 +4val_4 +280val_280 +35val_35 +427val_427 +277val_277 +208val_208 +356val_356 +399val_399 +169val_169 +382val_382 +498val_498 +125val_125 +386val_386 +437val_437 +469val_469 +192val_192 +286val_286 +187val_187 +176val_176 +54val_54 +459val_459 +51val_51 +138val_138 +103val_103 +239val_239 +213val_213 +216val_216 +430val_430 +278val_278 +176val_176 +289val_289 +221val_221 +65val_65 +318val_318 +332val_332 +311val_311 +275val_275 +137val_137 +241val_241 +83val_83 +333val_333 +180val_180 +284val_284 +12val_12 +230val_230 +181val_181 +67val_67 +260val_260 +404val_404 +384val_384 +489val_489 +353val_353 +373val_373 +272val_272 +138val_138 +217val_217 +84val_84 +348val_348 +466val_466 +58val_58 +8val_8 +411val_411 +230val_230 +208val_208 +348val_348 +24val_24 +463val_463 +431val_431 +179val_179 +172val_172 +42val_42 +129val_129 +158val_158 +119val_119 +496val_496 +0val_0 +322val_322 +197val_197 +468val_468 +393val_393 +454val_454 +100val_100 +298val_298 +199val_199 +191val_191 +418val_418 +96val_96 +26val_26 +165val_165 +327val_327 +230val_230 +205val_205 +120val_120 +131val_131 +51val_51 +404val_404 +43val_43 +436val_436 +156val_156 +469val_469 +468val_468 +308val_308 +95val_95 +196val_196 +288val_288 +481val_481 +457val_457 +98val_98 +282val_282 +197val_197 +187val_187 +318val_318 +318val_318 +409val_409 +470val_470 +137val_137 +369val_369 +316val_316 +169val_169 +413val_413 +85val_85 +77val_77 +0val_0 +490val_490 +87val_87 +364val_364 +179val_179 +118val_118 +134val_134 +395val_395 +282val_282 +138val_138 +238val_238 +419val_419 +15val_15 +118val_118 +72val_72 +90val_90 +307val_307 +19val_19 +435val_435 +10val_10 +277val_277 +273val_273 +306val_306 +224val_224 +309val_309 +389val_389 +327val_327 +242val_242 +369val_369 +392val_392 +272val_272 +331val_331 +401val_401 +242val_242 +452val_452 +177val_177 +226val_226 +5val_5 +497val_497 +402val_402 +396val_396 +317val_317 +395val_395 +58val_58 +35val_35 +336val_336 +95val_95 +11val_11 +168val_168 +34val_34 +229val_229 +233val_233 +143val_143 +472val_472 +322val_322 +498val_498 +160val_160 +195val_195 +42val_42 
+321val_321 +430val_430 +119val_119 +489val_489 +458val_458 +78val_78 +76val_76 +41val_41 +223val_223 +492val_492 +149val_149 +449val_449 +218val_218 +228val_228 +138val_138 +453val_453 +30val_30 +209val_209 +64val_64 +468val_468 +76val_76 +74val_74 +342val_342 +69val_69 +230val_230 +33val_33 +368val_368 +103val_103 +296val_296 +113val_113 +216val_216 +367val_367 +344val_344 +167val_167 +274val_274 +219val_219 +239val_239 +485val_485 +116val_116 +223val_223 +256val_256 +263val_263 +70val_70 +487val_487 +480val_480 +401val_401 +288val_288 +191val_191 +5val_5 +244val_244 +438val_438 +128val_128 +467val_467 +432val_432 +202val_202 +316val_316 +229val_229 +469val_469 +463val_463 +280val_280 +2val_2 +35val_35 +283val_283 +331val_331 +235val_235 +80val_80 +44val_44 +193val_193 +321val_321 +335val_335 +104val_104 +466val_466 +366val_366 +175val_175 +403val_403 +483val_483 +53val_53 +105val_105 +257val_257 +406val_406 +409val_409 +190val_190 +406val_406 +401val_401 +114val_114 +258val_258 +90val_90 +203val_203 +262val_262 +348val_348 +424val_424 +12val_12 +396val_396 +201val_201 +217val_217 +164val_164 +431val_431 +454val_454 +478val_478 +298val_298 +125val_125 +431val_431 +164val_164 +424val_424 +187val_187 +382val_382 +5val_5 +70val_70 +397val_397 +480val_480 +291val_291 +24val_24 +351val_351 +255val_255 +104val_104 +70val_70 +163val_163 +438val_438 +119val_119 +414val_414 +200val_200 +491val_491 +237val_237 +439val_439 +360val_360 +248val_248 +479val_479 +305val_305 +417val_417 +199val_199 +444val_444 +120val_120 +429val_429 +169val_169 +443val_443 +323val_323 +325val_325 +277val_277 +230val_230 +478val_478 +178val_178 +468val_468 +310val_310 +317val_317 +333val_333 +493val_493 +460val_460 +207val_207 +249val_249 +265val_265 +480val_480 +83val_83 +136val_136 +353val_353 +172val_172 +214val_214 +462val_462 +233val_233 +406val_406 +133val_133 +175val_175 +189val_189 +454val_454 +375val_375 +401val_401 +421val_421 +407val_407 +384val_384 +256val_256 +26val_26 +134val_134 +67val_67 +384val_384 +379val_379 +18val_18 +462val_462 +492val_492 +100val_100 +298val_298 +9val_9 +341val_341 +498val_498 +146val_146 +458val_458 +362val_362 +186val_186 +285val_285 +348val_348 +167val_167 +18val_18 +273val_273 +183val_183 +281val_281 +344val_344 +97val_97 +469val_469 +315val_315 +84val_84 +28val_28 +37val_37 +448val_448 +152val_152 +348val_348 +307val_307 +194val_194 +414val_414 +477val_477 +222val_222 +126val_126 +90val_90 +169val_169 +403val_403 +400val_400 +200val_200 +97val_97 diff --git a/ql/src/test/results/clientpositive/input_dynamicserde.q.out b/ql/src/test/results/clientpositive/input_dynamicserde.q.out new file mode 100644 index 000000000..33f10961e --- /dev/null +++ b/ql/src/test/results/clientpositive/input_dynamicserde.q.out @@ -0,0 +1,52 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src_thrift lint)) (TOK_SELEXPR (TOK_COLREF src_thrift lstring)) (TOK_SELEXPR (TOK_COLREF src_thrift mstringstring)) (TOK_SELEXPR (TOK_COLREF src_thrift aint)) (TOK_SELEXPR (TOK_COLREF src_thrift astring))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src_thrift + Select Operator + expressions: + expr: lint + type: array + expr: lstring + type: array + expr: mstringstring + type: map + expr: aint + type: int + expr: astring + type: string + File Output Operator + table: + input format: 
org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe + name: dest1 + + Stage: Stage-0 + Move Operator + tables: + replace: + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe + name: dest1 + + +[0,0,0] ["0","0","0"] {"key_0":"value_0"} -1220068486 record_0 +[1,2,3] ["10","100","1000"] {"key_1":"value_1"} -1147582750 record_1 +[2,4,6] ["20","200","2000"] {"key_2":"value_2"} -2091002570 record_2 +[3,6,9] ["30","300","3000"] {"key_3":"value_3"} -1587372273 record_3 +[4,8,12] ["40","400","4000"] {"key_4":"value_4"} -240543265 record_4 +[5,10,15] ["50","500","5000"] {"key_5":"value_5"} 1914724537 record_5 +[6,12,18] ["60","600","6000"] {"key_6":"value_6"} -1281615210 record_6 +[7,14,21] ["70","700","7000"] {"key_7":"value_7"} 1539139264 record_7 +[8,16,24] ["80","800","8000"] {"key_8":"value_8"} -1103622763 record_8 +[9,18,27] ["90","900","9000"] {"key_9":"value_9"} -1883609167 record_9 diff --git a/ql/src/test/results/clientpositive/input_limit.q.out b/ql/src/test/results/clientpositive/input_limit.q.out new file mode 100644 index 000000000..f79636b75 --- /dev/null +++ b/ql/src/test/results/clientpositive/input_limit.q.out @@ -0,0 +1,32 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF x))) (TOK_LIMIT 20))) + +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: 20 + + +238 val_238 +86 val_86 +311 val_311 +27 val_27 +165 val_165 +409 val_409 +255 val_255 +278 val_278 +98 val_98 +484 val_484 +265 val_265 +193 val_193 +401 val_401 +150 val_150 +273 val_273 +224 val_224 +369 val_369 +66 val_66 +128 val_128 +213 val_213 diff --git a/ql/src/test/results/clientpositive/input_part1.q.out b/ql/src/test/results/clientpositive/input_part1.q.out index ad970c815..68aa10848 100644 --- a/ql/src/test/results/clientpositive/input_part1.q.out +++ b/ql/src/test/results/clientpositive/input_part1.q.out @@ -25,21 +25,70 @@ STAGE PLANS: expr: ds type: string File Output Operator + directory: /tmp/hive-njain/13222008/293248514.10000.insclause-0 table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + properties: + name dest1 + serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} + serialization.format 1 + columns key,value,hr,ds + SORTBUCKETCOLSPREFIX TRUE + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + Needs Tagging: + Path -> Alias: + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + Path -> Partition: + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + Partition + partition values: + ds 2008-04-08 + hr 12 + + input format: org.apache.hadoop.mapred.TextInputFormat + 
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + properties: + name srcpart + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: srcpart Stage: Stage-0 Move Operator tables: + replace: + source: /tmp/hive-njain/13222008/293248514.10000.insclause-0 table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - replace: + properties: + name dest1 + serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} + serialization.format 1 + columns key,value,hr,ds + SORTBUCKETCOLSPREFIX TRUE + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 86 val_86 12 2008-04-08 @@ -126,4 +175,3 @@ STAGE PLANS: 37 val_37 12 2008-04-08 90 val_90 12 2008-04-08 97 val_97 12 2008-04-08 - diff --git a/ql/src/test/results/clientpositive/input_part2.q.out b/ql/src/test/results/clientpositive/input_part2.q.out new file mode 100644 index 000000000..394854b79 --- /dev/null +++ b/ql/src/test/results/clientpositive/input_part2.q.out @@ -0,0 +1,333 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF srcpart)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF srcpart key)) (TOK_SELEXPR (TOK_COLREF srcpart value)) (TOK_SELEXPR (TOK_COLREF srcpart hr)) (TOK_SELEXPR (TOK_COLREF srcpart ds))) (TOK_WHERE (and (and (< (TOK_COLREF srcpart key) 100) (= (TOK_COLREF srcpart ds) '2008-04-08')) (= (TOK_COLREF srcpart hr) '12')))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest2)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF srcpart key)) (TOK_SELEXPR (TOK_COLREF srcpart value)) (TOK_SELEXPR (TOK_COLREF srcpart hr)) (TOK_SELEXPR (TOK_COLREF srcpart ds))) (TOK_WHERE (and (and (< (TOK_COLREF srcpart key) 100) (= (TOK_COLREF srcpart ds) '2008-04-09')) (= (TOK_COLREF srcpart hr) '12'))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + srcpart + Filter Operator + predicate: + expr: (((key < 100) and (ds = '2008-04-08')) and (hr = '12')) + type: Boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: hr + type: string + expr: ds + type: string + File Output Operator + directory: /tmp/hive-njain/925269835/11972654.10000.insclause-0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + properties: + name dest1 + serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} + serialization.format 1 + columns key,value,hr,ds + SORTBUCKETCOLSPREFIX TRUE + bucket_count -1 + 
serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + Filter Operator + predicate: + expr: (((key < 100) and (ds = '2008-04-09')) and (hr = '12')) + type: Boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: hr + type: string + expr: ds + type: string + File Output Operator + directory: /tmp/hive-njain/925269835/11972654.10001.insclause-1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + properties: + name dest2 + serialization.ddl struct dest2 { i32 key, string value, string hr, string ds} + serialization.format 1 + columns key,value,hr,ds + SORTBUCKETCOLSPREFIX TRUE + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest2 + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest2 + Needs Tagging: + Path -> Alias: + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 + Path -> Partition: + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + Partition + partition values: + ds 2008-04-08 + hr 12 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + properties: + name srcpart + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: srcpart + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 + Partition + partition values: + ds 2008-04-09 + hr 12 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + properties: + name srcpart + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: srcpart + + Stage: Stage-0 + Move Operator + tables: + replace: + 
source: /tmp/hive-njain/925269835/11972654.10000.insclause-0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + properties: + name dest1 + serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} + serialization.format 1 + columns key,value,hr,ds + SORTBUCKETCOLSPREFIX TRUE + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + replace: + source: /tmp/hive-njain/925269835/11972654.10001.insclause-1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + properties: + name dest2 + serialization.ddl struct dest2 { i32 key, string value, string hr, string ds} + serialization.format 1 + columns key,value,hr,ds + SORTBUCKETCOLSPREFIX TRUE + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest2 + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest2 + + +86 val_86 12 2008-04-08 +27 val_27 12 2008-04-08 +98 val_98 12 2008-04-08 +66 val_66 12 2008-04-08 +37 val_37 12 2008-04-08 +15 val_15 12 2008-04-08 +82 val_82 12 2008-04-08 +17 val_17 12 2008-04-08 +0 val_0 12 2008-04-08 +57 val_57 12 2008-04-08 +20 val_20 12 2008-04-08 +92 val_92 12 2008-04-08 +47 val_47 12 2008-04-08 +72 val_72 12 2008-04-08 +4 val_4 12 2008-04-08 +35 val_35 12 2008-04-08 +54 val_54 12 2008-04-08 +51 val_51 12 2008-04-08 +65 val_65 12 2008-04-08 +83 val_83 12 2008-04-08 +12 val_12 12 2008-04-08 +67 val_67 12 2008-04-08 +84 val_84 12 2008-04-08 +58 val_58 12 2008-04-08 +8 val_8 12 2008-04-08 +24 val_24 12 2008-04-08 +42 val_42 12 2008-04-08 +0 val_0 12 2008-04-08 +96 val_96 12 2008-04-08 +26 val_26 12 2008-04-08 +51 val_51 12 2008-04-08 +43 val_43 12 2008-04-08 +95 val_95 12 2008-04-08 +98 val_98 12 2008-04-08 +85 val_85 12 2008-04-08 +77 val_77 12 2008-04-08 +0 val_0 12 2008-04-08 +87 val_87 12 2008-04-08 +15 val_15 12 2008-04-08 +72 val_72 12 2008-04-08 +90 val_90 12 2008-04-08 +19 val_19 12 2008-04-08 +10 val_10 12 2008-04-08 +5 val_5 12 2008-04-08 +58 val_58 12 2008-04-08 +35 val_35 12 2008-04-08 +95 val_95 12 2008-04-08 +11 val_11 12 2008-04-08 +34 val_34 12 2008-04-08 +42 val_42 12 2008-04-08 +78 val_78 12 2008-04-08 +76 val_76 12 2008-04-08 +41 val_41 12 2008-04-08 +30 val_30 12 2008-04-08 +64 val_64 12 2008-04-08 +76 val_76 12 2008-04-08 +74 val_74 12 2008-04-08 +69 val_69 12 2008-04-08 +33 val_33 12 2008-04-08 +70 val_70 12 2008-04-08 +5 val_5 12 2008-04-08 +2 val_2 12 2008-04-08 +35 val_35 12 2008-04-08 +80 val_80 12 2008-04-08 +44 val_44 12 2008-04-08 +53 val_53 12 2008-04-08 +90 val_90 12 2008-04-08 +12 val_12 12 2008-04-08 +5 val_5 12 2008-04-08 +70 val_70 12 2008-04-08 +24 val_24 12 2008-04-08 +70 val_70 12 2008-04-08 +83 val_83 12 2008-04-08 +26 val_26 12 2008-04-08 +67 val_67 12 2008-04-08 +18 val_18 12 2008-04-08 +9 val_9 12 2008-04-08 +18 val_18 12 2008-04-08 +97 val_97 12 
2008-04-08 +84 val_84 12 2008-04-08 +28 val_28 12 2008-04-08 +37 val_37 12 2008-04-08 +90 val_90 12 2008-04-08 +97 val_97 12 2008-04-08 +86 val_86 12 2008-04-09 +27 val_27 12 2008-04-09 +98 val_98 12 2008-04-09 +66 val_66 12 2008-04-09 +37 val_37 12 2008-04-09 +15 val_15 12 2008-04-09 +82 val_82 12 2008-04-09 +17 val_17 12 2008-04-09 +0 val_0 12 2008-04-09 +57 val_57 12 2008-04-09 +20 val_20 12 2008-04-09 +92 val_92 12 2008-04-09 +47 val_47 12 2008-04-09 +72 val_72 12 2008-04-09 +4 val_4 12 2008-04-09 +35 val_35 12 2008-04-09 +54 val_54 12 2008-04-09 +51 val_51 12 2008-04-09 +65 val_65 12 2008-04-09 +83 val_83 12 2008-04-09 +12 val_12 12 2008-04-09 +67 val_67 12 2008-04-09 +84 val_84 12 2008-04-09 +58 val_58 12 2008-04-09 +8 val_8 12 2008-04-09 +24 val_24 12 2008-04-09 +42 val_42 12 2008-04-09 +0 val_0 12 2008-04-09 +96 val_96 12 2008-04-09 +26 val_26 12 2008-04-09 +51 val_51 12 2008-04-09 +43 val_43 12 2008-04-09 +95 val_95 12 2008-04-09 +98 val_98 12 2008-04-09 +85 val_85 12 2008-04-09 +77 val_77 12 2008-04-09 +0 val_0 12 2008-04-09 +87 val_87 12 2008-04-09 +15 val_15 12 2008-04-09 +72 val_72 12 2008-04-09 +90 val_90 12 2008-04-09 +19 val_19 12 2008-04-09 +10 val_10 12 2008-04-09 +5 val_5 12 2008-04-09 +58 val_58 12 2008-04-09 +35 val_35 12 2008-04-09 +95 val_95 12 2008-04-09 +11 val_11 12 2008-04-09 +34 val_34 12 2008-04-09 +42 val_42 12 2008-04-09 +78 val_78 12 2008-04-09 +76 val_76 12 2008-04-09 +41 val_41 12 2008-04-09 +30 val_30 12 2008-04-09 +64 val_64 12 2008-04-09 +76 val_76 12 2008-04-09 +74 val_74 12 2008-04-09 +69 val_69 12 2008-04-09 +33 val_33 12 2008-04-09 +70 val_70 12 2008-04-09 +5 val_5 12 2008-04-09 +2 val_2 12 2008-04-09 +35 val_35 12 2008-04-09 +80 val_80 12 2008-04-09 +44 val_44 12 2008-04-09 +53 val_53 12 2008-04-09 +90 val_90 12 2008-04-09 +12 val_12 12 2008-04-09 +5 val_5 12 2008-04-09 +70 val_70 12 2008-04-09 +24 val_24 12 2008-04-09 +70 val_70 12 2008-04-09 +83 val_83 12 2008-04-09 +26 val_26 12 2008-04-09 +67 val_67 12 2008-04-09 +18 val_18 12 2008-04-09 +9 val_9 12 2008-04-09 +18 val_18 12 2008-04-09 +97 val_97 12 2008-04-09 +84 val_84 12 2008-04-09 +28 val_28 12 2008-04-09 +37 val_37 12 2008-04-09 +90 val_90 12 2008-04-09 +97 val_97 12 2008-04-09 diff --git a/ql/src/test/results/clientpositive/input_testsequencefile.q.out b/ql/src/test/results/clientpositive/input_testsequencefile.q.out index f3f0d982f..e71744147 100644 --- a/ql/src/test/results/clientpositive/input_testsequencefile.q.out +++ b/ql/src/test/results/clientpositive/input_testsequencefile.q.out @@ -18,20 +18,20 @@ STAGE PLANS: type: string File Output Operator table: - name: dest4_sequencefile - serde: simple_meta input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.mapred.SequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest4_sequencefile Stage: Stage-0 Move Operator tables: + replace: table: - name: dest4_sequencefile - serde: simple_meta input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.mapred.SequenceFileOutputFormat - replace: + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest4_sequencefile 238 val_238 @@ -534,4 +534,3 @@ STAGE PLANS: 400 val_400 200 val_200 97 val_97 - diff --git a/ql/src/test/results/clientpositive/input_testxpath.q.out b/ql/src/test/results/clientpositive/input_testxpath.q.out index 14c6f2b4a..2f8df420e 100644 --- a/ql/src/test/results/clientpositive/input_testxpath.q.out +++ 
b/ql/src/test/results/clientpositive/input_testxpath.q.out @@ -1,5 +1,5 @@ ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR ([ (TOK_COLREF src_thrift lint) 1)) (TOK_SELEXPR (. ([ (TOK_COLREF src_thrift lintstring) 0) mystring))))) + (TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR ([ (TOK_COLREF src_thrift lint) 1)) (TOK_SELEXPR (. ([ (TOK_COLREF src_thrift lintstring) 0) mystring)) (TOK_SELEXPR ([ (TOK_COLREF src_thrift mstringstring) 'key_2'))))) STAGE DEPENDENCIES: Stage-1 is a root stage @@ -16,32 +16,33 @@ STAGE PLANS: type: int expr: lintstring[0].mystring type: string + expr: mstringstring['key_2'] + type: string File Output Operator table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 Stage: Stage-0 Move Operator tables: + replace: table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - replace: - + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 -0 0 -2 1 -4 8 -6 27 -8 64 -10 125 -12 216 -14 343 -16 512 -18 729 +0 0 NULL +2 1 NULL +4 8 value_2 +6 27 NULL +8 64 NULL +10 125 NULL +12 216 NULL +14 343 NULL +16 512 NULL +18 729 NULL diff --git a/ql/src/test/results/clientpositive/input_testxpath2.q.out b/ql/src/test/results/clientpositive/input_testxpath2.q.out new file mode 100644 index 000000000..a563d9e82 --- /dev/null +++ b/ql/src/test/results/clientpositive/input_testxpath2.q.out @@ -0,0 +1,52 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION size (TOK_COLREF src_thrift lint))) (TOK_SELEXPR (TOK_FUNCTION size (TOK_COLREF src_thrift lintstring))) (TOK_SELEXPR (TOK_FUNCTION size (TOK_COLREF src_thrift mstringstring)))) (TOK_WHERE (AND (TOK_FUNCTION TOK_ISNOTNULL (TOK_COLREF src_thrift lint)) (NOT (TOK_FUNCTION TOK_ISNULL (TOK_COLREF src_thrift mstringstring))))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src_thrift + Filter Operator + predicate: + expr: (lint is not null and not mstringstring is null) + type: Boolean + Select Operator + expressions: + expr: size(lint) + type: int + expr: size(lintstring) + type: int + expr: size(mstringstring) + type: int + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + + Stage: Stage-0 + Move Operator + tables: + replace: + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + + +3 1 1 +3 1 1 +3 1 1 +3 1 1 +3 1 1 +3 1 1 +3 1 1 +3 1 1 +3 1 1 +3 1 1 diff --git a/ql/src/test/results/clientpositive/input_testxpath3.q.out b/ql/src/test/results/clientpositive/input_testxpath3.q.out new file mode 100644 index 000000000..79b6843ce --- /dev/null +++ 
b/ql/src/test/results/clientpositive/input_testxpath3.q.out @@ -0,0 +1,38 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR ([ (TOK_COLREF src_thrift mstringstring) 'key_9')) (TOK_SELEXPR (. (TOK_COLREF src_thrift lintstring) myint))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src_thrift + Select Operator + expressions: + expr: mstringstring['key_9'] + type: string + expr: lintstring.myint + type: array + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +NULL [0] +NULL [1] +NULL [4] +NULL [9] +NULL [16] +NULL [25] +NULL [36] +NULL [49] +NULL [64] +value_9 [81] diff --git a/ql/src/test/results/clientpositive/inputddl1.q.out b/ql/src/test/results/clientpositive/inputddl1.q.out index ced4f82f3..98d3b70e4 100644 --- a/ql/src/test/results/clientpositive/inputddl1.q.out +++ b/ql/src/test/results/clientpositive/inputddl1.q.out @@ -8,10 +8,10 @@ STAGE PLANS: Stage: Stage-0 Create Table Operator: Create Table - isExternal: - name: INPUTDDL1 + columns: key int, value string # buckets: -1 + name: INPUTDDL1 + isExternal: isSequenceFile: - columns: key int, value string diff --git a/ql/src/test/results/clientpositive/inputddl2.q.out b/ql/src/test/results/clientpositive/inputddl2.q.out index 307771dcb..fb24c4816 100644 --- a/ql/src/test/results/clientpositive/inputddl2.q.out +++ b/ql/src/test/results/clientpositive/inputddl2.q.out @@ -8,12 +8,12 @@ STAGE PLANS: Stage: Stage-0 Create Table Operator: Create Table - isExternal: - name: INPUTDDL2 - # buckets: -1 - isSequenceFile: columns: key int, value string + # buckets: -1 partition columns: ds datetime, country string + name: INPUTDDL2 + isExternal: + isSequenceFile: key int diff --git a/ql/src/test/results/clientpositive/inputddl3.q.out b/ql/src/test/results/clientpositive/inputddl3.q.out index 96258e612..da323d1ab 100644 --- a/ql/src/test/results/clientpositive/inputddl3.q.out +++ b/ql/src/test/results/clientpositive/inputddl3.q.out @@ -8,12 +8,12 @@ STAGE PLANS: Stage: Stage-0 Create Table Operator: Create Table - isExternal: - name: INPUTDDL3 - # buckets: -1 + columns: key int, value string field delimiter: + # buckets: -1 + name: INPUTDDL3 + isExternal: isSequenceFile: - columns: key int, value string key int diff --git a/ql/src/test/results/clientpositive/inputddl4.q.out b/ql/src/test/results/clientpositive/inputddl4.q.out index 8d02ee153..ef3d5e2f6 100644 --- a/ql/src/test/results/clientpositive/inputddl4.q.out +++ b/ql/src/test/results/clientpositive/inputddl4.q.out @@ -7,3 +7,14 @@ properties map ip string 'IP Address of the User' ds datetime country string +viewtime datetime +userid int +page_url string +referrer_url string +friends array +properties map +ip string 'IP Address of the User' +ds datetime +country string +Detailed Table Information: +Table(tableName:inputddl4,dbName:default,owner:njain,createTime:1224285030,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:viewtime,type:datetime,comment:null), FieldSchema(name:userid,type:int,comment:null), FieldSchema(name:page_url,type:string,comment:null), FieldSchema(name:referrer_url,type:string,comment:null), FieldSchema(name:friends,type:array,comment:null), 
FieldSchema(name:properties,type:map,comment:null), FieldSchema(name:ip,type:string,comment:'IP Address of the User')],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/inputddl4,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:32,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{serialization.format=1}),bucketCols:[userid],sortCols:[Order(col:viewtime,order:1)],parameters:{}),partitionKeys:[FieldSchema(name:ds,type:datetime,comment:null), FieldSchema(name:country,type:string,comment:null)],parameters:{comment='This is the page view table'}) diff --git a/ql/src/test/results/clientpositive/inputddl5.q.out b/ql/src/test/results/clientpositive/inputddl5.q.out index 5ca6b8b03..172c19c46 100644 --- a/ql/src/test/results/clientpositive/inputddl5.q.out +++ b/ql/src/test/results/clientpositive/inputddl5.q.out @@ -1,5 +1,3 @@ name string 邵铮 - 1 - diff --git a/ql/src/test/results/clientpositive/inputddl6.q.out b/ql/src/test/results/clientpositive/inputddl6.q.out new file mode 100644 index 000000000..07786d110 --- /dev/null +++ b/ql/src/test/results/clientpositive/inputddl6.q.out @@ -0,0 +1,29 @@ +key string +value string +ds datetime +Detailed Table Information: +Table(tableName:inputddl6,dbName:default,owner:njain,createTime:1224285279,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:key,type:string,comment:null), FieldSchema(name:value,type:string,comment:null)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/inputddl6,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:ds,type:datetime,comment:null)],parameters:{SORTBUCKETCOLSPREFIX=TRUE}) +key string +value string +ds datetime + +Detailed Partition Information: +Partition(values:[2008-04-08],dbName:default,tableName:inputddl6,createTime:0,lastAccessTime:0,sd:StorageDescriptor(cols:[FieldSchema(name:key,type:string,comment:null), FieldSchema(name:value,type:string,comment:null)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/inputddl6/ds=2008-04-08,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),parameters:{}) +ds=2008-04-08 +ds=2008-04-09 +ds=2008-04-09 +ABSTRACT SYNTAX TREE: + (TOK_DESCTABLE (TOK_TAB INPUTDDL6 (TOK_PARTSPEC (TOK_PARTVAL ds '2008-04-09'))) EXTENDED) + +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Describe Table Operator: + Describe Table + partition: + ds 2008-04-09 + table: INPUTDDL6 + + diff --git a/ql/src/test/results/clientpositive/join1.q.out b/ql/src/test/results/clientpositive/join1.q.out index e744eb7a1..ef1949d1d 100644 --- a/ql/src/test/results/clientpositive/join1.q.out +++ b/ql/src/test/results/clientpositive/join1.q.out @@ -8,60 +8,60 @@ STAGE DEPENDENCIES: STAGE PLANS: Stage: 
Stage-1 Map Reduce - Reduce Operator Tree: - Join Operator - condition expressions: - 0 {VALUE.0} {VALUE.1} - 1 {VALUE.0} {VALUE.1} - condition map: - Inner Join 0 to 1 - Select Operator - expressions: - expr: 0 - type: string - expr: 3 - type: string - File Output Operator - table: - name: dest1 - serde: simple_meta - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat Alias -> Map Operator Tree: src2 Reduce Output Operator - tag: 1 key expressions: expr: key type: string + # partition fields: 1 + tag: 1 value expressions: expr: key type: string expr: value type: string - # partition fields: 1 src1 Reduce Output Operator - tag: 0 key expressions: expr: key type: string + # partition fields: 1 + tag: 0 value expressions: expr: key type: string expr: value type: string - # partition fields: 1 + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {VALUE.0} {VALUE.1} + 1 {VALUE.0} {VALUE.1} + Select Operator + expressions: + expr: 0 + type: string + expr: 3 + type: string + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 Stage: Stage-0 Move Operator tables: + replace: table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - replace: + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 0 val_0 @@ -1092,4 +1092,3 @@ STAGE PLANS: 98 val_98 98 val_98 98 val_98 - diff --git a/ql/src/test/results/clientpositive/join2.q.out b/ql/src/test/results/clientpositive/join2.q.out index fc899268a..137678774 100644 --- a/ql/src/test/results/clientpositive/join2.q.out +++ b/ql/src/test/results/clientpositive/join2.q.out @@ -10,83 +10,65 @@ STAGE DEPENDENCIES: STAGE PLANS: Stage: Stage-1 Map Reduce - Reduce Operator Tree: - Join Operator - condition expressions: - 0 {VALUE.0} {VALUE.1} - 1 {VALUE.0} {VALUE.1} - condition map: - Inner Join 0 to 1 - File Output Operator - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat Alias -> Map Operator Tree: src2 Reduce Output Operator - tag: 1 key expressions: expr: key type: string + # partition fields: 1 + tag: 1 value expressions: expr: key type: string expr: value type: string - # partition fields: 1 src1 Reduce Output Operator - tag: 0 key expressions: expr: key type: string + # partition fields: 1 + tag: 0 value expressions: expr: key type: string expr: value type: string - # partition fields: 1 - - Stage: Stage-2 - Map Reduce Reduce Operator Tree: Join Operator - condition expressions: - 0 {VALUE.0} {VALUE.1} {VALUE.2} {VALUE.3} - 1 {VALUE.0} {VALUE.1} condition map: Inner Join 0 to 1 - Select Operator - expressions: - expr: 2 - type: string - expr: 5 - type: string - File Output Operator - table: - name: dest1 - serde: simple_meta - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + condition expressions: + 0 {VALUE.0} {VALUE.1} + 1 {VALUE.0} {VALUE.1} + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + Stage: Stage-2 + Map Reduce Alias -> 
Map Operator Tree: src3 Reduce Output Operator - tag: 1 key expressions: expr: UDFToDouble(key) type: double + # partition fields: 1 + tag: 1 value expressions: expr: key type: string expr: value type: string - # partition fields: 1 $INTNAME Reduce Output Operator - tag: 0 key expressions: expr: (UDFToDouble(0) + UDFToDouble(2)) type: double + # partition fields: 1 + tag: 0 value expressions: expr: 2 type: string @@ -96,17 +78,35 @@ STAGE PLANS: type: string expr: 1 type: string - # partition fields: 1 + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {VALUE.0} {VALUE.1} {VALUE.2} {VALUE.3} + 1 {VALUE.0} {VALUE.1} + Select Operator + expressions: + expr: 2 + type: string + expr: 5 + type: string + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 Stage: Stage-0 Move Operator tables: + replace: table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - replace: + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 0 val_0 @@ -581,4 +581,3 @@ STAGE PLANS: 42 val_84 42 val_84 43 val_86 - diff --git a/ql/src/test/results/clientpositive/join3.q.out b/ql/src/test/results/clientpositive/join3.q.out index b87fa8a02..6f794f3d4 100644 --- a/ql/src/test/results/clientpositive/join3.q.out +++ b/ql/src/test/results/clientpositive/join3.q.out @@ -8,74 +8,74 @@ STAGE DEPENDENCIES: STAGE PLANS: Stage: Stage-1 Map Reduce - Reduce Operator Tree: - Join Operator - condition expressions: - 0 {VALUE.0} {VALUE.1} - 1 {VALUE.0} {VALUE.1} - 2 {VALUE.0} {VALUE.1} - condition map: - Inner Join 0 to 1 - Inner Join 0 to 1 - Select Operator - expressions: - expr: 0 - type: string - expr: 5 - type: string - File Output Operator - table: - name: dest1 - serde: simple_meta - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat Alias -> Map Operator Tree: src2 Reduce Output Operator - tag: 1 key expressions: expr: key type: string + # partition fields: 1 + tag: 1 value expressions: expr: key type: string expr: value type: string - # partition fields: 1 src3 Reduce Output Operator - tag: 2 key expressions: expr: key type: string + # partition fields: 1 + tag: 2 value expressions: expr: key type: string expr: value type: string - # partition fields: 1 src1 Reduce Output Operator - tag: 0 key expressions: expr: key type: string + # partition fields: 1 + tag: 0 value expressions: expr: key type: string expr: value type: string - # partition fields: 1 + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + Inner Join 0 to 1 + condition expressions: + 0 {VALUE.0} {VALUE.1} + 1 {VALUE.0} {VALUE.1} + 2 {VALUE.0} {VALUE.1} + Select Operator + expressions: + expr: 0 + type: string + expr: 5 + type: string + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 Stage: Stage-0 Move Operator tables: + replace: table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - 
replace: + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 0 val_0 @@ -2732,4 +2732,3 @@ STAGE PLANS: 98 val_98 98 val_98 98 val_98 - diff --git a/ql/src/test/results/clientpositive/join4.q.out b/ql/src/test/results/clientpositive/join4.q.out index 23f91ebcf..1ba61f432 100644 --- a/ql/src/test/results/clientpositive/join4.q.out +++ b/ql/src/test/results/clientpositive/join4.q.out @@ -8,39 +8,6 @@ STAGE DEPENDENCIES: STAGE PLANS: Stage: Stage-1 Map Reduce - Reduce Operator Tree: - Join Operator - condition expressions: - 0 {VALUE.0} {VALUE.1} - 1 {VALUE.0} {VALUE.1} - condition map: - Left Outer Join0 to 1 - Select Operator - expressions: - expr: 0 - type: string - expr: 1 - type: string - expr: 2 - type: string - expr: 3 - type: string - Select Operator - expressions: - expr: 0 - type: string - expr: 1 - type: string - expr: 2 - type: string - expr: 3 - type: string - File Output Operator - table: - name: dest1 - serde: simple_meta - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat Alias -> Map Operator Tree: c:b:src2 Filter Operator @@ -54,16 +21,16 @@ STAGE PLANS: expr: value type: string Reduce Output Operator - tag: 1 key expressions: expr: 0 type: string + # partition fields: 1 + tag: 1 value expressions: expr: 0 type: string expr: 1 type: string - # partition fields: 1 c:a:src1 Filter Operator predicate: @@ -76,26 +43,59 @@ STAGE PLANS: expr: value type: string Reduce Output Operator - tag: 0 key expressions: expr: 0 type: string + # partition fields: 1 + tag: 0 value expressions: expr: 0 type: string expr: 1 type: string - # partition fields: 1 + Reduce Operator Tree: + Join Operator + condition map: + Left Outer Join0 to 1 + condition expressions: + 0 {VALUE.0} {VALUE.1} + 1 {VALUE.0} {VALUE.1} + Select Operator + expressions: + expr: 0 + type: string + expr: 1 + type: string + expr: 2 + type: string + expr: 3 + type: string + Select Operator + expressions: + expr: 0 + type: string + expr: 1 + type: string + expr: 2 + type: string + expr: 3 + type: string + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 Stage: Stage-0 Move Operator tables: + replace: table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - replace: + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 11 val_11 NULL NULL @@ -109,4 +109,3 @@ STAGE PLANS: 18 val_18 18 val_18 18 val_18 18 val_18 19 val_19 19 val_19 - diff --git a/ql/src/test/results/clientpositive/join5.q.out b/ql/src/test/results/clientpositive/join5.q.out index f5beb94a0..f831b9b75 100644 --- a/ql/src/test/results/clientpositive/join5.q.out +++ b/ql/src/test/results/clientpositive/join5.q.out @@ -8,39 +8,6 @@ STAGE DEPENDENCIES: STAGE PLANS: Stage: Stage-1 Map Reduce - Reduce Operator Tree: - Join Operator - condition expressions: - 0 {VALUE.0} {VALUE.1} - 1 {VALUE.0} {VALUE.1} - condition map: - Right Outer Join0 to 1 - Select Operator - expressions: - expr: 0 - type: string - expr: 1 - type: string - expr: 2 - type: string - expr: 3 - type: string - Select Operator - expressions: - expr: 0 - type: string - expr: 1 - type: string - expr: 2 - type: string - expr: 3 - type: string - File Output Operator - table: - 
name: dest1 - serde: simple_meta - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat Alias -> Map Operator Tree: c:b:src2 Filter Operator @@ -54,16 +21,16 @@ STAGE PLANS: expr: value type: string Reduce Output Operator - tag: 1 key expressions: expr: 0 type: string + # partition fields: 1 + tag: 1 value expressions: expr: 0 type: string expr: 1 type: string - # partition fields: 1 c:a:src1 Filter Operator predicate: @@ -76,26 +43,59 @@ STAGE PLANS: expr: value type: string Reduce Output Operator - tag: 0 key expressions: expr: 0 type: string + # partition fields: 1 + tag: 0 value expressions: expr: 0 type: string expr: 1 type: string - # partition fields: 1 + Reduce Operator Tree: + Join Operator + condition map: + Right Outer Join0 to 1 + condition expressions: + 0 {VALUE.0} {VALUE.1} + 1 {VALUE.0} {VALUE.1} + Select Operator + expressions: + expr: 0 + type: string + expr: 1 + type: string + expr: 2 + type: string + expr: 3 + type: string + Select Operator + expressions: + expr: 0 + type: string + expr: 1 + type: string + expr: 2 + type: string + expr: 3 + type: string + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 Stage: Stage-0 Move Operator tables: + replace: table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - replace: + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 17 val_17 17 val_17 @@ -107,4 +107,3 @@ STAGE PLANS: NULL NULL 20 val_20 NULL NULL 24 val_24 NULL NULL 24 val_24 - diff --git a/ql/src/test/results/clientpositive/join6.q.out b/ql/src/test/results/clientpositive/join6.q.out index 5bc58005e..55e7db4b8 100644 --- a/ql/src/test/results/clientpositive/join6.q.out +++ b/ql/src/test/results/clientpositive/join6.q.out @@ -8,39 +8,6 @@ STAGE DEPENDENCIES: STAGE PLANS: Stage: Stage-1 Map Reduce - Reduce Operator Tree: - Join Operator - condition expressions: - 0 {VALUE.0} {VALUE.1} - 1 {VALUE.0} {VALUE.1} - condition map: - Outer Join 0 to 1 - Select Operator - expressions: - expr: 0 - type: string - expr: 1 - type: string - expr: 2 - type: string - expr: 3 - type: string - Select Operator - expressions: - expr: 0 - type: string - expr: 1 - type: string - expr: 2 - type: string - expr: 3 - type: string - File Output Operator - table: - name: dest1 - serde: simple_meta - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat Alias -> Map Operator Tree: c:b:src2 Filter Operator @@ -54,16 +21,16 @@ STAGE PLANS: expr: value type: string Reduce Output Operator - tag: 1 key expressions: expr: 0 type: string + # partition fields: 1 + tag: 1 value expressions: expr: 0 type: string expr: 1 type: string - # partition fields: 1 c:a:src1 Filter Operator predicate: @@ -76,26 +43,59 @@ STAGE PLANS: expr: value type: string Reduce Output Operator - tag: 0 key expressions: expr: 0 type: string + # partition fields: 1 + tag: 0 value expressions: expr: 0 type: string expr: 1 type: string - # partition fields: 1 + Reduce Operator Tree: + Join Operator + condition map: + Outer Join 0 to 1 + condition expressions: + 0 {VALUE.0} {VALUE.1} + 1 {VALUE.0} {VALUE.1} + Select Operator + expressions: + expr: 0 + type: string 
+ expr: 1 + type: string + expr: 2 + type: string + expr: 3 + type: string + Select Operator + expressions: + expr: 0 + type: string + expr: 1 + type: string + expr: 2 + type: string + expr: 3 + type: string + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 Stage: Stage-0 Move Operator tables: + replace: table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - replace: + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 11 val_11 NULL NULL @@ -112,4 +112,3 @@ STAGE PLANS: NULL NULL 20 val_20 NULL NULL 24 val_24 NULL NULL 24 val_24 - diff --git a/ql/src/test/results/clientpositive/join7.q.out b/ql/src/test/results/clientpositive/join7.q.out index 227dae80d..730947188 100644 --- a/ql/src/test/results/clientpositive/join7.q.out +++ b/ql/src/test/results/clientpositive/join7.q.out @@ -8,49 +8,6 @@ STAGE DEPENDENCIES: STAGE PLANS: Stage: Stage-1 Map Reduce - Reduce Operator Tree: - Join Operator - condition expressions: - 0 {VALUE.0} {VALUE.1} - 1 {VALUE.0} {VALUE.1} - 2 {VALUE.0} {VALUE.1} - condition map: - Outer Join 0 to 1 - Left Outer Join0 to 1 - Select Operator - expressions: - expr: 0 - type: string - expr: 1 - type: string - expr: 2 - type: string - expr: 3 - type: string - expr: 4 - type: string - expr: 5 - type: string - Select Operator - expressions: - expr: 0 - type: string - expr: 1 - type: string - expr: 2 - type: string - expr: 3 - type: string - expr: 4 - type: string - expr: 5 - type: string - File Output Operator - table: - name: dest1 - serde: simple_meta - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat Alias -> Map Operator Tree: c:b:src2 Filter Operator @@ -64,16 +21,16 @@ STAGE PLANS: expr: value type: string Reduce Output Operator - tag: 1 key expressions: expr: 0 type: string + # partition fields: 1 + tag: 1 value expressions: expr: 0 type: string expr: 1 type: string - # partition fields: 1 c:a:src1 Filter Operator predicate: @@ -86,16 +43,16 @@ STAGE PLANS: expr: value type: string Reduce Output Operator - tag: 0 key expressions: expr: 0 type: string + # partition fields: 1 + tag: 0 value expressions: expr: 0 type: string expr: 1 type: string - # partition fields: 1 c:c:src3 Filter Operator predicate: @@ -108,26 +65,69 @@ STAGE PLANS: expr: value type: string Reduce Output Operator - tag: 2 key expressions: expr: 0 type: string + # partition fields: 1 + tag: 2 value expressions: expr: 0 type: string expr: 1 type: string - # partition fields: 1 + Reduce Operator Tree: + Join Operator + condition map: + Outer Join 0 to 1 + Left Outer Join0 to 1 + condition expressions: + 0 {VALUE.0} {VALUE.1} + 1 {VALUE.0} {VALUE.1} + 2 {VALUE.0} {VALUE.1} + Select Operator + expressions: + expr: 0 + type: string + expr: 1 + type: string + expr: 2 + type: string + expr: 3 + type: string + expr: 4 + type: string + expr: 5 + type: string + Select Operator + expressions: + expr: 0 + type: string + expr: 1 + type: string + expr: 2 + type: string + expr: 3 + type: string + expr: 4 + type: string + expr: 5 + type: string + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: 
org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 Stage: Stage-0 Move Operator tables: + replace: table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - replace: + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 11 val_11 NULL NULL NULL NULL @@ -146,4 +146,3 @@ NULL NULL 24 val_24 NULL NULL NULL NULL 24 val_24 NULL NULL NULL NULL 24 val_24 NULL NULL NULL NULL 24 val_24 NULL NULL - diff --git a/ql/src/test/results/clientpositive/join8.q.out b/ql/src/test/results/clientpositive/join8.q.out index 074c98140..77199bbca 100644 --- a/ql/src/test/results/clientpositive/join8.q.out +++ b/ql/src/test/results/clientpositive/join8.q.out @@ -8,43 +8,6 @@ STAGE DEPENDENCIES: STAGE PLANS: Stage: Stage-1 Map Reduce - Reduce Operator Tree: - Join Operator - condition expressions: - 0 {VALUE.0} {VALUE.1} - 1 {VALUE.0} {VALUE.1} - condition map: - Left Outer Join0 to 1 - Select Operator - expressions: - expr: 0 - type: string - expr: 1 - type: string - expr: 2 - type: string - expr: 3 - type: string - Filter Operator - predicate: - expr: (2 is null and 0 is not null) - type: Boolean - Select Operator - expressions: - expr: 0 - type: string - expr: 1 - type: string - expr: 2 - type: string - expr: 3 - type: string - File Output Operator - table: - name: dest1 - serde: simple_meta - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat Alias -> Map Operator Tree: c:b:src2 Filter Operator @@ -58,16 +21,16 @@ STAGE PLANS: expr: value type: string Reduce Output Operator - tag: 1 key expressions: expr: 0 type: string + # partition fields: 1 + tag: 1 value expressions: expr: 0 type: string expr: 1 type: string - # partition fields: 1 c:a:src1 Filter Operator predicate: @@ -80,26 +43,63 @@ STAGE PLANS: expr: value type: string Reduce Output Operator - tag: 0 key expressions: expr: 0 type: string + # partition fields: 1 + tag: 0 value expressions: expr: 0 type: string expr: 1 type: string - # partition fields: 1 + Reduce Operator Tree: + Join Operator + condition map: + Left Outer Join0 to 1 + condition expressions: + 0 {VALUE.0} {VALUE.1} + 1 {VALUE.0} {VALUE.1} + Select Operator + expressions: + expr: 0 + type: string + expr: 1 + type: string + expr: 2 + type: string + expr: 3 + type: string + Filter Operator + predicate: + expr: (2 is null and 0 is not null) + type: Boolean + Select Operator + expressions: + expr: 0 + type: string + expr: 1 + type: string + expr: 2 + type: string + expr: 3 + type: string + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 Stage: Stage-0 Move Operator tables: + replace: table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - replace: + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 11 val_11 NULL NULL @@ -107,4 +107,3 @@ STAGE PLANS: 12 val_12 NULL NULL 15 val_15 NULL NULL 15 val_15 NULL NULL - diff --git a/ql/src/test/results/clientpositive/join9.q.out b/ql/src/test/results/clientpositive/join9.q.out new file mode 100644 index 000000000..a293099a8 --- /dev/null +++ b/ql/src/test/results/clientpositive/join9.q.out @@ 
-0,0 +1,1169 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF srcpart src1) (TOK_TABREF src src2) (= (TOK_COLREF src1 key) (TOK_COLREF src2 key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src1 key)) (TOK_SELEXPR (TOK_COLREF src2 value))) (TOK_WHERE (and (= (TOK_COLREF src1 ds) '2008-04-08') (= (TOK_COLREF src1 hr) '12'))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src2 + Reduce Output Operator + key expressions: + expr: key + type: string + # partition fields: 1 + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string + src1 + Reduce Output Operator + key expressions: + expr: key + type: string + # partition fields: 1 + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + Needs Tagging: + Path -> Alias: + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + Path -> Partition: + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + Partition + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + properties: + name src + serialization.ddl struct src { string key, string value} + serialization.format 1 + columns key,value + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: src + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + Partition + partition values: + ds 2008-04-08 + hr 12 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + properties: + name srcpart + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: srcpart + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {VALUE.0} {VALUE.1} {VALUE.2} {VALUE.3} + 1 {VALUE.0} {VALUE.1} + Filter Operator + predicate: + expr: ((2 = '2008-04-08') and (3 = '12')) + type: Boolean + Select Operator + expressions: + expr: 0 + type: string + expr: 5 + type: string + File Output Operator + directory: /tmp/hive-njain/465936460/211979279.10000.insclause-0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + properties: + name dest1 + serialization.ddl struct dest1 { i32 key, string value} + serialization.format 
1 + columns key,value + SORTBUCKETCOLSPREFIX TRUE + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + + Stage: Stage-0 + Move Operator + tables: + replace: + source: /tmp/hive-njain/465936460/211979279.10000.insclause-0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + properties: + name dest1 + serialization.ddl struct dest1 { i32 key, string value} + serialization.format 1 + columns key,value + SORTBUCKETCOLSPREFIX TRUE + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + + +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +10 val_10 +100 val_100 +100 val_100 +100 val_100 +100 val_100 +103 val_103 +103 val_103 +103 val_103 +103 val_103 +104 val_104 +104 val_104 +104 val_104 +104 val_104 +105 val_105 +11 val_11 +111 val_111 +113 val_113 +113 val_113 +113 val_113 +113 val_113 +114 val_114 +116 val_116 +118 val_118 +118 val_118 +118 val_118 +118 val_118 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +12 val_12 +12 val_12 +12 val_12 +12 val_12 +120 val_120 +120 val_120 +120 val_120 +120 val_120 +125 val_125 +125 val_125 +125 val_125 +125 val_125 +126 val_126 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +129 val_129 +129 val_129 +129 val_129 +129 val_129 +131 val_131 +133 val_133 +134 val_134 +134 val_134 +134 val_134 +134 val_134 +136 val_136 +137 val_137 +137 val_137 +137 val_137 +137 val_137 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +143 val_143 +145 val_145 +146 val_146 +146 val_146 +146 val_146 +146 val_146 +149 val_149 +149 val_149 +149 val_149 +149 val_149 +15 val_15 +15 val_15 +15 val_15 +15 val_15 +150 val_150 +152 val_152 +152 val_152 +152 val_152 +152 val_152 +153 val_153 +155 val_155 +156 val_156 +157 val_157 +158 val_158 +160 val_160 +162 val_162 +163 val_163 +164 val_164 +164 val_164 +164 val_164 +164 val_164 +165 val_165 +165 val_165 +165 val_165 +165 val_165 +166 val_166 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +168 val_168 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +17 val_17 +170 val_170 +172 val_172 +172 val_172 +172 val_172 +172 val_172 +174 val_174 +174 val_174 +174 val_174 +174 val_174 +175 val_175 +175 val_175 +175 val_175 +175 val_175 +176 val_176 +176 val_176 +176 val_176 +176 val_176 +177 val_177 +178 
val_178 +179 val_179 +179 val_179 +179 val_179 +179 val_179 +18 val_18 +18 val_18 +18 val_18 +18 val_18 +180 val_180 +181 val_181 +183 val_183 +186 val_186 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +189 val_189 +19 val_19 +190 val_190 +191 val_191 +191 val_191 +191 val_191 +191 val_191 +192 val_192 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +194 val_194 +195 val_195 +195 val_195 +195 val_195 +195 val_195 +196 val_196 +197 val_197 +197 val_197 +197 val_197 +197 val_197 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +2 val_2 +20 val_20 +200 val_200 +200 val_200 +200 val_200 +200 val_200 +201 val_201 +202 val_202 +203 val_203 +203 val_203 +203 val_203 +203 val_203 +205 val_205 +205 val_205 +205 val_205 +205 val_205 +207 val_207 +207 val_207 +207 val_207 +207 val_207 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +209 val_209 +209 val_209 +209 val_209 +209 val_209 +213 val_213 +213 val_213 +213 val_213 +213 val_213 +214 val_214 +216 val_216 +216 val_216 +216 val_216 +216 val_216 +217 val_217 +217 val_217 +217 val_217 +217 val_217 +218 val_218 +219 val_219 +219 val_219 +219 val_219 +219 val_219 +221 val_221 +221 val_221 +221 val_221 +221 val_221 +222 val_222 +223 val_223 +223 val_223 +223 val_223 +223 val_223 +224 val_224 +224 val_224 +224 val_224 +224 val_224 +226 val_226 +228 val_228 +229 val_229 +229 val_229 +229 val_229 +229 val_229 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +233 val_233 +233 val_233 +233 val_233 +233 val_233 +235 val_235 +237 val_237 +237 val_237 +237 val_237 +237 val_237 +238 val_238 +238 val_238 +238 val_238 +238 val_238 +239 val_239 +239 val_239 +239 val_239 +239 val_239 +24 val_24 +24 val_24 +24 val_24 +24 val_24 +241 val_241 +242 val_242 +242 val_242 +242 val_242 +242 val_242 +244 val_244 +247 val_247 +248 val_248 +249 val_249 +252 val_252 +255 val_255 +255 val_255 +255 val_255 +255 val_255 +256 val_256 +256 val_256 +256 val_256 +256 val_256 +257 val_257 +258 val_258 +26 val_26 +26 val_26 +26 val_26 +26 val_26 +260 val_260 +262 val_262 +263 val_263 +265 val_265 +265 val_265 +265 val_265 +265 val_265 +266 val_266 +27 val_27 +272 val_272 +272 val_272 +272 val_272 +272 val_272 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +274 val_274 +275 val_275 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +278 val_278 +278 val_278 +278 val_278 +278 val_278 +28 val_28 +280 val_280 +280 val_280 +280 val_280 +280 val_280 +281 val_281 +281 val_281 +281 val_281 +281 val_281 +282 val_282 +282 val_282 +282 val_282 +282 val_282 +283 val_283 +284 val_284 +285 val_285 +286 val_286 +287 val_287 +288 val_288 +288 val_288 +288 val_288 +288 val_288 +289 val_289 +291 val_291 +292 val_292 +296 val_296 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 
val_298 +30 val_30 +302 val_302 +305 val_305 +306 val_306 +307 val_307 +307 val_307 +307 val_307 +307 val_307 +308 val_308 +309 val_309 +309 val_309 +309 val_309 +309 val_309 +310 val_310 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +315 val_315 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +317 val_317 +317 val_317 +317 val_317 +317 val_317 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +321 val_321 +321 val_321 +321 val_321 +321 val_321 +322 val_322 +322 val_322 +322 val_322 +322 val_322 +323 val_323 +325 val_325 +325 val_325 +325 val_325 +325 val_325 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +33 val_33 +331 val_331 +331 val_331 +331 val_331 +331 val_331 +332 val_332 +333 val_333 +333 val_333 +333 val_333 +333 val_333 +335 val_335 +336 val_336 +338 val_338 +339 val_339 +34 val_34 +341 val_341 +342 val_342 +342 val_342 +342 val_342 +342 val_342 +344 val_344 +344 val_344 +344 val_344 +344 val_344 +345 val_345 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +351 val_351 +353 val_353 +353 val_353 +353 val_353 +353 val_353 +356 val_356 +360 val_360 +362 val_362 +364 val_364 +365 val_365 +366 val_366 +367 val_367 +367 val_367 +367 val_367 +367 val_367 +368 val_368 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +37 val_37 +37 val_37 +37 val_37 +37 val_37 +373 val_373 +374 val_374 +375 val_375 +377 val_377 +378 val_378 +379 val_379 +382 val_382 +382 val_382 +382 val_382 +382 val_382 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +386 val_386 +389 val_389 +392 val_392 +393 val_393 +394 val_394 +395 val_395 +395 val_395 +395 val_395 +395 val_395 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +397 val_397 +397 val_397 +397 val_397 +397 val_397 +399 val_399 +399 val_399 +399 val_399 +399 val_399 +4 val_4 +400 val_400 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +402 val_402 +403 val_403 +403 val_403 +403 val_403 +403 val_403 +403 val_403 +403 val_403 +403 val_403 +403 val_403 +403 val_403 +404 val_404 +404 val_404 +404 val_404 +404 val_404 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +407 val_407 +409 val_409 +409 val_409 +409 val_409 +409 val_409 +409 val_409 +409 val_409 +409 val_409 +409 val_409 +409 val_409 +41 val_41 +411 val_411 +413 val_413 +413 val_413 +413 val_413 +413 val_413 +414 val_414 +414 
val_414 +414 val_414 +414 val_414 +417 val_417 +417 val_417 +417 val_417 +417 val_417 +417 val_417 +417 val_417 +417 val_417 +417 val_417 +417 val_417 +418 val_418 +419 val_419 +42 val_42 +42 val_42 +42 val_42 +42 val_42 +421 val_421 +424 val_424 +424 val_424 +424 val_424 +424 val_424 +427 val_427 +429 val_429 +429 val_429 +429 val_429 +429 val_429 +43 val_43 +430 val_430 +430 val_430 +430 val_430 +430 val_430 +430 val_430 +430 val_430 +430 val_430 +430 val_430 +430 val_430 +431 val_431 +431 val_431 +431 val_431 +431 val_431 +431 val_431 +431 val_431 +431 val_431 +431 val_431 +431 val_431 +432 val_432 +435 val_435 +436 val_436 +437 val_437 +438 val_438 +438 val_438 +438 val_438 +438 val_438 +438 val_438 +438 val_438 +438 val_438 +438 val_438 +438 val_438 +439 val_439 +439 val_439 +439 val_439 +439 val_439 +44 val_44 +443 val_443 +444 val_444 +446 val_446 +448 val_448 +449 val_449 +452 val_452 +453 val_453 +454 val_454 +454 val_454 +454 val_454 +454 val_454 +454 val_454 +454 val_454 +454 val_454 +454 val_454 +454 val_454 +455 val_455 +457 val_457 +458 val_458 +458 val_458 +458 val_458 +458 val_458 +459 val_459 +459 val_459 +459 val_459 +459 val_459 +460 val_460 +462 val_462 +462 val_462 +462 val_462 +462 val_462 +463 val_463 +463 val_463 +463 val_463 +463 val_463 +466 val_466 +466 val_466 +466 val_466 +466 val_466 +466 val_466 +466 val_466 +466 val_466 +466 val_466 +466 val_466 +467 val_467 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +468 val_468 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +47 val_47 +470 val_470 +472 val_472 +475 val_475 +477 val_477 +478 val_478 +478 val_478 +478 val_478 +478 val_478 +479 val_479 +480 val_480 +480 val_480 +480 val_480 +480 val_480 +480 val_480 +480 val_480 +480 val_480 +480 val_480 +480 val_480 +481 val_481 +482 val_482 +483 val_483 +484 val_484 +485 val_485 +487 val_487 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +489 val_489 +490 val_490 +491 val_491 +492 val_492 +492 val_492 +492 val_492 +492 val_492 +493 val_493 +494 val_494 +495 val_495 +496 val_496 +497 val_497 +498 val_498 +498 val_498 +498 val_498 +498 val_498 +498 val_498 +498 val_498 +498 val_498 +498 val_498 +498 val_498 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +51 val_51 +51 val_51 +51 val_51 +51 val_51 +53 val_53 +54 val_54 +57 val_57 +58 val_58 +58 val_58 +58 val_58 +58 val_58 +64 val_64 +65 val_65 +66 val_66 +67 val_67 +67 val_67 +67 val_67 +67 val_67 +69 val_69 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +72 val_72 +72 val_72 +72 val_72 +72 val_72 +74 val_74 +76 val_76 +76 val_76 +76 val_76 +76 val_76 +77 val_77 +78 val_78 +8 val_8 +80 val_80 +82 val_82 +83 val_83 +83 val_83 +83 val_83 +83 val_83 +84 val_84 +84 val_84 +84 val_84 +84 val_84 +85 val_85 +86 val_86 +87 val_87 +9 val_9 +90 val_90 +90 val_90 +90 val_90 +90 val_90 +90 val_90 +90 val_90 +90 val_90 +90 val_90 +90 val_90 +92 val_92 +95 val_95 +95 val_95 +95 val_95 +95 
val_95 +96 val_96 +97 val_97 +97 val_97 +97 val_97 +97 val_97 +98 val_98 +98 val_98 +98 val_98 +98 val_98 diff --git a/ql/src/test/results/clientpositive/nullinput.q.out b/ql/src/test/results/clientpositive/nullinput.q.out new file mode 100644 index 000000000..e69de29bb diff --git a/ql/src/test/results/clientpositive/sample1.q.out b/ql/src/test/results/clientpositive/sample1.q.out index 29045fa4d..5d84e1b19 100644 --- a/ql/src/test/results/clientpositive/sample1.q.out +++ b/ql/src/test/results/clientpositive/sample1.q.out @@ -8,31 +8,6 @@ STAGE DEPENDENCIES: STAGE PLANS: Stage: Stage-1 Map Reduce - Path -> Alias: - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 - Path -> Partition: - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 - Partition - - properties: - name srcpart - bucket_field_name - serialization.format 1 - columns key,value - partition_columns ds/hr - field_to_dimension - bucket_count -1 - serialization.lib simple_meta - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - location file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart - name: srcpart - serde: simple_meta - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - partition values: - ds 2008-04-08 - hr 11 Alias -> Map Operator Tree: s Filter Operator @@ -50,48 +25,70 @@ STAGE PLANS: expr: hr type: string File Output Operator + directory: /tmp/hive-njain/1799446246/1449752317.10000.insclause-0 table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat properties: name dest1 - bucket_field_name + serialization.ddl struct dest1 { i32 key, string value, string dt, string hr} serialization.format 1 columns key,value,dt,hr SORTBUCKETCOLSPREFIX TRUE - field_to_dimension bucket_count -1 - serialization.lib simple_meta + serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - location file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe name: dest1 - serde: simple_meta - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - directory: /tmp/hive-zshao/640225297.10000.insclause-0 Needs Tagging: + Path -> Alias: + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + Path -> Partition: + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + Partition + partition values: + ds 2008-04-08 + hr 11 + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + properties: + name srcpart + serialization.ddl struct srcpart { string key, string value} + serialization.format 1 + columns key,value + partition_columns ds/hr + bucket_count -1 + serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + file.inputformat 
org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: srcpart Stage: Stage-0 Move Operator tables: + replace: + source: /tmp/hive-njain/1799446246/1449752317.10000.insclause-0 table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat properties: name dest1 - bucket_field_name + serialization.ddl struct dest1 { i32 key, string value, string dt, string hr} serialization.format 1 columns key,value,dt,hr SORTBUCKETCOLSPREFIX TRUE - field_to_dimension bucket_count -1 - serialization.lib simple_meta + serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - location file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe name: dest1 - serde: simple_meta - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - replace: - source: /tmp/hive-zshao/640225297.10000.insclause-0 238 val_238 2008-04-08 11 @@ -594,4 +591,3 @@ STAGE PLANS: 400 val_400 2008-04-08 11 200 val_200 2008-04-08 11 97 val_97 2008-04-08 11 - diff --git a/ql/src/test/results/clientpositive/sample2.q.out b/ql/src/test/results/clientpositive/sample2.q.out index 1582ce1c0..7a9fa5b10 100644 --- a/ql/src/test/results/clientpositive/sample2.q.out +++ b/ql/src/test/results/clientpositive/sample2.q.out @@ -8,27 +8,6 @@ STAGE DEPENDENCIES: STAGE PLANS: Stage: Stage-1 Map Reduce - Path -> Alias: - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket/kv1.txt - Path -> Partition: - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket/kv1.txt - Partition - - properties: - name srcbucket - bucket_field_name - serialization.format 1 - columns key,value - field_to_dimension - bucket_count 2 - serialization.lib simple_meta - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - location file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket - name: srcbucket - serde: simple_meta - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat Alias -> Map Operator Tree: s Select Operator @@ -38,48 +17,66 @@ STAGE PLANS: expr: value type: string File Output Operator + directory: /tmp/hive-njain/1178661057/1120939755.10000.insclause-0 table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat properties: name dest1 - bucket_field_name + serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 columns key,value SORTBUCKETCOLSPREFIX TRUE - field_to_dimension bucket_count -1 - serialization.lib simple_meta + serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat 
org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - location file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe name: dest1 - serde: simple_meta - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - directory: /tmp/hive-zshao/83464218.10000.insclause-0 Needs Tagging: + Path -> Alias: + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket/kv1.txt + Path -> Partition: + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket/kv1.txt + Partition + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + properties: + name srcbucket + serialization.ddl struct srcbucket { string key, string value} + serialization.format 1 + columns key,value + bucket_count 2 + serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: srcbucket Stage: Stage-0 Move Operator tables: + replace: + source: /tmp/hive-njain/1178661057/1120939755.10000.insclause-0 table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat properties: name dest1 - bucket_field_name + serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 columns key,value SORTBUCKETCOLSPREFIX TRUE - field_to_dimension bucket_count -1 - serialization.lib simple_meta + serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - location file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe name: dest1 - serde: simple_meta - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - replace: - source: /tmp/hive-zshao/83464218.10000.insclause-0 238 val_238 @@ -582,4 +579,3 @@ STAGE PLANS: 400 val_400 200 val_200 97 val_97 - diff --git a/ql/src/test/results/clientpositive/sample4.q.out b/ql/src/test/results/clientpositive/sample4.q.out index e86ff25d6..b6684f971 100644 --- a/ql/src/test/results/clientpositive/sample4.q.out +++ b/ql/src/test/results/clientpositive/sample4.q.out @@ -8,32 +8,11 @@ STAGE DEPENDENCIES: STAGE PLANS: Stage: Stage-1 Map Reduce - Path -> Alias: - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket - Path -> Partition: - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket - Partition - - properties: - name srcbucket - bucket_field_name - serialization.format 1 - columns key,value - field_to_dimension - bucket_count 2 - serialization.lib simple_meta - file.inputformat 
org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - location file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket - name: srcbucket - serde: simple_meta - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat Alias -> Map Operator Tree: s Filter Operator predicate: - expr: (((default_sample_hashfn(key) & 2147483647) % 2) = 1) + expr: (((default_sample_hashfn(key) & 2147483647) % 2) = 0) type: Boolean Select Operator expressions: @@ -42,551 +21,563 @@ STAGE PLANS: expr: value type: string File Output Operator + directory: /tmp/hive-njain/594156852/317367185.10000.insclause-0 table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat properties: name dest1 - bucket_field_name + serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 columns key,value SORTBUCKETCOLSPREFIX TRUE - field_to_dimension bucket_count -1 - serialization.lib simple_meta + serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - location file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe name: dest1 - serde: simple_meta - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - directory: /tmp/hive-zshao/4031240.10000.insclause-0 Needs Tagging: + Path -> Alias: + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + Path -> Partition: + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + Partition + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + properties: + name srcbucket + serialization.ddl struct srcbucket { string key, string value} + serialization.format 1 + columns key,value + bucket_count 2 + serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: srcbucket Stage: Stage-0 Move Operator tables: + replace: + source: /tmp/hive-njain/594156852/317367185.10000.insclause-0 table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat properties: name dest1 - bucket_field_name + serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 columns key,value SORTBUCKETCOLSPREFIX TRUE - field_to_dimension bucket_count -1 - serialization.lib simple_meta + serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - location 
file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe name: dest1 - serde: simple_meta - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - replace: - source: /tmp/hive-zshao/4031240.10000.insclause-0 - - -238 val_238 -311 val_311 -27 val_27 -409 val_409 -278 val_278 -98 val_98 -265 val_265 -193 val_193 -401 val_401 -128 val_128 -146 val_146 -429 val_429 -469 val_469 -281 val_281 -209 val_209 -403 val_403 -166 val_166 -430 val_430 -252 val_252 -292 val_292 -287 val_287 -153 val_153 -193 val_193 -494 val_494 -207 val_207 -199 val_199 -399 val_399 -247 val_247 -489 val_489 -162 val_162 -377 val_377 -397 val_397 -342 val_342 -195 val_195 -113 val_113 -155 val_155 -203 val_203 -339 val_339 -128 val_128 -311 val_311 -302 val_302 -205 val_205 -438 val_438 -489 val_489 -157 val_157 -221 val_221 -92 val_92 -111 val_111 -47 val_47 -72 val_72 -427 val_427 -399 val_399 -382 val_382 -498 val_498 -386 val_386 -469 val_469 -54 val_54 -216 val_216 -430 val_430 -278 val_278 -289 val_289 -221 val_221 -65 val_65 -311 val_311 -137 val_137 -241 val_241 -83 val_83 -333 val_333 -180 val_180 -12 val_12 -230 val_230 -67 val_67 -384 val_384 -489 val_489 -353 val_353 -373 val_373 -272 val_272 -348 val_348 -58 val_58 -230 val_230 -348 val_348 -463 val_463 -179 val_179 -119 val_119 -496 val_496 -322 val_322 -197 val_197 -393 val_393 -454 val_454 -100 val_100 -298 val_298 -199 val_199 -191 val_191 -418 val_418 -96 val_96 -230 val_230 -205 val_205 -120 val_120 -131 val_131 -43 val_43 -436 val_436 -469 val_469 -308 val_308 -481 val_481 -98 val_98 -197 val_197 -409 val_409 -470 val_470 -137 val_137 -85 val_85 -490 val_490 -87 val_87 -364 val_364 -179 val_179 -395 val_395 -238 val_238 -72 val_72 -90 val_90 -10 val_10 -306 val_306 -272 val_272 -331 val_331 -401 val_401 -452 val_452 -177 val_177 -5 val_5 -317 val_317 -395 val_395 -58 val_58 -168 val_168 -34 val_34 -229 val_229 -472 val_472 -322 val_322 -498 val_498 -160 val_160 -195 val_195 -430 val_430 -119 val_119 -489 val_489 -458 val_458 -78 val_78 -76 val_76 -41 val_41 -223 val_223 -492 val_492 -449 val_449 -218 val_218 -30 val_30 -209 val_209 -76 val_76 -74 val_74 -342 val_342 -69 val_69 -230 val_230 -368 val_368 -296 val_296 -113 val_113 -216 val_216 -344 val_344 -274 val_274 -485 val_485 -223 val_223 -256 val_256 -263 val_263 -70 val_70 -487 val_487 -401 val_401 -191 val_191 -5 val_5 -438 val_438 -128 val_128 -467 val_467 -432 val_432 -229 val_229 -469 val_469 -463 val_463 -283 val_283 -331 val_331 -193 val_193 -335 val_335 -104 val_104 -366 val_366 -175 val_175 -403 val_403 -483 val_483 -409 val_409 -401 val_401 -258 val_258 -90 val_90 -203 val_203 -348 val_348 -12 val_12 -201 val_201 -164 val_164 -454 val_454 -478 val_478 -298 val_298 -164 val_164 -382 val_382 -5 val_5 -70 val_70 -397 val_397 -351 val_351 -104 val_104 -70 val_70 -438 val_438 -119 val_119 -414 val_414 -360 val_360 -199 val_199 -120 val_120 -429 val_429 -443 val_443 -230 val_230 -478 val_478 -317 val_317 -333 val_333 -207 val_207 -249 val_249 -265 val_265 -83 val_83 -353 val_353 -214 val_214 -133 val_133 -175 val_175 -454 val_454 -375 val_375 -401 val_401 -421 val_421 -407 val_407 -384 val_384 -256 val_256 -67 val_67 -384 val_384 -379 val_379 -18 val_18 -492 val_492 -100 val_100 -298 val_298 -9 val_9 -498 val_498 -146 val_146 
-458 val_458 -362 val_362 -186 val_186 -285 val_285 -348 val_348 -18 val_18 -281 val_281 -344 val_344 -469 val_469 -315 val_315 -348 val_348 -414 val_414 -126 val_126 -90 val_90 -403 val_403 -474 val_475 -281 val_282 -179 val_180 -135 val_136 -423 val_424 -245 val_246 -425 val_426 -241 val_242 -177 val_178 -272 val_273 -287 val_288 -373 val_374 -447 val_448 -443 val_444 -175 val_176 -278 val_279 -296 val_297 -126 val_127 -249 val_250 -21 val_22 -441 val_442 -106 val_107 -371 val_372 -153 val_154 -490 val_491 -421 val_422 -243 val_244 -128 val_129 -10 val_11 -133 val_134 -333 val_334 -391 val_392 -485 val_486 -241 val_242 -85 val_86 -335 val_336 -476 val_477 -494 val_495 -94 val_95 -485 val_486 -393 val_394 -409 val_410 -50 val_51 -265 val_266 -128 val_129 -63 val_64 -344 val_345 -135 val_136 -3 val_4 -331 val_332 -209 val_210 -281 val_282 -56 val_57 -254 val_255 -304 val_305 -238 val_239 -492 val_493 -289 val_290 -421 val_422 -58 val_59 -5 val_6 -249 val_250 -438 val_439 -360 val_361 -467 val_468 -175 val_176 -429 val_430 -281 val_282 -16 val_17 -119 val_120 -492 val_493 -120 val_121 -306 val_307 -393 val_394 -427 val_428 -386 val_387 -65 val_66 -388 val_389 -184 val_185 -375 val_376 -191 val_192 -197 val_198 -122 val_123 -438 val_439 -351 val_352 -23 val_24 -328 val_329 -382 val_383 -342 val_343 -102 val_103 -487 val_488 -331 val_332 -409 val_410 -463 val_464 -177 val_178 -399 val_400 -476 val_477 -258 val_259 -252 val_253 -87 val_88 -340 val_341 -443 val_444 -407 val_408 -355 val_356 -164 val_165 -104 val_105 -117 val_118 -326 val_327 -47 val_48 -241 val_242 -450 val_451 -140 val_141 -212 val_213 -308 val_309 -30 val_31 -405 val_406 -119 val_120 -49 val_50 -153 val_154 -21 val_22 -157 val_158 -416 val_417 -199 val_200 -375 val_376 -89 val_90 -395 val_396 -386 val_387 -454 val_455 -364 val_365 -89 val_90 -461 val_462 -52 val_53 -168 val_169 -384 val_385 -313 val_314 -324 val_325 -69 val_70 -245 val_246 -328 val_329 -322 val_323 -122 val_123 -241 val_242 -469 val_470 -384 val_385 -76 val_77 -104 val_105 -481 val_482 -32 val_33 -58 val_59 -351 val_352 -70 val_71 -443 val_444 -399 val_400 -205 val_206 -52 val_53 -21 val_22 -317 val_318 -209 val_210 -429 val_430 -160 val_161 -267 val_268 -454 val_455 -76 val_77 -412 val_413 -16 val_17 -375 val_376 -489 val_490 -126 val_127 -274 val_275 -494 val_495 -216 val_217 -470 val_471 -302 val_303 -119 val_120 -375 val_376 -391 val_392 -287 val_288 -450 val_451 -375 val_376 -12 val_13 -384 val_385 -29 val_30 -151 val_152 -52 val_53 -300 val_301 -21 val_22 -104 val_105 -436 val_437 -61 val_62 -407 val_408 -335 val_336 -164 val_165 -371 val_372 -432 val_433 -496 val_497 -355 val_356 -144 val_145 -87 val_88 -382 val_383 -348 val_349 -122 val_123 -427 val_428 -292 val_293 -182 val_183 -474 val_475 -353 val_354 -52 val_53 -414 val_415 -261 val_262 -65 val_66 -371 val_372 -256 val_257 -292 val_293 -412 val_413 -49 val_50 -478 val_479 -243 val_244 -151 val_152 -100 val_101 -401 val_402 -117 val_118 -137 val_138 -162 val_163 -276 val_277 -78 val_79 -379 val_380 -410 val_411 -157 val_158 -100 val_101 -407 val_408 -371 val_372 -308 val_309 -430 val_431 -135 val_136 -76 val_77 -300 val_301 -337 val_338 -478 val_479 -1 val_2 -89 val_90 -29 val_30 -285 val_286 -227 val_228 -395 val_396 +86 val_86 +165 val_165 +255 val_255 +484 val_484 +150 val_150 +273 val_273 +224 val_224 +369 val_369 +66 val_66 +213 val_213 +406 val_406 +374 val_374 +152 val_152 +145 val_145 +495 val_495 +37 val_37 +327 val_327 +277 val_277 +15 val_15 +82 val_82 +417 val_417 +219 val_219 +338 
val_338 +446 val_446 +459 val_459 +394 val_394 +237 val_237 +482 val_482 +174 val_174 +413 val_413 +466 val_466 +208 val_208 +174 val_174 +396 val_396 +417 val_417 +309 val_309 +365 val_365 +266 val_266 +439 val_439 +367 val_367 +325 val_325 +167 val_167 +475 val_475 +17 val_17 +0 val_0 +455 val_455 +316 val_316 +57 val_57 +149 val_149 +345 val_345 +129 val_129 +170 val_170 +20 val_20 +378 val_378 +4 val_4 +280 val_280 +35 val_35 +277 val_277 +208 val_208 +356 val_356 +169 val_169 +125 val_125 +437 val_437 +192 val_192 +286 val_286 +187 val_187 +176 val_176 +459 val_459 +51 val_51 +138 val_138 +103 val_103 +239 val_239 +213 val_213 +176 val_176 +318 val_318 +332 val_332 +275 val_275 +284 val_284 +181 val_181 +260 val_260 +404 val_404 +138 val_138 +217 val_217 +84 val_84 +466 val_466 +8 val_8 +411 val_411 +208 val_208 +24 val_24 +431 val_431 +172 val_172 +42 val_42 +129 val_129 +158 val_158 +0 val_0 +468 val_468 +26 val_26 +165 val_165 +327 val_327 +51 val_51 +404 val_404 +156 val_156 +468 val_468 +95 val_95 +196 val_196 +288 val_288 +457 val_457 +282 val_282 +187 val_187 +318 val_318 +318 val_318 +369 val_369 +316 val_316 +169 val_169 +413 val_413 +77 val_77 +0 val_0 +118 val_118 +134 val_134 +282 val_282 +138 val_138 +419 val_419 +15 val_15 +118 val_118 +307 val_307 +19 val_19 +435 val_435 +277 val_277 +273 val_273 +224 val_224 +309 val_309 +389 val_389 +327 val_327 +242 val_242 +369 val_369 +392 val_392 +242 val_242 +226 val_226 +497 val_497 +402 val_402 +396 val_396 +35 val_35 +336 val_336 +95 val_95 +11 val_11 +233 val_233 +143 val_143 +42 val_42 +321 val_321 +149 val_149 +228 val_228 +138 val_138 +453 val_453 +64 val_64 +468 val_468 +33 val_33 +103 val_103 +367 val_367 +167 val_167 +219 val_219 +239 val_239 +116 val_116 +480 val_480 +288 val_288 +244 val_244 +202 val_202 +316 val_316 +280 val_280 +2 val_2 +35 val_35 +235 val_235 +80 val_80 +44 val_44 +321 val_321 +466 val_466 +53 val_53 +105 val_105 +257 val_257 +406 val_406 +190 val_190 +406 val_406 +114 val_114 +262 val_262 +424 val_424 +396 val_396 +217 val_217 +431 val_431 +125 val_125 +431 val_431 +424 val_424 +187 val_187 +480 val_480 +291 val_291 +24 val_24 +255 val_255 +163 val_163 +200 val_200 +491 val_491 +237 val_237 +439 val_439 +248 val_248 +479 val_479 +305 val_305 +417 val_417 +444 val_444 +169 val_169 +323 val_323 +325 val_325 +277 val_277 +178 val_178 +468 val_468 +310 val_310 +493 val_493 +460 val_460 +480 val_480 +136 val_136 +172 val_172 +462 val_462 +233 val_233 +406 val_406 +189 val_189 +26 val_26 +134 val_134 +462 val_462 +341 val_341 +167 val_167 +273 val_273 +183 val_183 +97 val_97 +84 val_84 +28 val_28 +37 val_37 +448 val_448 +152 val_152 +307 val_307 +194 val_194 +477 val_477 +222 val_222 +169 val_169 +400 val_400 +200 val_200 +97 val_97 +291 val_292 +62 val_63 +271 val_272 +217 val_218 +167 val_168 +468 val_469 +413 val_414 +455 val_456 +231 val_232 +448 val_449 +246 val_247 +440 val_441 +31 val_32 +147 val_148 +428 val_429 +273 val_274 +356 val_357 +217 val_218 +33 val_34 +35 val_36 +402 val_403 +226 val_227 +15 val_16 +110 val_111 +343 val_344 +275 val_276 +0 val_1 +293 val_294 +240 val_241 +286 val_287 +408 val_409 +477 val_478 +455 val_456 +99 val_100 +482 val_483 +367 val_368 +59 val_60 +48 val_49 +424 val_425 +226 val_227 +488 val_489 +349 val_350 +11 val_12 +161 val_162 +123 val_124 +402 val_403 +468 val_469 +314 val_315 +497 val_498 +224 val_225 +4 val_5 +277 val_278 +206 val_207 +101 val_102 +114 val_115 +239 val_240 +389 val_390 +114 val_115 +235 val_236 +390 val_391 +165 val_166 +264 val_265 +196 
val_197 +20 val_21 +11 val_12 +129 val_130 +257 val_258 +71 val_72 +453 val_454 +82 val_83 +86 val_87 +323 val_324 +222 val_223 +42 val_43 +411 val_412 +338 val_339 +68 val_69 +185 val_186 +147 val_148 +473 val_474 +376 val_377 +347 val_348 +213 val_214 +291 val_292 +426 val_427 +132 val_133 +446 val_447 +121 val_122 +284 val_285 +129 val_130 +246 val_247 +491 val_492 +369 val_370 +262 val_263 +390 val_391 +352 val_353 +226 val_227 +349 val_350 +480 val_481 +497 val_498 +480 val_481 +475 val_476 +183 val_184 +15 val_16 +318 val_319 +149 val_150 +392 val_393 +77 val_78 +275 val_276 +174 val_175 +114 val_115 +161 val_162 +75 val_76 +264 val_265 +48 val_49 +336 val_337 +390 val_391 +484 val_485 +189 val_190 +305 val_306 +367 val_368 +6 val_7 +260 val_261 +2 val_3 +349 val_350 +170 val_171 +51 val_52 +363 val_364 +118 val_119 +310 val_311 +19 val_20 +80 val_81 +121 val_122 +239 val_240 +358 val_359 +11 val_12 +143 val_144 +341 val_342 +105 val_106 +42 val_43 +363 val_364 +347 val_348 +77 val_78 +293 val_294 +20 val_21 +40 val_41 +8 val_9 +310 val_311 +206 val_207 +381 val_382 +404 val_405 +206 val_207 +385 val_386 +259 val_260 +226 val_227 +262 val_263 +303 val_304 +260 val_261 +404 val_405 +116 val_117 +42 val_43 +303 val_304 +93 val_94 +277 val_278 +406 val_407 +451 val_452 +53 val_54 +132 val_133 +192 val_193 +356 val_357 +352 val_353 +330 val_331 +138 val_139 +257 val_258 +174 val_175 +295 val_296 +105 val_106 +204 val_205 +439 val_440 +374 val_375 +457 val_458 +93 val_94 +196 val_197 +437 val_438 +398 val_399 +35 val_36 +334 val_335 +435 val_436 +60 val_61 +442 val_443 +404 val_405 +446 val_447 +0 val_1 +268 val_269 +491 val_492 +392 val_393 +295 val_296 +93 val_94 +439 val_440 +156 val_157 +341 val_342 +118 val_119 +172 val_173 +244 val_245 +6 val_7 +284 val_285 +136 val_137 +349 val_350 +462 val_463 +123 val_124 +408 val_409 +152 val_153 +310 val_311 +486 val_487 +152 val_153 +378 val_379 +97 val_98 +495 val_496 +385 val_386 +40 val_41 +389 val_390 +259 val_260 +97 val_98 +125 val_126 +178 val_179 +415 val_416 +156 val_157 +228 val_229 +363 val_364 +22 val_23 +248 val_249 +459 val_460 +402 val_403 +62 val_63 +244 val_245 +46 val_47 +341 val_342 +215 val_216 +134 val_135 +196 val_197 +431 val_432 +82 val_83 +440 val_441 +309 val_310 +161 val_162 +468 val_469 +152 val_153 +138 val_139 +161 val_162 +91 val_92 +118 val_119 +178 val_179 +457 val_458 +242 val_243 +35 val_36 +244 val_245 diff --git a/ql/src/test/results/clientpositive/sample5.q.out b/ql/src/test/results/clientpositive/sample5.q.out index 46d1a1ef7..a57ff671c 100644 --- a/ql/src/test/results/clientpositive/sample5.q.out +++ b/ql/src/test/results/clientpositive/sample5.q.out @@ -8,32 +8,11 @@ STAGE DEPENDENCIES: STAGE PLANS: Stage: Stage-1 Map Reduce - Path -> Alias: - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket - Path -> Partition: - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket - Partition - - properties: - name srcbucket - bucket_field_name - serialization.format 1 - columns key,value - field_to_dimension - bucket_count 2 - serialization.lib simple_meta - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - location file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket - name: srcbucket - serde: simple_meta - input format: org.apache.hadoop.mapred.TextInputFormat - output format: 
org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat Alias -> Map Operator Tree: s Filter Operator predicate: - expr: (((default_sample_hashfn(key) & 2147483647) % 5) = 1) + expr: (((default_sample_hashfn(key) & 2147483647) % 5) = 0) type: Boolean Select Operator expressions: @@ -42,241 +21,272 @@ STAGE PLANS: expr: value type: string File Output Operator + directory: /tmp/hive-njain/535229973/587370170.10000.insclause-0 table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat properties: name dest1 - bucket_field_name + serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 columns key,value SORTBUCKETCOLSPREFIX TRUE - field_to_dimension bucket_count -1 - serialization.lib simple_meta + serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - location file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe name: dest1 - serde: simple_meta - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - directory: /tmp/hive-zshao/824052972.10000.insclause-0 Needs Tagging: + Path -> Alias: + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + Path -> Partition: + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + Partition + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + properties: + name srcbucket + serialization.ddl struct srcbucket { string key, string value} + serialization.format 1 + columns key,value + bucket_count 2 + serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: srcbucket Stage: Stage-0 Move Operator tables: + replace: + source: /tmp/hive-njain/535229973/587370170.10000.insclause-0 table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat properties: name dest1 - bucket_field_name + serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 columns key,value SORTBUCKETCOLSPREFIX TRUE - field_to_dimension bucket_count -1 - serialization.lib simple_meta + serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - location file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe name: dest1 - serde: simple_meta - input format: org.apache.hadoop.mapred.TextInputFormat - output format: 
org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - replace: - source: /tmp/hive-zshao/824052972.10000.insclause-0 - - -165 val_165 -255 val_255 -278 val_278 -273 val_273 -37 val_37 -327 val_327 -82 val_82 -403 val_403 -417 val_417 -430 val_430 -219 val_219 -287 val_287 -237 val_237 -174 val_174 -494 val_494 -174 val_174 -417 val_417 -377 val_377 -309 val_309 -205 val_205 -345 val_345 -129 val_129 -386 val_386 -192 val_192 -138 val_138 -430 val_430 -278 val_278 -318 val_318 -241 val_241 -138 val_138 -8 val_8 -179 val_179 -129 val_129 -322 val_322 -197 val_197 -96 val_96 -165 val_165 -327 val_327 -205 val_205 -156 val_156 -282 val_282 -197 val_197 -318 val_318 -318 val_318 -87 val_87 -179 val_179 -395 val_395 -282 val_282 -138 val_138 -19 val_19 -435 val_435 -273 val_273 -309 val_309 -327 val_327 -331 val_331 -395 val_395 -336 val_336 -322 val_322 -160 val_160 -430 val_430 -458 val_458 -78 val_78 -41 val_41 -223 val_223 -449 val_449 -228 val_228 -138 val_138 -453 val_453 -64 val_64 -69 val_69 -368 val_368 -296 val_296 -219 val_219 -485 val_485 -223 val_223 -480 val_480 -467 val_467 -331 val_331 -403 val_403 -480 val_480 -291 val_291 -255 val_255 -200 val_200 -237 val_237 -417 val_417 -444 val_444 -480 val_480 -214 val_214 -462 val_462 -133 val_133 -421 val_421 -462 val_462 -458 val_458 -273 val_273 -183 val_183 -28 val_28 -37 val_37 -403 val_403 -200 val_200 -179 val_180 -291 val_292 -241 val_242 -287 val_288 -246 val_247 -278 val_279 -296 val_297 -147 val_148 -273 val_274 -106 val_107 -421 val_422 -133 val_134 -110 val_111 -485 val_486 -241 val_242 -408 val_409 -476 val_477 -494 val_495 -485 val_486 -50 val_51 -3 val_4 -101 val_102 -331 val_332 -390 val_391 -165 val_166 -304 val_305 -264 val_265 -129 val_130 -453 val_454 -421 val_422 -82 val_83 -467 val_468 -147 val_148 -291 val_292 -426 val_427 -386 val_387 -129 val_130 -246 val_247 -197 val_198 -390 val_391 -23 val_24 -480 val_481 -480 val_481 -331 val_332 -183 val_184 -318 val_319 -476 val_477 -174 val_175 -87 val_88 -264 val_265 -336 val_337 -340 val_341 -390 val_391 -363 val_364 -19 val_20 -241 val_242 -363 val_364 -395 val_396 -386 val_387 -8 val_9 -313 val_314 -381 val_382 -69 val_70 -322 val_323 -241 val_242 -32 val_33 -192 val_193 -205 val_206 -138 val_139 -160 val_161 -174 val_175 -412 val_413 -494 val_495 -287 val_288 -435 val_436 -151 val_152 -156 val_157 -462 val_463 -87 val_88 -408 val_409 -412 val_413 -151 val_152 -156 val_157 -228 val_229 -363 val_364 -46 val_47 -78 val_79 -82 val_83 -430 val_431 -309 val_310 -138 val_139 -91 val_92 -395 val_396 +86 val_86 +27 val_27 +484 val_484 +150 val_150 +128 val_128 +213 val_213 +146 val_146 +281 val_281 +277 val_277 +209 val_209 +394 val_394 +466 val_466 +399 val_399 +489 val_489 +439 val_439 +367 val_367 +475 val_475 +155 val_155 +128 val_128 +489 val_489 +72 val_72 +277 val_277 +399 val_399 +169 val_169 +498 val_498 +286 val_286 +187 val_187 +54 val_54 +213 val_213 +137 val_137 +489 val_489 +353 val_353 +272 val_272 +466 val_466 +411 val_411 +119 val_119 +100 val_100 +191 val_191 +308 val_308 +95 val_95 +196 val_196 +457 val_457 +187 val_187 +470 val_470 +137 val_137 +169 val_169 +77 val_77 +72 val_72 +90 val_90 +277 val_277 +272 val_272 +452 val_452 +402 val_402 +317 val_317 +95 val_95 +498 val_498 +321 val_321 +119 val_119 +489 val_489 +218 val_218 +209 val_209 +367 val_367 +344 val_344 +263 val_263 +191 val_191 +128 val_128 +2 val_2 +321 val_321 +335 val_335 +466 val_466 +105 val_105 +114 val_114 +90 val_90 +164 val_164 +164 val_164 +187 val_187 +119 val_119 +439 val_439 
+169 val_169 +443 val_443 +277 val_277 +178 val_178 +317 val_317 +493 val_493 +353 val_353 +407 val_407 +18 val_18 +100 val_100 +498 val_498 +146 val_146 +362 val_362 +18 val_18 +281 val_281 +344 val_344 +448 val_448 +222 val_222 +90 val_90 +169 val_169 +281 val_282 +245 val_246 +425 val_426 +272 val_273 +231 val_232 +448 val_449 +31 val_32 +443 val_444 +371 val_372 +402 val_403 +128 val_129 +240 val_241 +286 val_287 +335 val_336 +367 val_368 +59 val_60 +349 val_350 +123 val_124 +402 val_403 +128 val_129 +63 val_64 +344 val_345 +277 val_278 +114 val_115 +209 val_210 +281 val_282 +114 val_115 +254 val_255 +196 val_197 +86 val_87 +222 val_223 +411 val_412 +281 val_282 +68 val_69 +119 val_120 +376 val_377 +213 val_214 +132 val_133 +191 val_192 +349 val_350 +475 val_476 +399 val_400 +77 val_78 +114 val_115 +443 val_444 +484 val_485 +407 val_408 +367 val_368 +2 val_3 +349 val_350 +164 val_165 +326 val_327 +308 val_309 +119 val_120 +358 val_359 +105 val_106 +416 val_417 +77 val_78 +461 val_462 +40 val_41 +385 val_386 +259 val_260 +303 val_304 +245 val_246 +303 val_304 +277 val_278 +132 val_133 +443 val_444 +399 val_400 +317 val_318 +330 val_331 +209 val_210 +489 val_490 +295 val_296 +105 val_106 +204 val_205 +439 val_440 +457 val_458 +470 val_471 +196 val_197 +119 val_120 +268 val_269 +295 val_296 +407 val_408 +439 val_440 +335 val_336 +164 val_165 +349 val_350 +371 val_372 +123 val_124 +182 val_183 +353 val_354 +371 val_372 +385 val_386 +40 val_41 +259 val_260 +178 val_179 +100 val_101 +22 val_23 +402 val_403 +137 val_138 +196 val_197 +100 val_101 +407 val_408 +371 val_372 +308 val_309 +178 val_179 +457 val_458 +227 val_228 diff --git a/ql/src/test/results/clientpositive/sample6.q.out b/ql/src/test/results/clientpositive/sample6.q.out index 9ca15a68f..f662b8e1a 100644 --- a/ql/src/test/results/clientpositive/sample6.q.out +++ b/ql/src/test/results/clientpositive/sample6.q.out @@ -8,32 +8,11 @@ STAGE DEPENDENCIES: STAGE PLANS: Stage: Stage-1 Map Reduce - Path -> Alias: - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket - Path -> Partition: - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket - Partition - - properties: - name srcbucket - bucket_field_name - serialization.format 1 - columns key,value - field_to_dimension - bucket_count 2 - serialization.lib simple_meta - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - location file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket - name: srcbucket - serde: simple_meta - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat Alias -> Map Operator Tree: s Filter Operator predicate: - expr: (((default_sample_hashfn(key) & 2147483647) % 4) = 1) + expr: (((default_sample_hashfn(key) & 2147483647) % 4) = 0) type: Boolean Select Operator expressions: @@ -42,284 +21,318 @@ STAGE PLANS: expr: value type: string File Output Operator + directory: /tmp/hive-njain/391691394/413823912.10000.insclause-0 table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat properties: name dest1 - bucket_field_name + serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 columns key,value SORTBUCKETCOLSPREFIX TRUE - field_to_dimension bucket_count -1 - serialization.lib simple_meta + 
serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - location file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe name: dest1 - serde: simple_meta - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - directory: /tmp/hive-zshao/6950986.10000.insclause-0 Needs Tagging: + Path -> Alias: + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + Path -> Partition: + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + Partition + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + properties: + name srcbucket + serialization.ddl struct srcbucket { string key, string value} + serialization.format 1 + columns key,value + bucket_count 2 + serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: srcbucket Stage: Stage-0 Move Operator tables: + replace: + source: /tmp/hive-njain/391691394/413823912.10000.insclause-0 table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat properties: name dest1 - bucket_field_name + serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 columns key,value SORTBUCKETCOLSPREFIX TRUE - field_to_dimension bucket_count -1 - serialization.lib simple_meta + serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - location file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe name: dest1 - serde: simple_meta - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - replace: - source: /tmp/hive-zshao/6950986.10000.insclause-0 - - -27 val_27 -409 val_409 -265 val_265 -401 val_401 -166 val_166 -430 val_430 -287 val_287 -207 val_207 -199 val_199 -247 val_247 -489 val_489 -162 val_162 -397 val_397 -342 val_342 -195 val_195 -155 val_155 -203 val_203 -339 val_339 -302 val_302 -438 val_438 -489 val_489 -221 val_221 -92 val_92 -111 val_111 -427 val_427 -382 val_382 -386 val_386 -430 val_430 -221 val_221 -137 val_137 -180 val_180 -12 val_12 -67 val_67 -489 val_489 -353 val_353 -272 val_272 -463 val_463 -119 val_119 -496 val_496 -393 val_393 -100 val_100 -298 val_298 -199 val_199 -191 val_191 -96 val_96 -481 val_481 -409 val_409 -470 val_470 -137 val_137 -85 val_85 -364 val_364 -306 val_306 -272 val_272 -331 val_331 -401 val_401 -452 val_452 -177 val_177 -5 
val_5 -317 val_317 -34 val_34 -229 val_229 -195 val_195 -430 val_430 -119 val_119 -489 val_489 -78 val_78 -41 val_41 -492 val_492 -449 val_449 -218 val_218 -30 val_30 -74 val_74 -342 val_342 -368 val_368 -485 val_485 -70 val_70 -401 val_401 -191 val_191 -5 val_5 -438 val_438 -467 val_467 -229 val_229 -463 val_463 -283 val_283 -331 val_331 -335 val_335 -104 val_104 -409 val_409 -401 val_401 -258 val_258 -203 val_203 -12 val_12 -478 val_478 -298 val_298 -382 val_382 -5 val_5 -70 val_70 -397 val_397 -104 val_104 -70 val_70 -438 val_438 -119 val_119 -360 val_360 -199 val_199 -478 val_478 -317 val_317 -207 val_207 -265 val_265 -353 val_353 -214 val_214 -133 val_133 -375 val_375 -401 val_401 -67 val_67 -379 val_379 -492 val_492 -100 val_100 -298 val_298 -9 val_9 -126 val_126 -474 val_475 -423 val_424 -177 val_178 -272 val_273 -287 val_288 -126 val_127 -441 val_442 -371 val_372 -243 val_244 -133 val_134 -485 val_486 -85 val_86 -335 val_336 -485 val_486 -393 val_394 -409 val_410 -265 val_266 -63 val_64 -331 val_332 -56 val_57 -254 val_255 -492 val_493 -5 val_6 -438 val_439 -360 val_361 -467 val_468 -16 val_17 -119 val_120 -492 val_493 -306 val_307 -393 val_394 -427 val_428 -386 val_387 -184 val_185 -375 val_376 -191 val_192 -122 val_123 -438 val_439 -23 val_24 -328 val_329 -382 val_383 -342 val_343 -331 val_332 -409 val_410 -463 val_464 -177 val_178 -258 val_259 -104 val_105 -140 val_141 -30 val_31 -405 val_406 -119 val_120 -49 val_50 -416 val_417 -199 val_200 -375 val_376 -89 val_90 -386 val_387 -364 val_365 -89 val_90 -52 val_53 -313 val_314 -324 val_325 -328 val_329 -122 val_123 -104 val_105 -481 val_482 -70 val_71 -52 val_53 -317 val_318 -412 val_413 -16 val_17 -375 val_376 -489 val_490 -126 val_127 -470 val_471 -302 val_303 -119 val_120 -375 val_376 -287 val_288 -375 val_376 -12 val_13 -151 val_152 -52 val_53 -104 val_105 -335 val_336 -371 val_372 -496 val_497 -144 val_145 -382 val_383 -122 val_123 -427 val_428 -474 val_475 -353 val_354 -52 val_53 -261 val_262 -371 val_372 -412 val_413 -49 val_50 -478 val_479 -243 val_244 -151 val_152 -100 val_101 -401 val_402 -137 val_138 -162 val_163 -276 val_277 -78 val_79 -379 val_380 -100 val_101 -371 val_372 -430 val_431 -478 val_479 -1 val_2 -89 val_90 +165 val_165 +484 val_484 +150 val_150 +224 val_224 +66 val_66 +213 val_213 +374 val_374 +495 val_495 +37 val_37 +327 val_327 +15 val_15 +338 val_338 +459 val_459 +466 val_466 +396 val_396 +309 val_309 +367 val_367 +0 val_0 +455 val_455 +316 val_316 +345 val_345 +129 val_129 +378 val_378 +4 val_4 +356 val_356 +169 val_169 +125 val_125 +437 val_437 +286 val_286 +187 val_187 +176 val_176 +459 val_459 +51 val_51 +103 val_103 +239 val_239 +213 val_213 +176 val_176 +275 val_275 +260 val_260 +404 val_404 +217 val_217 +84 val_84 +466 val_466 +8 val_8 +411 val_411 +172 val_172 +129 val_129 +158 val_158 +0 val_0 +26 val_26 +165 val_165 +327 val_327 +51 val_51 +404 val_404 +95 val_95 +282 val_282 +187 val_187 +316 val_316 +169 val_169 +77 val_77 +0 val_0 +118 val_118 +282 val_282 +419 val_419 +15 val_15 +118 val_118 +19 val_19 +224 val_224 +309 val_309 +389 val_389 +327 val_327 +242 val_242 +392 val_392 +242 val_242 +396 val_396 +95 val_95 +11 val_11 +143 val_143 +228 val_228 +33 val_33 +103 val_103 +367 val_367 +239 val_239 +480 val_480 +202 val_202 +316 val_316 +235 val_235 +80 val_80 +44 val_44 +466 val_466 +257 val_257 +190 val_190 +114 val_114 +396 val_396 +217 val_217 +125 val_125 +187 val_187 +480 val_480 +491 val_491 +305 val_305 +444 val_444 +169 val_169 +323 val_323 +480 val_480 +136 val_136 +172 val_172 
+462 val_462 +26 val_26 +462 val_462 +341 val_341 +183 val_183 +84 val_84 +37 val_37 +448 val_448 +194 val_194 +477 val_477 +169 val_169 +400 val_400 +62 val_63 +271 val_272 +217 val_218 +455 val_456 +231 val_232 +448 val_449 +246 val_247 +440 val_441 +147 val_148 +356 val_357 +217 val_218 +33 val_34 +15 val_16 +110 val_111 +275 val_276 +0 val_1 +293 val_294 +286 val_287 +408 val_409 +477 val_478 +455 val_456 +99 val_100 +367 val_368 +59 val_60 +48 val_49 +488 val_489 +349 val_350 +11 val_12 +161 val_162 +224 val_225 +4 val_5 +206 val_207 +114 val_115 +239 val_240 +389 val_390 +114 val_115 +235 val_236 +165 val_166 +264 val_265 +11 val_12 +129 val_130 +257 val_258 +323 val_324 +411 val_412 +338 val_339 +147 val_148 +473 val_474 +213 val_214 +426 val_427 +132 val_133 +121 val_122 +129 val_130 +246 val_247 +491 val_492 +352 val_353 +349 val_350 +480 val_481 +480 val_481 +183 val_184 +15 val_16 +392 val_393 +77 val_78 +275 val_276 +114 val_115 +161 val_162 +264 val_265 +48 val_49 +484 val_485 +305 val_306 +367 val_368 +260 val_261 +349 val_350 +51 val_52 +363 val_364 +118 val_119 +19 val_20 +80 val_81 +121 val_122 +239 val_240 +11 val_12 +143 val_144 +341 val_342 +363 val_364 +77 val_78 +293 val_294 +40 val_41 +8 val_9 +206 val_207 +381 val_382 +404 val_405 +206 val_207 +385 val_386 +260 val_261 +404 val_405 +451 val_452 +132 val_133 +356 val_357 +352 val_353 +330 val_331 +257 val_258 +374 val_375 +437 val_438 +334 val_335 +404 val_405 +0 val_1 +268 val_269 +491 val_492 +392 val_393 +341 val_342 +118 val_119 +172 val_173 +136 val_137 +349 val_350 +462 val_463 +408 val_409 +378 val_379 +495 val_496 +385 val_386 +40 val_41 +389 val_390 +125 val_126 +415 val_416 +228 val_229 +363 val_364 +22 val_23 +459 val_460 +62 val_63 +341 val_342 +440 val_441 +309 val_310 +161 val_162 +161 val_162 +91 val_92 +118 val_119 +242 val_243 diff --git a/ql/src/test/results/clientpositive/sample7.q.out b/ql/src/test/results/clientpositive/sample7.q.out index b98ce4c3e..fa1163147 100644 --- a/ql/src/test/results/clientpositive/sample7.q.out +++ b/ql/src/test/results/clientpositive/sample7.q.out @@ -8,32 +8,11 @@ STAGE DEPENDENCIES: STAGE PLANS: Stage: Stage-1 Map Reduce - Path -> Alias: - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket - Path -> Partition: - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket - Partition - - properties: - name srcbucket - bucket_field_name - serialization.format 1 - columns key,value - field_to_dimension - bucket_count 2 - serialization.lib simple_meta - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - location file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket - name: srcbucket - serde: simple_meta - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat Alias -> Map Operator Tree: s Filter Operator predicate: - expr: (((default_sample_hashfn(key) & 2147483647) % 4) = 1) + expr: (((default_sample_hashfn(key) & 2147483647) % 4) = 0) type: Boolean Filter Operator predicate: @@ -46,239 +25,269 @@ STAGE PLANS: expr: value type: string File Output Operator + directory: /tmp/hive-njain/1265167991/144531187.10000.insclause-0 table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat properties: name dest1 - bucket_field_name + 
serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 columns key,value SORTBUCKETCOLSPREFIX TRUE - field_to_dimension bucket_count -1 - serialization.lib simple_meta + serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - location file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe name: dest1 - serde: simple_meta - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - directory: /tmp/hive-zshao/484898228.10000.insclause-0 Needs Tagging: + Path -> Alias: + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + Path -> Partition: + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + Partition + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + properties: + name srcbucket + serialization.ddl struct srcbucket { string key, string value} + serialization.format 1 + columns key,value + bucket_count 2 + serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: srcbucket Stage: Stage-0 Move Operator tables: + replace: + source: /tmp/hive-njain/1265167991/144531187.10000.insclause-0 table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat properties: name dest1 - bucket_field_name + serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 columns key,value SORTBUCKETCOLSPREFIX TRUE - field_to_dimension bucket_count -1 - serialization.lib simple_meta + serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - location file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + location file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe name: dest1 - serde: simple_meta - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - replace: - source: /tmp/hive-zshao/484898228.10000.insclause-0 - - -409 val_409 -265 val_265 -401 val_401 -166 val_166 -430 val_430 -287 val_287 -207 val_207 -199 val_199 -247 val_247 -489 val_489 -162 val_162 -397 val_397 -342 val_342 -195 val_195 -155 val_155 -203 val_203 -339 val_339 -302 val_302 -438 val_438 -489 val_489 -221 val_221 -111 val_111 -427 val_427 -382 val_382 -386 val_386 -430 val_430 -221 val_221 -137 val_137 -180 val_180 -489 val_489 -353 val_353 -272 val_272 -463 val_463 -119 val_119 -496 val_496 -393 val_393 -298 val_298 -199 val_199 -191 val_191 -481 val_481 -409 val_409 
-470 val_470 -137 val_137 -364 val_364 -306 val_306 -272 val_272 -331 val_331 -401 val_401 -452 val_452 -177 val_177 -317 val_317 -229 val_229 -195 val_195 -430 val_430 -119 val_119 -489 val_489 -492 val_492 -449 val_449 -218 val_218 -342 val_342 -368 val_368 -485 val_485 -401 val_401 -191 val_191 -438 val_438 -467 val_467 -229 val_229 -463 val_463 -283 val_283 -331 val_331 -335 val_335 -104 val_104 -409 val_409 -401 val_401 -258 val_258 -203 val_203 -478 val_478 -298 val_298 -382 val_382 -397 val_397 -104 val_104 -438 val_438 -119 val_119 -360 val_360 -199 val_199 -478 val_478 -317 val_317 -207 val_207 -265 val_265 -353 val_353 -214 val_214 -133 val_133 -375 val_375 -401 val_401 -379 val_379 -492 val_492 -298 val_298 -126 val_126 -474 val_475 -423 val_424 -177 val_178 -272 val_273 -287 val_288 -126 val_127 -441 val_442 -371 val_372 -243 val_244 -133 val_134 -485 val_486 -335 val_336 -485 val_486 -393 val_394 -409 val_410 -265 val_266 -331 val_332 -254 val_255 -492 val_493 -438 val_439 -360 val_361 -467 val_468 -119 val_120 -492 val_493 -306 val_307 -393 val_394 -427 val_428 -386 val_387 -184 val_185 -375 val_376 -191 val_192 -122 val_123 -438 val_439 -328 val_329 -382 val_383 -342 val_343 -331 val_332 -409 val_410 -463 val_464 -177 val_178 -258 val_259 -104 val_105 -140 val_141 -405 val_406 -119 val_120 -416 val_417 -199 val_200 -375 val_376 -386 val_387 -364 val_365 -313 val_314 -324 val_325 -328 val_329 -122 val_123 -104 val_105 -481 val_482 -317 val_318 -412 val_413 -375 val_376 -489 val_490 -126 val_127 -470 val_471 -302 val_303 -119 val_120 -375 val_376 -287 val_288 -375 val_376 -151 val_152 -104 val_105 -335 val_336 -371 val_372 -496 val_497 -144 val_145 -382 val_383 -122 val_123 -427 val_428 -474 val_475 -353 val_354 -261 val_262 -371 val_372 -412 val_413 -478 val_479 -243 val_244 -151 val_152 -401 val_402 -137 val_138 -162 val_163 -276 val_277 -379 val_380 -371 val_372 -430 val_431 -478 val_479 +165 val_165 +484 val_484 +150 val_150 +224 val_224 +213 val_213 +374 val_374 +495 val_495 +327 val_327 +338 val_338 +459 val_459 +466 val_466 +396 val_396 +309 val_309 +367 val_367 +455 val_455 +316 val_316 +345 val_345 +129 val_129 +378 val_378 +356 val_356 +169 val_169 +125 val_125 +437 val_437 +286 val_286 +187 val_187 +176 val_176 +459 val_459 +103 val_103 +239 val_239 +213 val_213 +176 val_176 +275 val_275 +260 val_260 +404 val_404 +217 val_217 +466 val_466 +411 val_411 +172 val_172 +129 val_129 +158 val_158 +165 val_165 +327 val_327 +404 val_404 +282 val_282 +187 val_187 +316 val_316 +169 val_169 +118 val_118 +282 val_282 +419 val_419 +118 val_118 +224 val_224 +309 val_309 +389 val_389 +327 val_327 +242 val_242 +392 val_392 +242 val_242 +396 val_396 +143 val_143 +228 val_228 +103 val_103 +367 val_367 +239 val_239 +480 val_480 +202 val_202 +316 val_316 +235 val_235 +466 val_466 +257 val_257 +190 val_190 +114 val_114 +396 val_396 +217 val_217 +125 val_125 +187 val_187 +480 val_480 +491 val_491 +305 val_305 +444 val_444 +169 val_169 +323 val_323 +480 val_480 +136 val_136 +172 val_172 +462 val_462 +462 val_462 +341 val_341 +183 val_183 +448 val_448 +194 val_194 +477 val_477 +169 val_169 +400 val_400 +271 val_272 +217 val_218 +455 val_456 +231 val_232 +448 val_449 +246 val_247 +440 val_441 +147 val_148 +356 val_357 +217 val_218 +110 val_111 +275 val_276 +293 val_294 +286 val_287 +408 val_409 +477 val_478 +455 val_456 +367 val_368 +488 val_489 +349 val_350 +161 val_162 +224 val_225 +206 val_207 +114 val_115 +239 val_240 +389 val_390 +114 val_115 +235 val_236 +165 val_166 +264 val_265 +129 
val_130 +257 val_258 +323 val_324 +411 val_412 +338 val_339 +147 val_148 +473 val_474 +213 val_214 +426 val_427 +132 val_133 +121 val_122 +129 val_130 +246 val_247 +491 val_492 +352 val_353 +349 val_350 +480 val_481 +480 val_481 +183 val_184 +392 val_393 +275 val_276 +114 val_115 +161 val_162 +264 val_265 +484 val_485 +305 val_306 +367 val_368 +260 val_261 +349 val_350 +363 val_364 +118 val_119 +121 val_122 +239 val_240 +143 val_144 +341 val_342 +363 val_364 +293 val_294 +206 val_207 +381 val_382 +404 val_405 +206 val_207 +385 val_386 +260 val_261 +404 val_405 +451 val_452 +132 val_133 +356 val_357 +352 val_353 +330 val_331 +257 val_258 +374 val_375 +437 val_438 +334 val_335 +404 val_405 +268 val_269 +491 val_492 +392 val_393 +341 val_342 +118 val_119 +172 val_173 +136 val_137 +349 val_350 +462 val_463 +408 val_409 +378 val_379 +495 val_496 +385 val_386 +389 val_390 +125 val_126 +415 val_416 +228 val_229 +363 val_364 +459 val_460 +341 val_342 +440 val_441 +309 val_310 +161 val_162 +161 val_162 +118 val_119 +242 val_243 diff --git a/ql/src/test/results/clientpositive/show_tables.q.out b/ql/src/test/results/clientpositive/show_tables.q.out new file mode 100644 index 000000000..8377e22d7 --- /dev/null +++ b/ql/src/test/results/clientpositive/show_tables.q.out @@ -0,0 +1,28 @@ +ABSTRACT SYNTAX TREE: + (TOK_SHOWTABLES 'shtb_*') + +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Show Table Operator: + Show Tables + pattern: shtb_* + + +shtb_test1 shtb_test2 +ABSTRACT SYNTAX TREE: + (TOK_SHOWTABLES 'shtb_test1|shtb_test2') + +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Show Table Operator: + Show Tables + pattern: shtb_test1|shtb_test2 + + +shtb_test1 shtb_test2 diff --git a/ql/src/test/results/clientpositive/showparts.q.out b/ql/src/test/results/clientpositive/showparts.q.out new file mode 100644 index 000000000..96087b687 --- /dev/null +++ b/ql/src/test/results/clientpositive/showparts.q.out @@ -0,0 +1,17 @@ +ABSTRACT SYNTAX TREE: + (TOK_SHOWPARTITIONS srcpart) + +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Show Partitions Operator: + Show Partitions + table: srcpart + + +ds=2008-04-08/hr=11 +ds=2008-04-08/hr=12 +ds=2008-04-09/hr=11 +ds=2008-04-09/hr=12 diff --git a/ql/src/test/results/clientpositive/subq.q.out b/ql/src/test/results/clientpositive/subq.q.out index ea456b16c..80c89fd36 100644 --- a/ql/src/test/results/clientpositive/subq.q.out +++ b/ql/src/test/results/clientpositive/subq.q.out @@ -34,8 +34,8 @@ STAGE PLANS: Stage: Stage-0 Move Operator files: - destination: ../../../../build/contrib/hive/ql/test/data/warehouse/union.out hdfs directory: + destination: ../../../../build/contrib/hive/ql/test/data/warehouse/union.out 86val_86 diff --git a/ql/src/test/results/clientpositive/subq2.q.out b/ql/src/test/results/clientpositive/subq2.q.out new file mode 100644 index 000000000..b722d56e6 --- /dev/null +++ b/ql/src/test/results/clientpositive/subq2.q.out @@ -0,0 +1,340 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src b)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF b key) k) (TOK_SELEXPR (TOK_FUNCTION count 1) c)) (TOK_GROUPBY (TOK_COLREF b key)))) a)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF a k)) (TOK_SELEXPR (TOK_COLREF a c))) (TOK_WHERE (>= (TOK_COLREF a k) 90)))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on 
stages: Stage-1 + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + a:b + Reduce Output Operator + key expressions: + expr: key + type: string + # partition fields: -1 + tag: -1 + value expressions: + expr: 1 + type: int + Reduce Operator Tree: + Group By Operator + + expr: count(VALUE.0) + keys: + expr: KEY.0 + type: string + mode: partial1 + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + Stage: Stage-2 + Map Reduce + Alias -> Map Operator Tree: + /tmp/hive-njain/57219117/75379653.10002 + Reduce Output Operator + key expressions: + expr: 0 + type: string + # partition fields: 1 + tag: -1 + value expressions: + expr: 1 + type: string + Reduce Operator Tree: + Group By Operator + + expr: count(VALUE.0) + keys: + expr: KEY.0 + type: string + mode: partial2 + Select Operator + expressions: + expr: 0 + type: string + expr: 1 + type: string + Filter Operator + predicate: + expr: (0 >= 90) + type: Boolean + Select Operator + expressions: + expr: 0 + type: string + expr: 1 + type: string + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +100 2 +103 2 +104 2 +105 1 +111 1 +113 2 +114 1 +116 1 +118 2 +119 3 +120 2 +125 2 +126 1 +128 3 +129 2 +131 1 +133 1 +134 2 +136 1 +137 2 +138 4 +143 1 +145 1 +146 2 +149 2 +150 1 +152 2 +153 1 +155 1 +156 1 +157 1 +158 1 +160 1 +162 1 +163 1 +164 2 +165 2 +166 1 +167 3 +168 1 +169 4 +170 1 +172 2 +174 2 +175 2 +176 2 +177 1 +178 1 +179 2 +180 1 +181 1 +183 1 +186 1 +187 3 +189 1 +190 1 +191 2 +192 1 +193 3 +194 1 +195 2 +196 1 +197 2 +199 3 +200 2 +201 1 +202 1 +203 2 +205 2 +207 2 +208 3 +209 2 +213 2 +214 1 +216 2 +217 2 +218 1 +219 2 +221 2 +222 1 +223 2 +224 2 +226 1 +228 1 +229 2 +230 5 +233 2 +235 1 +237 2 +238 2 +239 2 +241 1 +242 2 +244 1 +247 1 +248 1 +249 1 +252 1 +255 2 +256 2 +257 1 +258 1 +260 1 +262 1 +263 1 +265 2 +266 1 +272 2 +273 3 +274 1 +275 1 +277 4 +278 2 +280 2 +281 2 +282 2 +283 1 +284 1 +285 1 +286 1 +287 1 +288 2 +289 1 +291 1 +292 1 +296 1 +298 3 +302 1 +305 1 +306 1 +307 2 +308 1 +309 2 +310 1 +311 3 +315 1 +316 3 +317 2 +318 3 +321 2 +322 2 +323 1 +325 2 +327 3 +331 2 +332 1 +333 2 +335 1 +336 1 +338 1 +339 1 +341 1 +342 2 +344 2 +345 1 +348 5 +351 1 +353 2 +356 1 +360 1 +362 1 +364 1 +365 1 +366 1 +367 2 +368 1 +369 3 +373 1 +374 1 +375 1 +377 1 +378 1 +379 1 +382 2 +384 3 +386 1 +389 1 +392 1 +393 1 +394 1 +395 2 +396 3 +397 2 +399 2 +400 1 +401 5 +402 1 +403 3 +404 2 +406 4 +407 1 +409 3 +411 1 +413 2 +414 2 +417 3 +418 1 +419 1 +421 1 +424 2 +427 1 +429 2 +430 3 +431 3 +432 1 +435 1 +436 1 +437 1 +438 3 +439 2 +443 1 +444 1 +446 1 +448 1 +449 1 +452 1 +453 1 +454 3 +455 1 +457 1 +458 2 +459 2 +460 1 +462 2 +463 2 +466 3 +467 1 +468 4 +469 5 +470 1 +472 1 +475 1 +477 1 +478 2 +479 1 +480 3 +481 1 +482 1 +483 1 +484 1 +485 1 +487 1 +489 4 +490 1 +491 1 +492 2 +493 1 +494 1 +495 1 +496 1 +497 1 +498 3 +90 3 +92 1 +95 2 +96 1 +97 2 +98 2 diff --git a/ql/src/test/results/clientpositive/udf1.q.out b/ql/src/test/results/clientpositive/udf1.q.out index 6fea10d2b..d07ec125d 100644 --- a/ql/src/test/results/clientpositive/udf1.q.out +++ b/ql/src/test/results/clientpositive/udf1.q.out @@ -52,21 +52,20 @@ STAGE PLANS: type: string File Output Operator table: - name: dest1 - serde: simple_meta input format: 
org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 Stage: Stage-0 Move Operator tables: + replace: table: - name: dest1 - serde: simple_meta input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - replace: + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 true false true true true false false false true true false true false acc abc abb hive - diff --git a/ql/src/test/results/clientpositive/udf2.q.out b/ql/src/test/results/clientpositive/udf2.q.out index 0c0156ff1..14a0adc5c 100644 --- a/ql/src/test/results/clientpositive/udf2.q.out +++ b/ql/src/test/results/clientpositive/udf2.q.out @@ -1,509 +1,39 @@ -ABSTRACT SYNTAX TREE: - (TOK_CREATEFUNCTION testlength 'org.apache.hadoop.hive.ql.udf.UDFTestLength') - -STAGE DEPENDENCIES: - Stage-0 is a root stage - -STAGE PLANS: - Stage: Stage-0 - -7 -6 -7 -6 -7 -7 -7 -7 -6 -7 -7 -7 -7 -7 -7 -7 -7 -6 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -6 -7 -7 -7 -7 -6 -6 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -6 -7 -7 -7 -7 -5 -7 -7 -7 -7 -6 -7 -7 -7 -7 -7 -7 -7 -6 -7 -7 -7 -7 -6 -7 -6 -6 -5 -7 -6 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -6 -7 -6 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -6 -7 -7 -7 -7 -7 -7 -6 -7 -7 -7 -6 -7 -7 -6 -7 -7 -7 -7 -7 -7 -7 -7 -7 -6 -7 -7 -6 -5 -7 -7 -7 -7 -6 -7 -7 -7 -7 -6 -7 -7 -7 -7 -5 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -6 -6 -7 -7 -7 -7 -7 -7 -6 -7 -6 -7 -7 -7 -7 -7 -6 -7 -7 -7 -7 -6 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -6 -6 -5 -7 -6 -7 -7 -7 -7 -7 -7 -7 -7 -7 -6 -7 -6 -6 -7 -6 -7 -6 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -5 -7 -7 -7 -7 -7 -6 -6 -7 -6 -6 -7 -6 -7 -7 -7 -7 -7 -7 -7 -7 -6 -7 -7 -7 -7 -7 -6 -6 -6 -7 -7 -7 -7 -7 -7 -7 -7 -6 -7 -6 -7 -6 -6 -7 -6 -7 -6 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -6 -7 -7 -7 -7 -7 -5 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -5 -6 -7 -7 -7 -6 -6 -7 -7 -7 -7 -7 -7 -7 -7 -7 -6 -7 -7 -7 -7 -7 -7 -7 -7 -7 -6 -7 -7 -7 -7 -6 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -5 -6 -7 -7 -7 -6 -7 -7 -7 -6 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -6 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -6 -7 -6 -7 -7 -6 -7 -7 -7 -7 -5 -7 -7 -7 -7 -7 -7 -7 -7 -7 -6 -7 -7 -7 -7 -6 -7 -7 -6 -6 -6 -7 -7 -7 -7 -7 -7 -7 -7 -7 -6 -7 -7 -7 -7 -6 +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF dest1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR '|') (TOK_SELEXPR (TOK_FUNCTION trim (TOK_COLREF dest1 c1))) (TOK_SELEXPR '|') (TOK_SELEXPR (TOK_FUNCTION rtrim (TOK_COLREF dest1 c1))) (TOK_SELEXPR '|') (TOK_SELEXPR (TOK_FUNCTION ltrim (TOK_COLREF dest1 c1))) (TOK_SELEXPR '|')))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + dest1 + Select Operator + expressions: + expr: '|' + type: string + expr: trim(c1) + type: string + expr: '|' + type: string + expr: rtrim(c1) + type: string + expr: '|' + type: string + expr: ltrim(c1) + type: string + expr: '|' + type: string + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +| abc | abc | abc | diff 
--git a/ql/src/test/results/clientpositive/udf3.q.out b/ql/src/test/results/clientpositive/udf3.q.out new file mode 100644 index 000000000..ec400ce28 --- /dev/null +++ b/ql/src/test/results/clientpositive/udf3.q.out @@ -0,0 +1,91 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count (TOK_FUNCTION TOK_INT ''))) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION TOK_INT ''))) (TOK_SELEXPR (TOK_FUNCTION avg (TOK_FUNCTION TOK_INT ''))) (TOK_SELEXPR (TOK_FUNCTION min (TOK_FUNCTION TOK_INT ''))) (TOK_SELEXPR (TOK_FUNCTION max (TOK_FUNCTION TOK_INT '')))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 depends on stages: Stage-2 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src + Reduce Output Operator + # partition fields: -1 + tag: -1 + value expressions: + expr: UDFToInteger('') + type: int + Reduce Operator Tree: + Group By Operator + + expr: max(VALUE.0) + expr: avg(VALUE.0) + expr: count(VALUE.0) + expr: sum(VALUE.0) + expr: min(VALUE.0) + mode: partial1 + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + Stage: Stage-2 + Map Reduce + Alias -> Map Operator Tree: + /tmp/hive-njain/159338588/98912587.10001 + Reduce Output Operator + # partition fields: 0 + tag: -1 + value expressions: + expr: 0 + type: string + expr: 1 + type: string + expr: 2 + type: string + expr: 3 + type: string + expr: 4 + type: string + Reduce Operator Tree: + Group By Operator + + expr: max(VALUE.0) + expr: avg(VALUE.1) + expr: count(VALUE.2) + expr: sum(VALUE.3) + expr: min(VALUE.4) + mode: partial2 + Select Operator + expressions: + expr: 2 + type: string + expr: 3 + type: string + expr: 1 + type: string + expr: 4 + type: string + expr: 0 + type: string + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + + Stage: Stage-0 + Move Operator + tables: + replace: + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + name: dest1 + + +0 NULL NULL NULL NULL diff --git a/ql/src/test/results/clientpositive/udf4.q.out b/ql/src/test/results/clientpositive/udf4.q.out new file mode 100644 index 000000000..ed176c8c6 --- /dev/null +++ b/ql/src/test/results/clientpositive/udf4.q.out @@ -0,0 +1,57 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF dest1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION round 1.0)) (TOK_SELEXPR (TOK_FUNCTION round 1.5)) (TOK_SELEXPR (TOK_FUNCTION round (- 1.5))) (TOK_SELEXPR (TOK_FUNCTION floor 1.0)) (TOK_SELEXPR (TOK_FUNCTION floor 1.5)) (TOK_SELEXPR (TOK_FUNCTION floor (- 1.5))) (TOK_SELEXPR (TOK_FUNCTION ceil 1.0)) (TOK_SELEXPR (TOK_FUNCTION ceil 1.5)) (TOK_SELEXPR (TOK_FUNCTION ceil (- 1.5))) (TOK_SELEXPR (TOK_FUNCTION ceiling 1.0)) (TOK_SELEXPR (TOK_FUNCTION rand 3)) (TOK_SELEXPR (+ 3)) (TOK_SELEXPR (- 3)) (TOK_SELEXPR (+ 1 (+ 2))) (TOK_SELEXPR (+ 1 (- 2))) (TOK_SELEXPR (~ 1))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map 
Operator Tree: + dest1 + Select Operator + expressions: + expr: round(1.0) + type: bigint + expr: round(1.5) + type: bigint + expr: round(- 1.5) + type: bigint + expr: floor(1.0) + type: bigint + expr: floor(1.5) + type: bigint + expr: floor(- 1.5) + type: bigint + expr: ceiling(1.0) + type: bigint + expr: ceiling(1.5) + type: bigint + expr: ceiling(- 1.5) + type: bigint + expr: ceiling(1.0) + type: bigint + expr: rand(UDFToLong(3)) + type: double + expr: 3 + type: int + expr: - 3 + type: int + expr: (1 + 2) + type: int + expr: (1 + - 2) + type: int + expr: ~ 1 + type: int + File Output Operator + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +1 2 -1 1 1 -2 1 2 -1 1 0.731057369148862 3 -3 3 -1 -2 diff --git a/ql/src/test/results/clientpositive/union.q.out b/ql/src/test/results/clientpositive/union.q.out index bba82bba3..a3c9a73a4 100644 --- a/ql/src/test/results/clientpositive/union.q.out +++ b/ql/src/test/results/clientpositive/union.q.out @@ -55,8 +55,8 @@ STAGE PLANS: Stage: Stage-0 Move Operator files: - destination: ../../../../build/contrib/hive/ql/test/data/warehouse/union.out hdfs directory: + destination: ../../../../build/contrib/hive/ql/test/data/warehouse/union.out 238val_238 diff --git a/ql/src/test/results/compiler/errors/invalid_dot.q.out b/ql/src/test/results/compiler/errors/invalid_dot.q.out new file mode 100644 index 000000000..13f4162c8 --- /dev/null +++ b/ql/src/test/results/compiler/errors/invalid_dot.q.out @@ -0,0 +1,2 @@ +Semantic Exception: +line 2:36 . operator is only supported on struct or list of struct types member \ No newline at end of file diff --git a/ql/src/test/results/compiler/errors/invalid_index.q.out b/ql/src/test/results/compiler/errors/invalid_index.q.out new file mode 100644 index 000000000..fa7e91785 --- /dev/null +++ b/ql/src/test/results/compiler/errors/invalid_index.q.out @@ -0,0 +1,2 @@ +Semantic Exception: +line 2:36 [] not Valid on Non Collection Types 0 \ No newline at end of file diff --git a/ql/src/test/results/compiler/errors/invalid_list_index.q.out b/ql/src/test/results/compiler/errors/invalid_list_index.q.out new file mode 100644 index 000000000..e72f2a60b --- /dev/null +++ b/ql/src/test/results/compiler/errors/invalid_list_index.q.out @@ -0,0 +1,2 @@ +Semantic Exception: +line 2:56 Non Constant Expressions for Array Indexes not Supported 'abc' \ No newline at end of file diff --git a/ql/src/test/results/compiler/errors/invalid_list_index2.q.out b/ql/src/test/results/compiler/errors/invalid_list_index2.q.out new file mode 100644 index 000000000..0531892ec --- /dev/null +++ b/ql/src/test/results/compiler/errors/invalid_list_index2.q.out @@ -0,0 +1,2 @@ +Semantic Exception: +line 2:56 Non Constant Expressions for Array Indexes not Supported 2 \ No newline at end of file diff --git a/ql/src/test/results/compiler/errors/invalid_map_index.q.out b/ql/src/test/results/compiler/errors/invalid_map_index.q.out new file mode 100644 index 000000000..1d4975817 --- /dev/null +++ b/ql/src/test/results/compiler/errors/invalid_map_index.q.out @@ -0,0 +1,2 @@ +Semantic Exception: +line 2:56 Map Key Type does not Match Index Expression Type 0 \ No newline at end of file diff --git a/ql/src/test/results/compiler/errors/invalid_map_index2.q.out b/ql/src/test/results/compiler/errors/invalid_map_index2.q.out new file mode 100644 index 000000000..f873288b0 --- /dev/null +++ 
b/ql/src/test/results/compiler/errors/invalid_map_index2.q.out @@ -0,0 +1,2 @@ +Semantic Exception: +line 2:56 Non Constant Expression for Map Indexes not Supported 'abc' \ No newline at end of file diff --git a/ql/src/test/results/compiler/parse/case_sensitivity.q.out b/ql/src/test/results/compiler/parse/case_sensitivity.q.out new file mode 100644 index 000000000..d3f5571df --- /dev/null +++ b/ql/src/test/results/compiler/parse/case_sensitivity.q.out @@ -0,0 +1 @@ +(TOK_QUERY (TOK_FROM (TOK_TABREF SRC_THRIFT)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR ([ (TOK_COLREF src_Thrift LINT) 1)) (TOK_SELEXPR (. ([ (TOK_COLREF src_thrift lintstring) 0) MYSTRING))) (TOK_WHERE (> ([ (TOK_COLREF src_thrift liNT) 0) 0)))) null \ No newline at end of file diff --git a/ql/src/test/results/compiler/parse/input_testxpath.q.out b/ql/src/test/results/compiler/parse/input_testxpath.q.out index 7e5ce6f73..178213b35 100644 --- a/ql/src/test/results/compiler/parse/input_testxpath.q.out +++ b/ql/src/test/results/compiler/parse/input_testxpath.q.out @@ -1 +1 @@ -(TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR ([ (TOK_COLREF src_thrift lint) 1)) (TOK_SELEXPR (. ([ (TOK_COLREF src_thrift lintstring) 0) mystring))))) null \ No newline at end of file +(TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR ([ (TOK_COLREF src_thrift lint) 1)) (TOK_SELEXPR (. ([ (TOK_COLREF src_thrift lintstring) 0) mystring)) (TOK_SELEXPR ([ (TOK_COLREF src_thrift mstringstring) 'key_2'))))) null \ No newline at end of file diff --git a/ql/src/test/results/compiler/parse/input_testxpath2.q.out b/ql/src/test/results/compiler/parse/input_testxpath2.q.out new file mode 100644 index 000000000..4a5512883 --- /dev/null +++ b/ql/src/test/results/compiler/parse/input_testxpath2.q.out @@ -0,0 +1 @@ +(TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION size (TOK_COLREF src_thrift lint))) (TOK_SELEXPR (TOK_FUNCTION size (TOK_COLREF src_thrift lintstring))) (TOK_SELEXPR (TOK_FUNCTION size (TOK_COLREF src_thrift mstringstring)))) (TOK_WHERE (AND (TOK_FUNCTION TOK_ISNOTNULL (TOK_COLREF src_thrift lint)) (NOT (TOK_FUNCTION TOK_ISNULL (TOK_COLREF src_thrift mstringstring))))))) null \ No newline at end of file diff --git a/ql/src/test/results/compiler/parse/udf4.q.out b/ql/src/test/results/compiler/parse/udf4.q.out new file mode 100644 index 000000000..bc3da974f --- /dev/null +++ b/ql/src/test/results/compiler/parse/udf4.q.out @@ -0,0 +1 @@ +(TOK_QUERY (TOK_FROM (TOK_TABREF dest1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION round 1.0)) (TOK_SELEXPR (TOK_FUNCTION round 1.5)) (TOK_SELEXPR (TOK_FUNCTION round (- 1.5))) (TOK_SELEXPR (TOK_FUNCTION floor 1.0)) (TOK_SELEXPR (TOK_FUNCTION floor 1.5)) (TOK_SELEXPR (TOK_FUNCTION floor (- 1.5))) (TOK_SELEXPR (TOK_FUNCTION ceil 1.0)) (TOK_SELEXPR (TOK_FUNCTION ceil 1.5)) (TOK_SELEXPR (TOK_FUNCTION ceil (- 1.5))) (TOK_SELEXPR (TOK_FUNCTION ceiling 1.0)) (TOK_SELEXPR (TOK_FUNCTION rand 3)) (TOK_SELEXPR (+ 3)) (TOK_SELEXPR (- 3)) (TOK_SELEXPR (+ 1 (+ 2))) (TOK_SELEXPR (+ 1 (- 2))) (TOK_SELEXPR (~ 1))))) null \ No newline at end of file diff --git a/ql/src/test/results/compiler/plan/case_sensitivity.q.xml b/ql/src/test/results/compiler/plan/case_sensitivity.q.xml new file mode 100644 index 000000000..a99ecfd18 --- /dev/null +++ 
b/ql/src/test/results/compiler/plan/case_sensitivity.q.xml @@ -0,0 +1,521 @@ + + + + + + + + + Stage-1 + + + + + + + + + + + + + + + + + + + + + + true + + + /tmp/hive-zshao/111275125/273788462.10000.insclause-0 + + + + + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + + + org.apache.hadoop.mapred.TextInputFormat + + + org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + + + + name + dest1 + + + serialization.ddl + struct dest1 { string key, string value} + + + serialization.format + 1 + + + columns + key,value + + + bucket_count + -1 + + + serialization.lib + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + + + file.inputformat + org.apache.hadoop.mapred.TextInputFormat + + + file.outputformat + org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + + location + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + + + + + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + + + + + + + + + + + + + + + Stage-2 + + + + + + + src_thrift + + + + + + + + + + + + + + + + + /tmp/hive-zshao/111275125/273788462.10000.insclause-0 + + + + + + + + + + + + + + 0 + + + + + java.lang.Integer + + + + + + + + + 1 + + + + + java.lang.String + + + + + + + + + + + + + + + + + + + + + + + lint + + + + + + + + + + + + + + + + + 1 + + + + + + + + + + + + + + + + lintstring + + + + + + + + + + + + + + + + + 0 + + + + + + + + + + MYSTRING + + + false + + + + + + + + + + + + + + + + + + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFOPGreaterThan + + + + evaluate + + + java.lang.Integer + + + java.lang.Integer + + + + + + + + + + + + lint + + + + + + + + + + + + + 0 + + + + + + + + + + + + + + + 0 + + + + + + + + + java.lang.Boolean + + + + + + + + + + + + + + + aint + + + + + + + + + + astring + + + + + + + + + + lint + + + + + + + + + + lstring + + + + + + + + + + + + + + lintstring + + + + + + + + + + mstringstring + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src_thrift + + + src_thrift + + + + + + + + + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src_thrift + + + + + + + + org.apache.hadoop.hive.serde2.ThriftDeserializer + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.mapred.SequenceFileOutputFormat + + + + + name + src_thrift + + + serialization.ddl + struct src_thrift { } + + + columns + + + + serialization.format + com.facebook.thrift.protocol.TBinaryProtocol + + + serialization.class + org.apache.hadoop.hive.serde2.thrift.test.Complex + + + bucket_count + -1 + + + serialization.lib + org.apache.hadoop.hive.serde2.ThriftDeserializer + + + file.inputformat + org.apache.hadoop.mapred.SequenceFileInputFormat + + + file.outputformat + org.apache.hadoop.mapred.SequenceFileOutputFormat + + + location + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src_thrift + + + + + org.apache.hadoop.hive.serde2.ThriftDeserializer + + + + + + + + + + + diff --git a/ql/src/test/results/compiler/plan/cast1.q.xml b/ql/src/test/results/compiler/plan/cast1.q.xml index 06e2ee8c7..9e2a5687d 100644 --- a/ql/src/test/results/compiler/plan/cast1.q.xml +++ b/ql/src/test/results/compiler/plan/cast1.q.xml @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/920573802.10000.insclause-0 + 
/tmp/hive-zshao/783071627/588980298.10000.insclause-0 @@ -51,8 +51,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, string value} serialization.format @@ -62,17 +62,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -84,12 +80,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -128,7 +124,7 @@ - /tmp/hive-zshao/920573802.10000.insclause-0 + /tmp/hive-zshao/783071627/588980298.10000.insclause-0 @@ -147,7 +143,7 @@ - java.lang.String + java.lang.Integer @@ -159,7 +155,11 @@ 1 - + + + java.lang.Double + + @@ -169,7 +169,7 @@ 2 - + @@ -179,7 +179,7 @@ 3 - + @@ -199,7 +199,11 @@ 5 - + + + java.lang.Boolean + + @@ -248,11 +252,7 @@ - - - java.lang.Integer - - + 3 @@ -262,7 +262,7 @@ - + 2 @@ -272,7 +272,7 @@ - + @@ -299,11 +299,7 @@ - - - java.lang.Double - - + 3.0 @@ -330,7 +326,7 @@ - + 2 @@ -340,14 +336,14 @@ - + - + @@ -391,7 +387,7 @@ - + 3 @@ -401,14 +397,14 @@ - + - + 2.0 @@ -418,7 +414,7 @@ - + @@ -445,7 +441,7 @@ - + 3.0 @@ -455,7 +451,7 @@ - + 2.0 @@ -465,7 +461,7 @@ - + @@ -492,7 +488,7 @@ - + 3 @@ -519,7 +515,7 @@ - + 2.0 @@ -529,14 +525,14 @@ - + - + @@ -560,7 +556,7 @@ - + 1 @@ -570,11 +566,7 @@ - - - java.lang.Boolean - - + @@ -598,7 +590,7 @@ - + true @@ -608,7 +600,7 @@ - + @@ -655,14 +647,18 @@ key - + + + java.lang.String + + - + 86 @@ -672,7 +668,7 @@ - + @@ -688,7 +684,7 @@ key - + @@ -698,7 +694,7 @@ value - + @@ -724,7 +720,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src src @@ -736,7 +732,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -759,8 +755,8 @@ src - bucket_field_name - + serialization.ddl + struct src { string key, string value} serialization.format @@ -770,17 +766,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -792,12 +784,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe diff --git a/ql/src/test/results/compiler/plan/groupby1.q.xml b/ql/src/test/results/compiler/plan/groupby1.q.xml index 26b97bc45..c245fde71 100755 --- a/ql/src/test/results/compiler/plan/groupby1.q.xml +++ b/ql/src/test/results/compiler/plan/groupby1.q.xml @@ -35,7 +35,7 @@ true - /tmp/hive-zshao/782548333.10000.insclause-0 + /tmp/hive-zshao/1211327466/29184745.10000.insclause-0 @@ -55,8 +55,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, string value} serialization.format @@ -66,17 +66,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + 
org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -88,12 +84,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -122,10 +118,13 @@ - /tmp/hive-zshao/782548333.10001 + /tmp/hive-zshao/1211327466/29184745.10001 + + true + @@ -172,6 +171,9 @@ 1 + + -1 + -1 @@ -248,13 +250,16 @@ + + true + - /tmp/hive-zshao/782548333.10001 + /tmp/hive-zshao/1211327466/29184745.10001 - /tmp/hive-zshao/782548333.10001 + /tmp/hive-zshao/1211327466/29184745.10001 @@ -263,7 +268,7 @@ - /tmp/hive-zshao/782548333.10001 + /tmp/hive-zshao/1211327466/29184745.10001 @@ -307,7 +312,7 @@ - /tmp/hive-zshao/782548333.10000.insclause-0 + /tmp/hive-zshao/1211327466/29184745.10000.insclause-0 @@ -488,16 +493,12 @@ - - - - - java.lang.Integer - - + + + key - - 0 + + @@ -531,41 +532,14 @@ -1 + + -1 + -1 - - - - key - - - - - - - - - - value - - - - - - - - - - key - - - - - - @@ -599,7 +573,11 @@ - + + + java.lang.Integer + + 4 @@ -630,7 +608,7 @@ columns - 0,1,2,3 + 0 serialization.format @@ -645,31 +623,11 @@ - - - - - VALUE.0 - - - - - - - - - - VALUE.1 - - - - - - + - VALUE.2 + KEY.0 @@ -679,7 +637,7 @@ - VALUE.3 + VALUE.0 @@ -729,7 +687,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src src @@ -741,7 +699,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -764,8 +722,8 @@ src - bucket_field_name - + serialization.ddl + struct src { string key, string value} serialization.format @@ -775,17 +733,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -797,12 +751,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -811,50 +765,42 @@ - + - - - - - - - - - /tmp/hive-zshao/782548333.10001 + + + + + /tmp/hive-zshao/1211327466/29184745.10001 + + + + + + + + + + + + + + 0 - - + + - - - - - - - - 0 - - - - - - - - - - 1 - - - - - - - + + + + 1 + + + @@ -862,39 +808,25 @@ - - - - - - - - org.apache.hadoop.hive.ql.udf.UDAFSum - - - - - - - VALUE.3 - - - - - - - - - - - + + + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDAFSum - + - VALUE.2 + VALUE.0 @@ -903,26 +835,33 @@ - - - PARTIAL1 - - - - + + + + + + + + KEY.0 + + + + + + + + PARTIAL1 + + - - - - - + diff --git a/ql/src/test/results/compiler/plan/groupby2.q.xml b/ql/src/test/results/compiler/plan/groupby2.q.xml index c814fdb1e..959c27375 100755 --- a/ql/src/test/results/compiler/plan/groupby2.q.xml +++ b/ql/src/test/results/compiler/plan/groupby2.q.xml @@ -35,7 +35,7 @@ true - /tmp/hive-zshao/708847402.10000.insclause-0 + /tmp/hive-zshao/62629504/152527911.10000.insclause-0 @@ -55,8 +55,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, string value} 
serialization.format @@ -66,17 +66,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -88,12 +84,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -122,10 +118,13 @@ - /tmp/hive-zshao/708847402.10001 + /tmp/hive-zshao/62629504/152527911.10001 + + true + @@ -172,6 +171,9 @@ 1 + + -1 + -1 @@ -268,13 +270,16 @@ + + true + - /tmp/hive-zshao/708847402.10001 + /tmp/hive-zshao/62629504/152527911.10001 - /tmp/hive-zshao/708847402.10001 + /tmp/hive-zshao/62629504/152527911.10001 @@ -283,7 +288,7 @@ - /tmp/hive-zshao/708847402.10001 + /tmp/hive-zshao/62629504/152527911.10001 @@ -327,7 +332,7 @@ - /tmp/hive-zshao/708847402.10000.insclause-0 + /tmp/hive-zshao/62629504/152527911.10000.insclause-0 @@ -603,13 +608,16 @@ evaluate - + java.lang.String int + + int + @@ -618,7 +626,7 @@ - value + key @@ -635,7 +643,17 @@ - 4 + 0 + + + + + + + + + + 1 @@ -646,61 +664,6 @@ - - - - - - org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - - - - - columns - 0 - - - serialization.format - 1 - - - - - - - 1 - - - -1 - - - - - - - key - - - - - - - - - - value - - - - - - @@ -709,16 +672,13 @@ evaluate - + java.lang.String int - - int - @@ -727,7 +687,7 @@ - key + value @@ -740,17 +700,7 @@ - 0 - - - - - - - - - - 1 + 4 @@ -763,6 +713,43 @@ + + + + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + + + org.apache.hadoop.mapred.TextInputFormat + + + org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + + + + columns + 0,1 + + + serialization.format + 1 + + + + + + + 2147483647 + + + -1 + + + -1 + + + + @@ -778,7 +765,7 @@ columns - 0,1,2 + serialization.format @@ -793,7 +780,7 @@ - + @@ -807,27 +794,7 @@ - VALUE.0 - - - - - - - - - - VALUE.1 - - - - - - - - - - VALUE.2 + KEY.1 @@ -877,7 +844,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src src @@ -889,7 +856,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -912,8 +879,8 @@ src - bucket_field_name - + serialization.ddl + struct src { string key, string value} serialization.format @@ -923,17 +890,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -945,12 +908,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -959,124 +922,102 @@ - + - - - - - - - - - /tmp/hive-zshao/708847402.10001 + + + + + /tmp/hive-zshao/62629504/152527911.10001 + + + + + + + + + + + + + + 0 - - + + - - - - - - - - 0 - - - - - - - - - - 1 - - - - - - - - - - 2 - - - - - - 
- + + + + 1 + + + - - - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDAFCount - - - true + + + 2 - - - - - - KEY.0 - - - - - - - + + + + + + + + + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDAFCount + + + true + + + - - - org.apache.hadoop.hive.ql.udf.UDAFSum + + + KEY.1 - - - - - - KEY.0 - - - - - - - + + - + + + + + + org.apache.hadoop.hive.ql.udf.UDAFSum + + - VALUE.2 + KEY.1 @@ -1085,26 +1026,33 @@ - - - PARTIAL1 - - - - + + + + + + + + KEY.0 + + + + + + + + PARTIAL1 + + - - - - - + diff --git a/ql/src/test/results/compiler/plan/groupby3.q.xml b/ql/src/test/results/compiler/plan/groupby3.q.xml index 6c20bc3bf..d0da7470c 100644 --- a/ql/src/test/results/compiler/plan/groupby3.q.xml +++ b/ql/src/test/results/compiler/plan/groupby3.q.xml @@ -35,7 +35,7 @@ true - /tmp/hive-zshao/399409523.10000.insclause-0 + /tmp/hive-zshao/168733871/318815163.10000.insclause-0 @@ -55,8 +55,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, string value} serialization.format @@ -66,17 +66,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -88,12 +84,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -122,10 +118,13 @@ - /tmp/hive-zshao/399409523.10001 + /tmp/hive-zshao/168733871/318815163.10001 + + true + @@ -154,6 +153,9 @@ + + -1 + -1 @@ -304,13 +306,16 @@ + + true + - /tmp/hive-zshao/399409523.10001 + /tmp/hive-zshao/168733871/318815163.10001 - /tmp/hive-zshao/399409523.10001 + /tmp/hive-zshao/168733871/318815163.10001 @@ -319,7 +324,7 @@ - /tmp/hive-zshao/399409523.10001 + /tmp/hive-zshao/168733871/318815163.10001 @@ -363,7 +368,7 @@ - /tmp/hive-zshao/399409523.10000.insclause-0 + /tmp/hive-zshao/168733871/318815163.10000.insclause-0 @@ -785,34 +790,16 @@ - 1 + 2147483647 + + + -1 -1 - - - - - key - - - - - - - - - - value - - - - - - - + @@ -829,7 +816,7 @@ columns - 0,1 + serialization.format @@ -844,7 +831,7 @@ - + @@ -855,26 +842,6 @@ - - - - VALUE.0 - - - - - - - - - - VALUE.1 - - - - - - @@ -918,7 +885,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src src @@ -930,7 +897,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -953,8 +920,8 @@ src - bucket_field_name - + serialization.ddl + struct src { string key, string value} serialization.format @@ -964,17 +931,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -986,12 +949,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -1000,224 +963,209 @@ - + - - - - - - - - - /tmp/hive-zshao/399409523.10001 + + + + + 
/tmp/hive-zshao/168733871/318815163.10001 + + + + + + + + + + + + + + 0 - - + + - - - - - - - - 0 - - - - - - - - - - 1 - - - - - - - - - - 2 - - - - - - - - - - 3 - - - - - - - - - - 4 - - - - - - - + + + + 1 + + + - - - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDAFAvg + + + 2 - - true + + - - - - - - KEY.0 - - - - - - - + + + + + + 3 + + + - - - org.apache.hadoop.hive.ql.udf.UDAFSum + + + 4 - - - - - - KEY.0 - - - - - - - + + + + + + + + + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDAFAvg + + + true + + + - - - org.apache.hadoop.hive.ql.udf.UDAFAvg + + + KEY.0 - - - - - - KEY.0 - - - - - - - + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDAFSum + + + - - - org.apache.hadoop.hive.ql.udf.UDAFMin + + + KEY.0 - - - - - - KEY.0 - - - - - - - + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDAFAvg + + + - - - org.apache.hadoop.hive.ql.udf.UDAFMax + + + KEY.0 - - - - - - KEY.0 - - - - - - - + + - - + + + + + + org.apache.hadoop.hive.ql.udf.UDAFMin - - - PARTIAL1 + + + + + + KEY.0 + + + + + + - - + + + + org.apache.hadoop.hive.ql.udf.UDAFMax + + + + + + + KEY.0 + + + + + + + + + + + + + + + PARTIAL1 + + - - - - - + diff --git a/ql/src/test/results/compiler/plan/groupby4.q.xml b/ql/src/test/results/compiler/plan/groupby4.q.xml index cf33fa32e..2370ced46 100644 --- a/ql/src/test/results/compiler/plan/groupby4.q.xml +++ b/ql/src/test/results/compiler/plan/groupby4.q.xml @@ -35,7 +35,7 @@ true - /tmp/hive-zshao/190015277.10000.insclause-0 + /tmp/hive-zshao/843671827/38838214.10000.insclause-0 @@ -55,8 +55,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, string value} serialization.format @@ -66,17 +66,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -88,12 +84,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -122,10 +118,13 @@ - /tmp/hive-zshao/190015277.10001 + /tmp/hive-zshao/843671827/38838214.10001 + + true + @@ -172,6 +171,9 @@ 1 + + -1 + -1 @@ -227,13 +229,16 @@ + + true + - /tmp/hive-zshao/190015277.10001 + /tmp/hive-zshao/843671827/38838214.10001 - /tmp/hive-zshao/190015277.10001 + /tmp/hive-zshao/843671827/38838214.10001 @@ -242,7 +247,7 @@ - /tmp/hive-zshao/190015277.10001 + /tmp/hive-zshao/843671827/38838214.10001 @@ -286,7 +291,7 @@ - /tmp/hive-zshao/190015277.10000.insclause-0 + /tmp/hive-zshao/843671827/38838214.10000.insclause-0 @@ -414,75 +419,6 @@ - - - - - - java.lang.Integer - - - - - 0 - - - - - - - - - org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - - - - - columns - 0 - - - serialization.format - 1 - - - - - - - -1 - - - -1 - - - - - - - key - - - - - - - - - - value - - - - - - @@ -519,7 +455,11 @@ - + + + java.lang.Integer + + 0 @@ -545,6 +485,43 @@ + + + + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + + + org.apache.hadoop.mapred.TextInputFormat + + + org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + + + + columns + 0 + + + serialization.format + 1 + + + + + + + -1 + + + -1 + + + -1 + + + + @@ -560,7 +537,7 @@ columns - 0,1,2 + serialization.format @@ -575,31 +552,11 @@ - - - - - VALUE.0 - - - - - - 
- - - - VALUE.1 - - - - - - + - VALUE.2 + KEY.0 @@ -649,7 +606,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src src @@ -661,7 +618,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -684,8 +641,8 @@ src - bucket_field_name - + serialization.ddl + struct src { string key, string value} serialization.format @@ -695,17 +652,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -717,12 +670,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -731,87 +684,72 @@ - + - - - - - - - - - /tmp/hive-zshao/190015277.10001 - - - - - - - - - - - - - - 0 - - - - - - - - - - - + + + + + /tmp/hive-zshao/843671827/38838214.10001 + + + - - - - - - - + + + + - - - VALUE.2 + + + 0 - + - - - PARTIAL1 - - - - - - - - - + + + + + + + + + + + KEY.0 + + + + + + + + + + + PARTIAL1 + + + + diff --git a/ql/src/test/results/compiler/plan/groupby5.q.xml b/ql/src/test/results/compiler/plan/groupby5.q.xml index 847d31829..d55f92ce4 100644 --- a/ql/src/test/results/compiler/plan/groupby5.q.xml +++ b/ql/src/test/results/compiler/plan/groupby5.q.xml @@ -35,7 +35,7 @@ true - /tmp/hive-zshao/1485368551.10000.insclause-0 + /tmp/hive-zshao/218070299/172646370.10000.insclause-0 @@ -55,8 +55,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, string value} serialization.format @@ -66,17 +66,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -88,12 +84,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -122,10 +118,13 @@ - /tmp/hive-zshao/1485368551.10001 + /tmp/hive-zshao/218070299/172646370.10001 + + true + @@ -172,6 +171,9 @@ 1 + + -1 + -1 @@ -248,13 +250,16 @@ + + true + - /tmp/hive-zshao/1485368551.10001 + /tmp/hive-zshao/218070299/172646370.10001 - /tmp/hive-zshao/1485368551.10001 + /tmp/hive-zshao/218070299/172646370.10001 @@ -263,7 +268,7 @@ - /tmp/hive-zshao/1485368551.10001 + /tmp/hive-zshao/218070299/172646370.10001 @@ -307,7 +312,7 @@ - /tmp/hive-zshao/1485368551.10000.insclause-0 + /tmp/hive-zshao/218070299/172646370.10000.insclause-0 @@ -488,16 +493,12 @@ - - - - - java.lang.Integer - - + + + key - - 0 + + @@ -531,41 +532,14 @@ -1 + + -1 + -1 - - - - key - - - - - - - - - - value - - - - - - - - - - key - - - - - - @@ -599,7 +573,11 @@ - + + + java.lang.Integer + + 4 @@ -630,7 +608,7 @@ columns - 0,1,2,3 + 0 serialization.format @@ -645,31 +623,11 @@ - - - - - VALUE.0 - - - - - - - - - - VALUE.1 - - - - - - + - VALUE.2 + KEY.0 @@ -679,7 +637,7 @@ - VALUE.3 + VALUE.0 @@ -729,7 +687,7 @@ - 
file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src src @@ -741,7 +699,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -764,8 +722,8 @@ src - bucket_field_name - + serialization.ddl + struct src { string key, string value} serialization.format @@ -775,17 +733,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -797,12 +751,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -811,50 +765,42 @@ - + - - - - - - - - - /tmp/hive-zshao/1485368551.10001 + + + + + /tmp/hive-zshao/218070299/172646370.10001 + + + + + + + + + + + + + + 0 - - + + - - - - - - - - 0 - - - - - - - - - - 1 - - - - - - - + + + + 1 + + + @@ -862,39 +808,25 @@ - - - - - - - - org.apache.hadoop.hive.ql.udf.UDAFSum - - - - - - - VALUE.3 - - - - - - - - - - - + + + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDAFSum - + - VALUE.2 + VALUE.0 @@ -903,26 +835,33 @@ - - - PARTIAL1 - - - - + + + + + + + + KEY.0 + + + + + + + + PARTIAL1 + + - - - - - + diff --git a/ql/src/test/results/compiler/plan/groupby6.q.xml b/ql/src/test/results/compiler/plan/groupby6.q.xml index 9343a84b5..3ae92be24 100644 --- a/ql/src/test/results/compiler/plan/groupby6.q.xml +++ b/ql/src/test/results/compiler/plan/groupby6.q.xml @@ -35,7 +35,7 @@ true - /tmp/hive-zshao/43559649.10000.insclause-0 + /tmp/hive-zshao/20627718/64699543.10000.insclause-0 @@ -55,8 +55,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, string value} serialization.format @@ -66,17 +66,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -88,12 +84,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -122,10 +118,13 @@ - /tmp/hive-zshao/43559649.10001 + /tmp/hive-zshao/20627718/64699543.10001 + + true + @@ -172,6 +171,9 @@ 1 + + -1 + -1 @@ -227,13 +229,16 @@ + + true + - /tmp/hive-zshao/43559649.10001 + /tmp/hive-zshao/20627718/64699543.10001 - /tmp/hive-zshao/43559649.10001 + /tmp/hive-zshao/20627718/64699543.10001 @@ -242,7 +247,7 @@ - /tmp/hive-zshao/43559649.10001 + /tmp/hive-zshao/20627718/64699543.10001 @@ -286,7 +291,7 @@ - /tmp/hive-zshao/43559649.10000.insclause-0 + /tmp/hive-zshao/20627718/64699543.10000.insclause-0 @@ -414,75 +419,6 @@ - - - - - - java.lang.Integer - - - - - 0 - - - - - - - - - org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - - - - - columns - 0 - - - serialization.format - 1 - - - - - - - -1 - - - -1 - - - - - - - key - - 
- - - - - - - - value - - - - - - @@ -519,7 +455,11 @@ - + + + java.lang.Integer + + 4 @@ -545,6 +485,43 @@ + + + + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + + + org.apache.hadoop.mapred.TextInputFormat + + + org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + + + + columns + 0 + + + serialization.format + 1 + + + + + + + -1 + + + -1 + + + -1 + + + + @@ -560,7 +537,7 @@ columns - 0,1,2 + serialization.format @@ -575,31 +552,11 @@ - - - - - VALUE.0 - - - - - - - - - - VALUE.1 - - - - - - + - VALUE.2 + KEY.0 @@ -649,7 +606,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src src @@ -661,7 +618,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -684,8 +641,8 @@ src - bucket_field_name - + serialization.ddl + struct src { string key, string value} serialization.format @@ -695,17 +652,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -717,12 +670,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -731,87 +684,72 @@ - + - - - - - - - - - /tmp/hive-zshao/43559649.10001 - - - - - - - - - - - - - - 0 - - - - - - - - - - - + + + + + /tmp/hive-zshao/20627718/64699543.10001 + + + - - - - - - - + + + + - - - VALUE.2 + + + 0 - + - - - PARTIAL1 - - - - - - - - - + + + + + + + + + + + KEY.0 + + + + + + + + + + + PARTIAL1 + + + + diff --git a/ql/src/test/results/compiler/plan/input1.q.xml b/ql/src/test/results/compiler/plan/input1.q.xml index a4327155c..1f3d17a23 100755 --- a/ql/src/test/results/compiler/plan/input1.q.xml +++ b/ql/src/test/results/compiler/plan/input1.q.xml @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/54141209.10000.insclause-0 + /tmp/hive-zshao/374447248/1282977307.10000.insclause-0 @@ -51,8 +51,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, string value} serialization.format @@ -62,17 +62,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -84,12 +80,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -128,7 +124,7 @@ - /tmp/hive-zshao/54141209.10000.insclause-0 + /tmp/hive-zshao/374447248/1282977307.10000.insclause-0 @@ -315,7 +311,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src src @@ -327,7 +323,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + 
file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -350,8 +346,8 @@ src - bucket_field_name - + serialization.ddl + struct src { string key, string value} serialization.format @@ -361,17 +357,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -383,12 +375,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe diff --git a/ql/src/test/results/compiler/plan/input2.q.xml b/ql/src/test/results/compiler/plan/input2.q.xml index 8b39c0ce5..83f0a55d1 100755 --- a/ql/src/test/results/compiler/plan/input2.q.xml +++ b/ql/src/test/results/compiler/plan/input2.q.xml @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/2123304.10000.insclause-0 + /tmp/hive-zshao/20345754/201293413.10000.insclause-0 @@ -51,8 +51,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, string value} serialization.format @@ -62,17 +62,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -84,12 +80,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -104,7 +100,7 @@ true - /tmp/hive-zshao/2123304.10001.insclause-1 + /tmp/hive-zshao/20345754/201293413.10001.insclause-1 @@ -124,8 +120,8 @@ dest2 - bucket_field_name - + serialization.ddl + struct dest2 { string key, string value} serialization.format @@ -135,17 +131,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -157,12 +149,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest2 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest2 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -186,7 +178,7 @@ true - /tmp/hive-zshao/2123304.10002.insclause-2 + /tmp/hive-zshao/20345754/201293413.10002.insclause-2 @@ -206,8 +198,8 @@ dest3 - bucket_field_name - + serialization.ddl + struct dest3 { string key, string value} serialization.format @@ -221,17 +213,13 @@ partition_columns ds/hr - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -243,12 +231,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest3 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest3 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -287,7 +275,7 @@ - /tmp/hive-zshao/2123304.10000.insclause-0 + /tmp/hive-zshao/20345754/201293413.10000.insclause-0 @@ -471,7 +459,7 @@ - /tmp/hive-zshao/2123304.10001.insclause-1 + 
/tmp/hive-zshao/20345754/201293413.10001.insclause-1 @@ -696,7 +684,7 @@ - /tmp/hive-zshao/2123304.10002.insclause-2 + /tmp/hive-zshao/20345754/201293413.10002.insclause-2 @@ -830,7 +818,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src src @@ -842,7 +830,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -865,8 +853,8 @@ src - bucket_field_name - + serialization.ddl + struct src { string key, string value} serialization.format @@ -876,17 +864,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -898,12 +882,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe diff --git a/ql/src/test/results/compiler/plan/input3.q.xml b/ql/src/test/results/compiler/plan/input3.q.xml index 5d8037fa3..1dacd1890 100755 --- a/ql/src/test/results/compiler/plan/input3.q.xml +++ b/ql/src/test/results/compiler/plan/input3.q.xml @@ -21,11 +21,14 @@ + + value + true - /tmp/hive-zshao/160162101.10003.insclause-3 + /tmp/hive-njain/356693511/91381909.10003.insclause-3 ../../../../build/contrib/hive/ql/test/data/warehouse/dest4.out @@ -45,7 +48,7 @@ true - /tmp/hive-zshao/160162101.10000.insclause-0 + /tmp/hive-njain/356693511/91381909.10000.insclause-0 @@ -65,8 +68,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, string value} serialization.format @@ -76,17 +79,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -98,12 +97,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -118,7 +117,7 @@ true - /tmp/hive-zshao/160162101.10001.insclause-1 + /tmp/hive-njain/356693511/91381909.10001.insclause-1 @@ -138,8 +137,8 @@ dest2 - bucket_field_name - + serialization.ddl + struct dest2 { string key, string value} serialization.format @@ -149,17 +148,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -171,12 +166,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest2 + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest2 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -200,7 +195,7 @@ true - /tmp/hive-zshao/160162101.10002.insclause-2 + /tmp/hive-njain/356693511/91381909.10002.insclause-2 @@ -220,8 +215,8 @@ dest3 - bucket_field_name - + serialization.ddl + struct dest3 { string key, string value} serialization.format @@ -235,17 +230,13 @@ partition_columns ds/hr - - field_to_dimension - - bucket_count -1 
serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -257,12 +248,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest3 + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest3 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -301,7 +292,7 @@ - /tmp/hive-zshao/160162101.10000.insclause-0 + /tmp/hive-njain/356693511/91381909.10000.insclause-0 @@ -485,7 +476,7 @@ - /tmp/hive-zshao/160162101.10001.insclause-1 + /tmp/hive-njain/356693511/91381909.10001.insclause-1 @@ -710,7 +701,7 @@ - /tmp/hive-zshao/160162101.10002.insclause-2 + /tmp/hive-njain/356693511/91381909.10002.insclause-2 @@ -915,7 +906,7 @@ - /tmp/hive-zshao/160162101.10003.insclause-3 + /tmp/hive-njain/356693511/91381909.10003.insclause-3 @@ -1067,7 +1058,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src src @@ -1079,7 +1070,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -1102,8 +1093,8 @@ src - bucket_field_name - + serialization.ddl + struct src { string key, string value} serialization.format @@ -1113,17 +1104,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -1135,12 +1122,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe diff --git a/ql/src/test/results/compiler/plan/input4.q.xml b/ql/src/test/results/compiler/plan/input4.q.xml index 0afa22504..d40cf8441 100755 --- a/ql/src/test/results/compiler/plan/input4.q.xml +++ b/ql/src/test/results/compiler/plan/input4.q.xml @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/225748576.10000.insclause-0 + /tmp/hive-zshao/1042895489/270462051.10000.insclause-0 @@ -51,8 +51,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, string value} serialization.format @@ -62,17 +62,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -84,12 +80,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -173,6 +169,9 @@ 1 + + -1 + -1 @@ -422,7 +421,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src tmap:src @@ -434,7 +433,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -457,8 +456,8 @@ src - bucket_field_name - + serialization.ddl + struct src { string key, string value} serialization.format @@ 
-468,17 +467,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -490,12 +485,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -520,7 +515,7 @@ - /tmp/hive-zshao/225748576.10000.insclause-0 + /tmp/hive-zshao/1042895489/270462051.10000.insclause-0 diff --git a/ql/src/test/results/compiler/plan/input5.q.xml b/ql/src/test/results/compiler/plan/input5.q.xml index 29c1cd7f5..5c7581711 100644 --- a/ql/src/test/results/compiler/plan/input5.q.xml +++ b/ql/src/test/results/compiler/plan/input5.q.xml @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/106489316.10000.insclause-0 + /tmp/hive-zshao/641253778/15454385.10000.insclause-0 @@ -51,8 +51,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, string value} serialization.format @@ -62,17 +62,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -84,12 +80,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -173,6 +169,9 @@ 1 + + -1 + -1 @@ -485,7 +484,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src_thrift + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src_thrift tmap:src_thrift @@ -497,7 +496,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src_thrift + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src_thrift @@ -520,8 +519,8 @@ src_thrift - bucket_field_name - + serialization.ddl + struct src_thrift { } columns @@ -533,11 +532,7 @@ serialization.class - org.apache.hadoop.hive.ql.thrift.Complex - - - field_to_dimension - + org.apache.hadoop.hive.serde2.thrift.test.Complex bucket_count @@ -545,7 +540,7 @@ serialization.lib - thrift + org.apache.hadoop.hive.serde2.ThriftDeserializer file.inputformat @@ -557,12 +552,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src_thrift + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src_thrift - thrift + org.apache.hadoop.hive.serde2.ThriftDeserializer @@ -583,7 +578,7 @@ - /tmp/hive-zshao/106489316.10000.insclause-0 + /tmp/hive-zshao/641253778/15454385.10000.insclause-0 diff --git a/ql/src/test/results/compiler/plan/input6.q.xml b/ql/src/test/results/compiler/plan/input6.q.xml index 3672cfd10..c7882cb21 100644 --- a/ql/src/test/results/compiler/plan/input6.q.xml +++ b/ql/src/test/results/compiler/plan/input6.q.xml @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/111172075.10000.insclause-0 + /tmp/hive-zshao/301573619/12587577.10000.insclause-0 @@ -51,8 +51,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, string value} serialization.format @@ -62,17 
+62,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -84,12 +80,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -128,7 +124,7 @@ - /tmp/hive-zshao/111172075.10000.insclause-0 + /tmp/hive-zshao/301573619/12587577.10000.insclause-0 @@ -222,7 +218,7 @@ evaluate - java.lang.String + java.lang.Object @@ -298,7 +294,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src1 src1 @@ -310,7 +306,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src1 @@ -333,8 +329,8 @@ src1 - bucket_field_name - + serialization.ddl + struct src1 { string key, string value} serialization.format @@ -344,17 +340,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -366,12 +358,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe diff --git a/ql/src/test/results/compiler/plan/input7.q.xml b/ql/src/test/results/compiler/plan/input7.q.xml index 9e0267ab4..8bb6a3ac9 100644 --- a/ql/src/test/results/compiler/plan/input7.q.xml +++ b/ql/src/test/results/compiler/plan/input7.q.xml @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/305356690.10000.insclause-0 + /tmp/hive-zshao/182607732/631207979.10000.insclause-0 @@ -51,8 +51,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, string value} serialization.format @@ -62,17 +62,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -84,12 +80,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -124,7 +120,7 @@ - /tmp/hive-zshao/305356690.10000.insclause-0 + /tmp/hive-zshao/182607732/631207979.10000.insclause-0 @@ -141,9 +137,9 @@ 0 - + - java.lang.String + java.lang.Void @@ -155,7 +151,11 @@ 1 - + + + java.lang.String + + @@ -238,7 +238,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src1 src1 @@ -250,7 +250,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src1 @@ 
-273,8 +273,8 @@ src1 - bucket_field_name - + serialization.ddl + struct src1 { string key, string value} serialization.format @@ -284,17 +284,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -306,12 +302,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe diff --git a/ql/src/test/results/compiler/plan/input8.q.xml b/ql/src/test/results/compiler/plan/input8.q.xml index bb1415709..a0ef71f34 100644 --- a/ql/src/test/results/compiler/plan/input8.q.xml +++ b/ql/src/test/results/compiler/plan/input8.q.xml @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/339241232.10000.insclause-0 + /tmp/hive-zshao/243084011/767326882.10000.insclause-0 @@ -51,8 +51,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, string value} serialization.format @@ -62,17 +62,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -84,12 +80,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -124,7 +120,7 @@ - /tmp/hive-zshao/339241232.10000.insclause-0 + /tmp/hive-zshao/243084011/767326882.10000.insclause-0 @@ -143,7 +139,7 @@ - java.lang.String + java.lang.Integer @@ -155,7 +151,11 @@ 1 - + + + java.lang.Double + + @@ -165,7 +165,11 @@ 2 - + + + java.lang.Byte + + @@ -204,11 +208,7 @@ - - - java.lang.Integer - - + 4 @@ -218,14 +218,14 @@ - + - + @@ -272,32 +272,32 @@ key - + + + java.lang.String + + - - - java.lang.Double - - + - + - + @@ -324,25 +324,21 @@ - - - java.lang.Byte - - + - + - + @@ -371,7 +367,7 @@ key - + @@ -381,7 +377,7 @@ value - + @@ -396,7 +392,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src1 src1 @@ -408,7 +404,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src1 @@ -431,8 +427,8 @@ src1 - bucket_field_name - + serialization.ddl + struct src1 { string key, string value} serialization.format @@ -442,17 +438,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -464,12 +456,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe diff --git a/ql/src/test/results/compiler/plan/input9.q.xml b/ql/src/test/results/compiler/plan/input9.q.xml index 024b4c6cc..da607b288 100644 --- a/ql/src/test/results/compiler/plan/input9.q.xml 
+++ b/ql/src/test/results/compiler/plan/input9.q.xml @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/350289238.10000.insclause-0 + /tmp/hive-zshao/1031500531/592905830.10000.insclause-0 @@ -51,8 +51,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, string value} serialization.format @@ -62,17 +62,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -84,12 +80,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -128,7 +124,7 @@ - /tmp/hive-zshao/350289238.10000.insclause-0 + /tmp/hive-zshao/1031500531/592905830.10000.insclause-0 @@ -145,9 +141,9 @@ 0 - + - java.lang.String + java.lang.Void @@ -159,7 +155,11 @@ 1 - + + + java.lang.String + + @@ -302,7 +302,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src1 src1 @@ -314,7 +314,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src1 @@ -337,8 +337,8 @@ src1 - bucket_field_name - + serialization.ddl + struct src1 { string key, string value} serialization.format @@ -348,17 +348,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -370,12 +366,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe diff --git a/ql/src/test/results/compiler/plan/input_part1.q.xml b/ql/src/test/results/compiler/plan/input_part1.q.xml index 3b4fed1bb..68c1116f6 100644 --- a/ql/src/test/results/compiler/plan/input_part1.q.xml +++ b/ql/src/test/results/compiler/plan/input_part1.q.xml @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/476762580.10000.insclause-0 + /tmp/hive-zshao/587528285/609899583.10000.insclause-0 @@ -51,8 +51,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, string value} serialization.format @@ -62,17 +62,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -84,12 +80,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -128,7 +124,7 @@ - /tmp/hive-zshao/476762580.10000.insclause-0 + /tmp/hive-zshao/587528285/609899583.10000.insclause-0 @@ -168,9 +164,6 @@ 2 - - true - @@ -181,9 +174,6 @@ 3 - - true - @@ -492,9 +482,6 @@ ds - - true - @@ -505,9 +492,6 @@ hr - - true - @@ -535,7 +519,7 @@ - 
file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 srcpart @@ -547,7 +531,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 @@ -579,8 +563,8 @@ srcpart - bucket_field_name - + serialization.ddl + struct srcpart { string key, string value} serialization.format @@ -594,17 +578,13 @@ partition_columns ds/hr - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -616,12 +596,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe diff --git a/ql/src/test/results/compiler/plan/input_testsequencefile.q.xml b/ql/src/test/results/compiler/plan/input_testsequencefile.q.xml index 9dc5018d3..fbf80124e 100644 --- a/ql/src/test/results/compiler/plan/input_testsequencefile.q.xml +++ b/ql/src/test/results/compiler/plan/input_testsequencefile.q.xml @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/238032349.10000.insclause-0 + /tmp/hive-zshao/1185059341/85219314.10000.insclause-0 @@ -51,8 +51,8 @@ dest4_sequencefile - bucket_field_name - + serialization.ddl + struct dest4_sequencefile { string key, string value} serialization.format @@ -62,17 +62,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -84,12 +80,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest4_sequencefile + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest4_sequencefile - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -124,7 +120,7 @@ - /tmp/hive-zshao/238032349.10000.insclause-0 + /tmp/hive-zshao/1185059341/85219314.10000.insclause-0 @@ -241,7 +237,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src src @@ -253,7 +249,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -276,8 +272,8 @@ src - bucket_field_name - + serialization.ddl + struct src { string key, string value} serialization.format @@ -287,17 +283,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -309,12 +301,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src - simple_meta + 
org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe diff --git a/ql/src/test/results/compiler/plan/input_testxpath.q.xml b/ql/src/test/results/compiler/plan/input_testxpath.q.xml index 05c6e0ac5..acf66998f 100644 --- a/ql/src/test/results/compiler/plan/input_testxpath.q.xml +++ b/ql/src/test/results/compiler/plan/input_testxpath.q.xml @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/638994494.10000.insclause-0 + /tmp/hive-zshao/780638111/964614278.10000.insclause-0 @@ -51,8 +51,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, string value} serialization.format @@ -62,17 +62,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -84,12 +80,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -124,7 +120,7 @@ - /tmp/hive-zshao/638994494.10000.insclause-0 + /tmp/hive-zshao/780638111/964614278.10000.insclause-0 @@ -143,7 +139,7 @@ - java.lang.String + java.lang.Integer @@ -155,7 +151,21 @@ 1 - + + + java.lang.String + + + + + + + + + 2 + + + @@ -181,11 +191,7 @@ - - - java.lang.Integer - - + @@ -194,7 +200,7 @@ - + 1 @@ -202,7 +208,7 @@ - + @@ -227,7 +233,7 @@ - + 0 @@ -242,8 +248,45 @@ mystring + + false + - + + + + + + + + + + mstringstring + + + + + + + + + + + + + + + + + + + + key_2 + + + + + @@ -272,7 +315,7 @@ aint - + @@ -282,7 +325,7 @@ astring - + @@ -304,7 +347,7 @@ - + @@ -326,14 +369,7 @@ mstringstring - - - - - - - - + @@ -348,7 +384,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src_thrift + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src_thrift src_thrift @@ -360,7 +396,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src_thrift + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src_thrift @@ -383,8 +419,8 @@ src_thrift - bucket_field_name - + serialization.ddl + struct src_thrift { } columns @@ -396,11 +432,7 @@ serialization.class - org.apache.hadoop.hive.ql.thrift.Complex - - - field_to_dimension - + org.apache.hadoop.hive.serde2.thrift.test.Complex bucket_count @@ -408,7 +440,7 @@ serialization.lib - thrift + org.apache.hadoop.hive.serde2.ThriftDeserializer file.inputformat @@ -420,12 +452,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src_thrift + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src_thrift - thrift + org.apache.hadoop.hive.serde2.ThriftDeserializer diff --git a/ql/src/test/results/compiler/plan/input_testxpath2.q.xml b/ql/src/test/results/compiler/plan/input_testxpath2.q.xml new file mode 100644 index 000000000..bb5ca00af --- /dev/null +++ b/ql/src/test/results/compiler/plan/input_testxpath2.q.xml @@ -0,0 +1,621 @@ + + + + + + + + + Stage-1 + + + + + + + + + + + + + + + + + + + + + + true + + + /tmp/hive-zshao/421781212/46322741.10000.insclause-0 + + + + + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + + + org.apache.hadoop.mapred.TextInputFormat + + + 
org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + + + + name + dest1 + + + serialization.ddl + struct dest1 { string key, string value} + + + serialization.format + 1 + + + columns + key,value + + + bucket_count + -1 + + + serialization.lib + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + + + file.inputformat + org.apache.hadoop.mapred.TextInputFormat + + + file.outputformat + org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + + location + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + + + + + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + + + + + + + + + + + + + + + Stage-2 + + + + + + + src_thrift + + + + + + + + + + + + + + + + + /tmp/hive-zshao/421781212/46322741.10000.insclause-0 + + + + + + + + + + + + + + 0 + + + + + java.lang.Integer + + + + + + + + + 1 + + + + + + + + + + 2 + + + + + + + + + + + + + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFSize + + + + evaluate + + + java.util.List + + + + + + + + + + lint + + + + + + + + + + + + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFSize + + + + evaluate + + + java.util.List + + + + + + + + + + lintstring + + + + + + + + + + + + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFSize + + + + evaluate + + + java.util.Map + + + + + + + + + + mstringstring + + + + + + + java.lang.String + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFOPAnd + + + + evaluate + + + java.lang.Boolean + + + java.lang.Boolean + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFOPNotNull + + + + evaluate + + + java.lang.Object + + + + + + + + + + lint + + + + + + + + + + + + java.lang.Boolean + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFOPNot + + + + evaluate + + + java.lang.Boolean + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFOPNull + + + + evaluate + + + java.lang.Object + + + + + + + + + + mstringstring + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + aint + + + + + + + + + + astring + + + + + + + + + + lint + + + + + + + + + + lstring + + + + + + + + + + + + + + lintstring + + + + + + + + + + mstringstring + + + + + + + + + + + + + + + + + + + + + + + + + + + + + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src_thrift + + + src_thrift + + + + + + + + + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src_thrift + + + + + + + + org.apache.hadoop.hive.serde2.ThriftDeserializer + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.mapred.SequenceFileOutputFormat + + + + + name + src_thrift + + + serialization.ddl + struct src_thrift { } + + + columns + + + + serialization.format + com.facebook.thrift.protocol.TBinaryProtocol + + + serialization.class + org.apache.hadoop.hive.serde2.thrift.test.Complex + + + bucket_count + -1 + + + serialization.lib + org.apache.hadoop.hive.serde2.ThriftDeserializer + + + file.inputformat + org.apache.hadoop.mapred.SequenceFileInputFormat + + + file.outputformat + org.apache.hadoop.mapred.SequenceFileOutputFormat + + + location + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src_thrift + + + + + org.apache.hadoop.hive.serde2.ThriftDeserializer + + + + + + + + + + + diff --git a/ql/src/test/results/compiler/plan/join1.q.xml 
b/ql/src/test/results/compiler/plan/join1.q.xml index 04599f3c4..c216f8493 100644 --- a/ql/src/test/results/compiler/plan/join1.q.xml +++ b/ql/src/test/results/compiler/plan/join1.q.xml @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/454170994.10000.insclause-0 + /tmp/hive-zshao/108001173/349936017.10000.insclause-0 @@ -51,8 +51,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, string value} serialization.format @@ -62,17 +62,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -84,12 +80,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -165,6 +161,9 @@ 1 + + -1 + 1 @@ -332,6 +331,9 @@ 1 + + -1 + @@ -453,7 +455,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src src2 @@ -468,7 +470,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -491,8 +493,8 @@ src - bucket_field_name - + serialization.ddl + struct src { string key, string value} serialization.format @@ -502,17 +504,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -524,12 +522,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -550,7 +548,7 @@ - /tmp/hive-zshao/454170994.10000.insclause-0 + /tmp/hive-zshao/108001173/349936017.10000.insclause-0 diff --git a/ql/src/test/results/compiler/plan/join2.q.xml b/ql/src/test/results/compiler/plan/join2.q.xml index 4349db7c4..4bdcfc4fa 100644 --- a/ql/src/test/results/compiler/plan/join2.q.xml +++ b/ql/src/test/results/compiler/plan/join2.q.xml @@ -38,7 +38,7 @@ true - /tmp/hive-zshao/77687716.10000.insclause-0 + /tmp/hive-zshao/1036144012/1033953173.10000.insclause-0 @@ -58,8 +58,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, string value} serialization.format @@ -69,17 +69,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -91,12 +87,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -207,6 +203,9 @@ 1 + + -1 + 1 @@ -455,6 +454,9 @@ 1 + + -1 + @@ -584,7 +586,7 @@ - /tmp/hive-zshao/77687716.10001 + /tmp/hive-zshao/1036144012/1033953173.10001 $INTNAME @@ -592,7 +594,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src 
+ file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src src3 @@ -604,7 +606,7 @@ - /tmp/hive-zshao/77687716.10001 + /tmp/hive-zshao/1036144012/1033953173.10001 @@ -634,7 +636,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -657,8 +659,8 @@ src - bucket_field_name - + serialization.ddl + struct src { string key, string value} serialization.format @@ -668,17 +670,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -690,12 +688,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -716,7 +714,7 @@ - /tmp/hive-zshao/77687716.10000.insclause-0 + /tmp/hive-zshao/1036144012/1033953173.10000.insclause-0 @@ -1020,6 +1018,9 @@ 1 + + -1 + 1 @@ -1187,6 +1188,9 @@ 1 + + -1 + @@ -1308,7 +1312,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src src2 @@ -1323,7 +1327,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -1346,8 +1350,8 @@ src - bucket_field_name - + serialization.ddl + struct src { string key, string value} serialization.format @@ -1357,17 +1361,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -1379,12 +1379,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -1401,7 +1401,7 @@ - /tmp/hive-zshao/77687716.10001 + /tmp/hive-zshao/1036144012/1033953173.10001 diff --git a/ql/src/test/results/compiler/plan/join3.q.xml b/ql/src/test/results/compiler/plan/join3.q.xml index a1d86b3e2..74df49df0 100644 --- a/ql/src/test/results/compiler/plan/join3.q.xml +++ b/ql/src/test/results/compiler/plan/join3.q.xml @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/416491702.10000.insclause-0 + /tmp/hive-zshao/1679400869/64637247.10000.insclause-0 @@ -51,8 +51,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, string value} serialization.format @@ -62,17 +62,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -84,12 +80,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -165,6 
+161,9 @@ 1 + + -1 + 1 @@ -332,6 +331,9 @@ 1 + + -1 + 2 @@ -499,6 +501,9 @@ 1 + + -1 + @@ -620,7 +625,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src src2 @@ -638,7 +643,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -661,8 +666,8 @@ src - bucket_field_name - + serialization.ddl + struct src { string key, string value} serialization.format @@ -672,17 +677,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -694,12 +695,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -720,7 +721,7 @@ - /tmp/hive-zshao/416491702.10000.insclause-0 + /tmp/hive-zshao/1679400869/64637247.10000.insclause-0 diff --git a/ql/src/test/results/compiler/plan/join4.q.xml b/ql/src/test/results/compiler/plan/join4.q.xml index b80912ccd..fb0f78559 100644 --- a/ql/src/test/results/compiler/plan/join4.q.xml +++ b/ql/src/test/results/compiler/plan/join4.q.xml @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/1000008087.10000.insclause-0 + /tmp/hive-zshao/257975023/155084340.10000.insclause-0 @@ -51,8 +51,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, string value} serialization.format @@ -62,17 +62,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -84,12 +80,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -173,6 +169,9 @@ 1 + + -1 + 1 @@ -552,6 +551,9 @@ 1 + + -1 + @@ -869,7 +871,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src c:a:src1 @@ -884,7 +886,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -907,8 +909,8 @@ src - bucket_field_name - + serialization.ddl + struct src { string key, string value} serialization.format @@ -918,17 +920,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -940,12 +938,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -970,7 +968,7 @@ - 
/tmp/hive-zshao/1000008087.10000.insclause-0 + /tmp/hive-zshao/257975023/155084340.10000.insclause-0 diff --git a/ql/src/test/results/compiler/plan/join5.q.xml b/ql/src/test/results/compiler/plan/join5.q.xml index 93c76db2e..4432c0ada 100644 --- a/ql/src/test/results/compiler/plan/join5.q.xml +++ b/ql/src/test/results/compiler/plan/join5.q.xml @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/1144028705.10000.insclause-0 + /tmp/hive-zshao/936351131/313796179.10000.insclause-0 @@ -51,8 +51,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, string value} serialization.format @@ -62,17 +62,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -84,12 +80,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -173,6 +169,9 @@ 1 + + -1 + 1 @@ -552,6 +551,9 @@ 1 + + -1 + @@ -869,7 +871,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src c:a:src1 @@ -884,7 +886,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -907,8 +909,8 @@ src - bucket_field_name - + serialization.ddl + struct src { string key, string value} serialization.format @@ -918,17 +920,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -940,12 +938,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -970,7 +968,7 @@ - /tmp/hive-zshao/1144028705.10000.insclause-0 + /tmp/hive-zshao/936351131/313796179.10000.insclause-0 diff --git a/ql/src/test/results/compiler/plan/join6.q.xml b/ql/src/test/results/compiler/plan/join6.q.xml index 08ac228bf..3d4484143 100644 --- a/ql/src/test/results/compiler/plan/join6.q.xml +++ b/ql/src/test/results/compiler/plan/join6.q.xml @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/601595330.10000.insclause-0 + /tmp/hive-zshao/161126421/564619381.10000.insclause-0 @@ -51,8 +51,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, string value} serialization.format @@ -62,17 +62,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -84,12 +80,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -173,6 +169,9 @@ 1 + + -1 + 1 @@ -552,6 +551,9 @@ 1 + + -1 + @@ -869,7 +871,7 @@ - 
file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src c:a:src1 @@ -884,7 +886,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -907,8 +909,8 @@ src - bucket_field_name - + serialization.ddl + struct src { string key, string value} serialization.format @@ -918,17 +920,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -940,12 +938,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -970,7 +968,7 @@ - /tmp/hive-zshao/601595330.10000.insclause-0 + /tmp/hive-zshao/161126421/564619381.10000.insclause-0 diff --git a/ql/src/test/results/compiler/plan/join7.q.xml b/ql/src/test/results/compiler/plan/join7.q.xml index 7f7de49a4..727699e63 100644 --- a/ql/src/test/results/compiler/plan/join7.q.xml +++ b/ql/src/test/results/compiler/plan/join7.q.xml @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/941883877.10000.insclause-0 + /tmp/hive-zshao/61032244/444662007.10000.insclause-0 @@ -51,8 +51,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, string value} serialization.format @@ -62,17 +62,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -84,12 +80,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -173,6 +169,9 @@ 1 + + -1 + 1 @@ -552,6 +551,9 @@ 1 + + -1 + @@ -920,6 +922,9 @@ 1 + + -1 + 2 @@ -1240,7 +1245,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src c:a:src1 @@ -1258,7 +1263,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -1281,8 +1286,8 @@ src - bucket_field_name - + serialization.ddl + struct src { string key, string value} serialization.format @@ -1292,17 +1297,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -1314,12 +1315,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -1344,7 +1345,7 @@ - /tmp/hive-zshao/941883877.10000.insclause-0 + 
/tmp/hive-zshao/61032244/444662007.10000.insclause-0 diff --git a/ql/src/test/results/compiler/plan/join8.q.xml b/ql/src/test/results/compiler/plan/join8.q.xml index 9b26a0964..7778a51d9 100644 --- a/ql/src/test/results/compiler/plan/join8.q.xml +++ b/ql/src/test/results/compiler/plan/join8.q.xml @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/113642626.10000.insclause-0 + /tmp/hive-zshao/616083721/271603255.10000.insclause-0 @@ -51,8 +51,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, string value} serialization.format @@ -62,17 +62,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -84,12 +80,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -173,6 +169,9 @@ 1 + + -1 + 1 @@ -552,6 +551,9 @@ 1 + + -1 + @@ -869,7 +871,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src c:a:src1 @@ -884,7 +886,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -907,8 +909,8 @@ src - bucket_field_name - + serialization.ddl + struct src { string key, string value} serialization.format @@ -918,17 +920,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -940,12 +938,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -974,7 +972,7 @@ - /tmp/hive-zshao/113642626.10000.insclause-0 + /tmp/hive-zshao/616083721/271603255.10000.insclause-0 @@ -1124,7 +1122,7 @@ evaluate - java.lang.String + java.lang.Object @@ -1158,7 +1156,7 @@ evaluate - java.lang.String + java.lang.Object diff --git a/ql/src/test/results/compiler/plan/sample1.q.xml b/ql/src/test/results/compiler/plan/sample1.q.xml index e163e2372..7c45b8033 100644 --- a/ql/src/test/results/compiler/plan/sample1.q.xml +++ b/ql/src/test/results/compiler/plan/sample1.q.xml @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/299196740.10000.insclause-0 + /tmp/hive-zshao/130068324/266130293.10000.insclause-0 @@ -51,8 +51,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, string value} serialization.format @@ -62,17 +62,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -84,12 +80,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -128,7 +124,7 @@ - 
/tmp/hive-zshao/299196740.10000.insclause-0 + /tmp/hive-zshao/130068324/266130293.10000.insclause-0 @@ -168,9 +164,6 @@ 2 - - true - @@ -181,9 +174,6 @@ 3 - - true - @@ -414,9 +404,6 @@ ds - - true - @@ -427,9 +414,6 @@ hr - - true - @@ -457,7 +441,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 s @@ -469,7 +453,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 @@ -501,8 +485,8 @@ srcpart - bucket_field_name - + serialization.ddl + struct srcpart { string key, string value} serialization.format @@ -516,17 +500,13 @@ partition_columns ds/hr - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -538,12 +518,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcpart - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe diff --git a/ql/src/test/results/compiler/plan/sample2.q.xml b/ql/src/test/results/compiler/plan/sample2.q.xml index 68ac38a8c..25c5936a6 100644 --- a/ql/src/test/results/compiler/plan/sample2.q.xml +++ b/ql/src/test/results/compiler/plan/sample2.q.xml @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/1471624673.10000.insclause-0 + /tmp/hive-zshao/741118865/96139643.10000.insclause-0 @@ -51,8 +51,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, string value} serialization.format @@ -62,17 +62,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -84,12 +80,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -124,7 +120,7 @@ - /tmp/hive-zshao/1471624673.10000.insclause-0 + /tmp/hive-zshao/741118865/96139643.10000.insclause-0 @@ -241,7 +237,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket/kv1.txt + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket/kv1.txt s @@ -253,7 +249,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket/kv1.txt + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket/kv1.txt @@ -276,8 +272,8 @@ srcbucket - bucket_field_name - + serialization.ddl + struct srcbucket { string key, string value} serialization.format @@ -287,17 +283,13 @@ columns key,value - - field_to_dimension - - bucket_count 2 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -309,12 +301,12 @@ location - 
file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe diff --git a/ql/src/test/results/compiler/plan/sample3.q.xml b/ql/src/test/results/compiler/plan/sample3.q.xml index ddfdc02fb..e2f42cde0 100644 --- a/ql/src/test/results/compiler/plan/sample3.q.xml +++ b/ql/src/test/results/compiler/plan/sample3.q.xml @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/426518465.10000.insclause-0 + /tmp/hive-zshao/1895709606/245717296.10000.insclause-0 @@ -51,8 +51,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, string value} serialization.format @@ -62,17 +62,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -84,12 +80,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -128,7 +124,7 @@ - /tmp/hive-zshao/426518465.10000.insclause-0 + /tmp/hive-zshao/1895709606/245717296.10000.insclause-0 @@ -363,7 +359,7 @@ - 1 + 0 @@ -419,7 +415,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket s @@ -431,7 +427,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket @@ -454,8 +450,8 @@ srcbucket - bucket_field_name - + serialization.ddl + struct srcbucket { string key, string value} serialization.format @@ -465,17 +461,13 @@ columns key,value - - field_to_dimension - - bucket_count 2 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -487,12 +479,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe diff --git a/ql/src/test/results/compiler/plan/sample4.q.xml b/ql/src/test/results/compiler/plan/sample4.q.xml index 958699719..c88574ce1 100644 --- a/ql/src/test/results/compiler/plan/sample4.q.xml +++ b/ql/src/test/results/compiler/plan/sample4.q.xml @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/371950458.10000.insclause-0 + /tmp/hive-zshao/614066513/551990576.10000.insclause-0 @@ -51,8 +51,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, string value} serialization.format @@ -62,17 +62,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -84,12 +80,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + 
file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -128,7 +124,7 @@ - /tmp/hive-zshao/371950458.10000.insclause-0 + /tmp/hive-zshao/614066513/551990576.10000.insclause-0 @@ -350,7 +346,7 @@ - 1 + 0 @@ -406,7 +402,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket s @@ -418,7 +414,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket @@ -441,8 +437,8 @@ srcbucket - bucket_field_name - + serialization.ddl + struct srcbucket { string key, string value} serialization.format @@ -452,17 +448,13 @@ columns key,value - - field_to_dimension - - bucket_count 2 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -474,12 +466,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe diff --git a/ql/src/test/results/compiler/plan/sample5.q.xml b/ql/src/test/results/compiler/plan/sample5.q.xml index e94c0682c..3bc4c55da 100644 --- a/ql/src/test/results/compiler/plan/sample5.q.xml +++ b/ql/src/test/results/compiler/plan/sample5.q.xml @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/450613668.10000.insclause-0 + /tmp/hive-zshao/532954132/21366373.10000.insclause-0 @@ -51,8 +51,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, string value} serialization.format @@ -62,17 +62,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -84,12 +80,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -128,7 +124,7 @@ - /tmp/hive-zshao/450613668.10000.insclause-0 + /tmp/hive-zshao/532954132/21366373.10000.insclause-0 @@ -350,7 +346,7 @@ - 1 + 0 @@ -406,7 +402,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket s @@ -418,7 +414,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket @@ -441,8 +437,8 @@ srcbucket - bucket_field_name - + serialization.ddl + struct srcbucket { string key, string value} serialization.format @@ -452,17 +448,13 @@ columns key,value - - field_to_dimension - - bucket_count 2 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -474,12 +466,12 @@ location - 
file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe diff --git a/ql/src/test/results/compiler/plan/sample6.q.xml b/ql/src/test/results/compiler/plan/sample6.q.xml index c46d9c3e5..8ba512178 100644 --- a/ql/src/test/results/compiler/plan/sample6.q.xml +++ b/ql/src/test/results/compiler/plan/sample6.q.xml @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/5802842.10000.insclause-0 + /tmp/hive-zshao/554218395/580685485.10000.insclause-0 @@ -51,8 +51,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, string value} serialization.format @@ -62,17 +62,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -84,12 +80,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -128,7 +124,7 @@ - /tmp/hive-zshao/5802842.10000.insclause-0 + /tmp/hive-zshao/554218395/580685485.10000.insclause-0 @@ -350,7 +346,7 @@ - 1 + 0 @@ -406,7 +402,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket s @@ -418,7 +414,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket @@ -441,8 +437,8 @@ srcbucket - bucket_field_name - + serialization.ddl + struct srcbucket { string key, string value} serialization.format @@ -452,17 +448,13 @@ columns key,value - - field_to_dimension - - bucket_count 2 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -474,12 +466,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe diff --git a/ql/src/test/results/compiler/plan/sample7.q.xml b/ql/src/test/results/compiler/plan/sample7.q.xml index 075d13c20..2be5da41e 100644 --- a/ql/src/test/results/compiler/plan/sample7.q.xml +++ b/ql/src/test/results/compiler/plan/sample7.q.xml @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/33801322.10000.insclause-0 + /tmp/hive-zshao/393286311/133813526.10000.insclause-0 @@ -51,8 +51,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, string value} serialization.format @@ -62,17 +62,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -84,12 +80,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + 
file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -132,7 +128,7 @@ - /tmp/hive-zshao/33801322.10000.insclause-0 + /tmp/hive-zshao/393286311/133813526.10000.insclause-0 @@ -441,7 +437,7 @@ - 1 + 0 @@ -472,7 +468,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket s @@ -484,7 +480,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket @@ -507,8 +503,8 @@ srcbucket - bucket_field_name - + serialization.ddl + struct srcbucket { string key, string value} serialization.format @@ -518,17 +514,13 @@ columns key,value - - field_to_dimension - - bucket_count 2 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -540,12 +532,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/srcbucket - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe diff --git a/ql/src/test/results/compiler/plan/subq.q.xml b/ql/src/test/results/compiler/plan/subq.q.xml index ad6b86faf..7485b6c8e 100644 --- a/ql/src/test/results/compiler/plan/subq.q.xml +++ b/ql/src/test/results/compiler/plan/subq.q.xml @@ -21,11 +21,14 @@ + + key,value + true - /tmp/hive-zshao/147290682.10000.insclause-0 + /tmp/hive-njain/560916151/1453534343.10000.insclause-0 ../../../../build/contrib/hive/ql/test/data/warehouse/union.out @@ -72,7 +75,7 @@ - /tmp/hive-zshao/147290682.10000.insclause-0 + /tmp/hive-njain/560916151/1453534343.10000.insclause-0 @@ -337,7 +340,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src unioninput:src @@ -349,7 +352,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -372,8 +375,8 @@ src - bucket_field_name - + serialization.ddl + struct src { string key, string value} serialization.format @@ -383,17 +386,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -405,12 +404,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe diff --git a/ql/src/test/results/compiler/plan/udf1.q.xml b/ql/src/test/results/compiler/plan/udf1.q.xml index 0bb55cd02..1454a96f7 100644 --- a/ql/src/test/results/compiler/plan/udf1.q.xml +++ b/ql/src/test/results/compiler/plan/udf1.q.xml @@ -31,7 +31,7 @@ true - /tmp/hive-zshao/151420038.10000.insclause-0 + /tmp/hive-zshao/247949494/314100641.10000.insclause-0 @@ -51,8 +51,8 @@ dest1 - bucket_field_name - + serialization.ddl + struct dest1 { string key, 
string value} serialization.format @@ -62,17 +62,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -84,12 +80,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe @@ -128,7 +124,7 @@ - /tmp/hive-zshao/151420038.10000.insclause-0 + /tmp/hive-zshao/247949494/314100641.10000.insclause-0 @@ -147,7 +143,7 @@ - java.lang.String + java.lang.Boolean @@ -279,7 +275,11 @@ 13 - + + + java.lang.String + + @@ -289,7 +289,7 @@ 14 - + @@ -299,7 +299,7 @@ 15 - + @@ -309,7 +309,7 @@ 16 - + @@ -348,7 +348,7 @@ - + a @@ -358,7 +358,7 @@ - + %a% @@ -368,11 +368,7 @@ - - - java.lang.Boolean - - + @@ -399,7 +395,7 @@ - + b @@ -409,7 +405,7 @@ - + %a% @@ -419,7 +415,7 @@ - + @@ -446,7 +442,7 @@ - + ab @@ -456,7 +452,7 @@ - + %a% @@ -466,7 +462,7 @@ - + @@ -493,7 +489,7 @@ - + ab @@ -503,7 +499,7 @@ - + %a_ @@ -513,7 +509,7 @@ - + @@ -540,7 +536,7 @@ - + %_ @@ -550,7 +546,7 @@ - + \%\_ @@ -560,7 +556,7 @@ - + @@ -587,7 +583,7 @@ - + ab @@ -597,7 +593,7 @@ - + \%\_ @@ -607,7 +603,7 @@ - + @@ -634,7 +630,7 @@ - + ab @@ -644,7 +640,7 @@ - + _a% @@ -654,7 +650,7 @@ - + @@ -681,7 +677,7 @@ - + ab @@ -691,7 +687,7 @@ - + a @@ -701,7 +697,7 @@ - + @@ -728,7 +724,7 @@ - + @@ -738,7 +734,7 @@ - + .* @@ -748,7 +744,7 @@ - + @@ -775,7 +771,7 @@ - + a @@ -785,7 +781,7 @@ - + [ab] @@ -795,7 +791,7 @@ - + @@ -822,7 +818,7 @@ - + @@ -832,7 +828,7 @@ - + [ab] @@ -842,7 +838,7 @@ - + @@ -869,7 +865,7 @@ - + hadoop @@ -879,7 +875,7 @@ - + [a-z]* @@ -889,7 +885,7 @@ - + @@ -916,7 +912,7 @@ - + hadoop @@ -926,7 +922,7 @@ - + o* @@ -936,7 +932,7 @@ - + @@ -966,7 +962,7 @@ - + abc @@ -976,7 +972,7 @@ - + b @@ -986,7 +982,7 @@ - + c @@ -996,7 +992,7 @@ - + @@ -1026,7 +1022,7 @@ - + abc @@ -1036,7 +1032,7 @@ - + z @@ -1046,7 +1042,7 @@ - + a @@ -1056,7 +1052,7 @@ - + @@ -1086,7 +1082,7 @@ - + abbbb @@ -1096,7 +1092,7 @@ - + bb @@ -1106,7 +1102,7 @@ - + b @@ -1116,7 +1112,7 @@ - + @@ -1146,7 +1142,7 @@ - + hadoop @@ -1156,7 +1152,7 @@ - + (.)[a-z]* @@ -1166,7 +1162,7 @@ - + $1ive @@ -1176,7 +1172,7 @@ - + @@ -1223,7 +1219,7 @@ key - + @@ -1244,7 +1240,7 @@ - + @@ -1260,7 +1256,7 @@ key - + @@ -1270,7 +1266,7 @@ value - + @@ -1296,7 +1292,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src src @@ -1308,7 +1304,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -1331,8 +1327,8 @@ src - bucket_field_name - + serialization.ddl + struct src { string key, string value} serialization.format @@ -1342,17 +1338,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -1364,12 +1356,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src - 
simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe diff --git a/ql/src/test/results/compiler/plan/udf4.q.xml b/ql/src/test/results/compiler/plan/udf4.q.xml new file mode 100644 index 000000000..b7957f1c2 --- /dev/null +++ b/ql/src/test/results/compiler/plan/udf4.q.xml @@ -0,0 +1,1038 @@ + + + + + Stage-2 + + + + + + + dest1 + + + + + + + + + + + + + /tmp/hive-njain/463574005/202732636.10001.insclause-0 + + + + + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + + + org.apache.hadoop.mapred.TextInputFormat + + + org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + + + + serialization.format + 1 + + + + + + + + + + + + + + + 0 + + + + + java.lang.Long + + + + + + + + + 1 + + + + + + + + + + 2 + + + + + + + + + + 3 + + + + + + + + + + 4 + + + + + + + + + + 5 + + + + + + + + + + 6 + + + + + + + + + + 7 + + + + + + + + + + 8 + + + + + + + + + + 9 + + + + + + + + + + 10 + + + + + java.lang.Double + + + + + + + + + 11 + + + + + java.lang.Integer + + + + + + + + + 12 + + + + + + + + + + 13 + + + + + + + + + + 14 + + + + + + + + + + 15 + + + + + + + + + + + + + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFRound + + + + evaluate + + + java.lang.Double + + + + + + + + + + + + + 1.0 + + + + + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFRound + + + + evaluate + + + java.lang.Double + + + + + + + + + + + + + 1.5 + + + + + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFRound + + + + evaluate + + + java.lang.Double + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFOPNegative + + + + evaluate + + + java.lang.Double + + + + + + + + + + + + + 1.5 + + + + + + + + + + + + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFFloor + + + + evaluate + + + java.lang.Double + + + + + + + + + + + + + 1.0 + + + + + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFFloor + + + + evaluate + + + java.lang.Double + + + + + + + + + + + + + 1.5 + + + + + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFFloor + + + + evaluate + + + java.lang.Double + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFOPNegative + + + + evaluate + + + java.lang.Double + + + + + + + + + + + + + 1.5 + + + + + + + + + + + + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFCeil + + + + evaluate + + + java.lang.Double + + + + + + + + + + + + + 1.0 + + + + + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFCeil + + + + evaluate + + + java.lang.Double + + + + + + + + + + + + + 1.5 + + + + + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFCeil + + + + evaluate + + + java.lang.Double + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFOPNegative + + + + evaluate + + + java.lang.Double + + + + + + + + + + + + + 1.5 + + + + + + + + + + + + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFCeil + + + + evaluate + + + java.lang.Double + + + + + + + + + + + + + 1.0 + + + + + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFRand + + + + evaluate + + + long + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFToLong + + + + evaluate + + + java.lang.Integer + + + + + + + + + + + + + 3 + + + + + + + + + + + + + + + + + + + + + + + + 3 + + + + + + + org.apache.hadoop.hive.ql.udf.UDFOPNegative + + + + evaluate + + + java.lang.Integer + + + + + + + + + + + + + 3 + + + + + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFOPPlus + + + + evaluate + + + java.lang.Integer + + + java.lang.Integer + + + + + + + + + + + + + 1 + + + + + + + + + + 2 + + + + + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFOPPlus + + + + evaluate + + + java.lang.Integer + + + java.lang.Integer + + + + + + + + + + + 
+ + 1 + + + + + + + org.apache.hadoop.hive.ql.udf.UDFOPNegative + + + + evaluate + + + java.lang.Integer + + + + + + + + + + + + + 2 + + + + + + + + + + + + + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFOPBitNot + + + + evaluate + + + java.lang.Integer + + + + + + + + + + + + + 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + key + + + + + java.lang.String + + + + + + + + + value + + + + + + + + + + + + + + + + + + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + + + dest1 + + + + + + + + + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + + + + + + + + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + + + org.apache.hadoop.mapred.TextInputFormat + + + org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + + + + name + dest1 + + + serialization.ddl + struct dest1 { string key, string value} + + + serialization.format + 1 + + + columns + key,value + + + bucket_count + -1 + + + serialization.lib + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + + + file.inputformat + org.apache.hadoop.mapred.TextInputFormat + + + file.outputformat + org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + + location + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/dest1 + + + + + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe + + + + + + + + + + + diff --git a/ql/src/test/results/compiler/plan/union.q.xml b/ql/src/test/results/compiler/plan/union.q.xml index 9650f1959..cea05478d 100644 --- a/ql/src/test/results/compiler/plan/union.q.xml +++ b/ql/src/test/results/compiler/plan/union.q.xml @@ -21,11 +21,14 @@ + + key,value + true - /tmp/hive-zshao/831940234.10000.insclause-0 + /tmp/hive-njain/635791418/469963157.10000.insclause-0 ../../../../build/contrib/hive/ql/test/data/warehouse/union.out @@ -76,7 +79,7 @@ - /tmp/hive-zshao/831940234.10000.insclause-0 + /tmp/hive-njain/635791418/469963157.10000.insclause-0 @@ -525,7 +528,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src null-subquery1:unioninput-subquery1:src @@ -540,7 +543,7 @@ - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src @@ -563,8 +566,8 @@ src - bucket_field_name - + serialization.ddl + struct src { string key, string value} serialization.format @@ -574,17 +577,13 @@ columns key,value - - field_to_dimension - - bucket_count -1 serialization.lib - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat @@ -596,12 +595,12 @@ location - file:/data/zshao/trunk/VENDOR/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src + file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/src - simple_meta + org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe diff --git a/ql/src/test/templates/TestNegativeCliDriver.vm b/ql/src/test/templates/TestNegativeCliDriver.vm new file mode 100644 index 000000000..6f7c3c0dd --- /dev/null +++ b/ql/src/test/templates/TestNegativeCliDriver.vm @@ -0,0 +1,83 @@ +package org.apache.hadoop.hive.cli; + +import junit.framework.Test; +import junit.framework.TestCase; +import junit.framework.TestSuite; + +import java.io.*; +import java.util.*; + +import org.apache.hadoop.hive.ql.QTestUtil; +import org.apache.hadoop.hive.ql.exec.Task; + 
+import org.antlr.runtime.*; +import org.antlr.runtime.tree.*; + +public class $className extends TestCase { + + private QTestUtil qt; + + public $className(String name) { + super(name); + qt = null; + } + + @Override + protected void setUp() { + try { + qt = new QTestUtil("$resultsDir.getCanonicalPath()"); + +#foreach ($qf in $qfiles) + qt.addFile("$qf.getCanonicalPath()"); +#end + } + catch (Throwable e) { + e.printStackTrace(); + System.out.flush(); + fail("Unexpected exception in setup"); + } + } + + public static Test suite() { + TestSuite suite = new TestSuite(); +#foreach ($qf in $qfiles) + #set ($fname = $qf.getName()) + #set ($eidx = $fname.length() - 2) + #set ($tname = $fname.substring(0, $eidx)) + suite.addTest(new $className("testNegativeCliDriver_$tname")); +#end + return suite; + } + +#foreach ($qf in $qfiles) + #set ($fname = $qf.getName()) + #set ($eidx = $fname.length() - 2) + #set ($tname = $fname.substring(0, $eidx)) + public void testNegativeCliDriver_$tname() throws Exception { + try { + System.out.println("Begin query: " + "$fname"); + qt.cliInit("$fname"); + int ecode = qt.executeClient("$fname"); + if (ecode == 0) { + fail("Client execution was expected to fail, but succeeded with error code = " + ecode); + } + + ecode = qt.checkCliDriverResults("$fname"); + if (ecode != 0) { + fail("Client execution results failed with error code = " + ecode); + } + } + catch (Throwable e) { + System.out.println("Exception: " + e.getMessage()); + e.printStackTrace(); + System.out.flush(); + fail("Unexpected exception"); + } + + System.out.println("Done query: " + "$fname"); + assertTrue("Test passed", true); + } + +#end +} + diff --git a/ql/src/test/templates/TestParseNegative.vm b/ql/src/test/templates/TestParseNegative.vm index 24fcd57d0..863cc7835 100755 --- a/ql/src/test/templates/TestParseNegative.vm +++ b/ql/src/test/templates/TestParseNegative.vm @@ -63,10 +63,16 @@ public class $className extends TestCase { fail("Unexpected success for query: " + "$fname"); } catch (ParseException pe) { - qt.checkNegativeResults("$fname", pe); + int ecode = qt.checkNegativeResults("$fname", pe); + if (ecode != 0) { + fail("failed with error code = " + ecode); + } } catch (SemanticException se) { - qt.checkNegativeResults("$fname", se); + int ecode = qt.checkNegativeResults("$fname", se); + if (ecode != 0) { + fail("failed with error code = " + ecode); + } } catch (Throwable e) { System.out.println("Exception: " + e.getMessage()); diff --git a/serde/build.xml b/serde/build.xml index caaefb073..d23f5287a 100644 --- a/serde/build.xml +++ b/serde/build.xml @@ -26,8 +26,17 @@ to call at top-level: ant deploy-contrib compile-core-test - - + + + + + + + + Executing thrift (which needs to be in your path) to build thrift test classes...
+ + + diff --git a/serde/if/serde.thrift b/serde/if/serde.thrift index 2a71490e8..d75417e33 100644 --- a/serde/if/serde.thrift +++ b/serde/if/serde.thrift @@ -8,6 +8,7 @@ const string SERIALIZATION_LIB = "serialization.lib" const string SERIALIZATION_CLASS = "serialization.class" const string SERIALIZATION_FORMAT = "serialization.format" const string SERIALIZATION_DDL = "serialization.ddl" +const string SERIALIZATION_NULL_FORMAT = "serialization.null.format" const string FIELD_DELIM = "field.delim" const string COLLECTION_DELIM = "colelction.delim" diff --git a/serde/if/test/complex.thrift b/serde/if/test/complex.thrift new file mode 100644 index 000000000..58bd6111e --- /dev/null +++ b/serde/if/test/complex.thrift @@ -0,0 +1,15 @@ +namespace java org.apache.hadoop.hive.serde2.thrift.test + +struct IntString { + 1: i32 myint; + 2: string myString; +} + +struct Complex { + 1: i32 aint; + 2: string aString; + 3: list lint; + 4: list lString; + 5: list lintString; + 6: map mStringString; +} diff --git a/serde/if/testthrift.thrift b/serde/if/test/testthrift.thrift similarity index 100% rename from serde/if/testthrift.thrift rename to serde/if/test/testthrift.thrift diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde/Constants.java b/serde/src/gen-java/org/apache/hadoop/hive/serde/Constants.java index e35f374ac..dee4613ae 100644 --- a/serde/src/gen-java/org/apache/hadoop/hive/serde/Constants.java +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde/Constants.java @@ -23,6 +23,8 @@ public class Constants { public static final String SERIALIZATION_DDL = "serialization.ddl"; + public static final String SERIALIZATION_NULL_FORMAT = "serialization.null.format"; + public static final String FIELD_DELIM = "field.delim"; public static final String COLLECTION_DELIM = "colelction.delim"; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/thrift/Constants.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeAsync.java similarity index 69% rename from ql/src/java/org/apache/hadoop/hive/ql/thrift/Constants.java rename to serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeAsync.java index ab72da1c1..19b0b25a3 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/thrift/Constants.java +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeAsync.java @@ -16,21 +16,17 @@ * limitations under the License. */ -/** - * Autogenerated by Thrift - * - * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING - */ -package org.apache.hadoop.hive.ql.thrift; +/* Generated By:JJTree: Do not edit this line. 
DynamicSerDeAsync.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; -import java.util.List; -import java.util.ArrayList; -import java.util.Map; -import java.util.HashMap; -import java.util.Set; -import java.util.HashSet; -import com.facebook.thrift.*; +public class DynamicSerDeAsync extends SimpleNode { + public DynamicSerDeAsync(int id) { + super(id); + } -public class Constants { + public DynamicSerDeAsync(thrift_grammar p, int id) { + super(p, id); + } } diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeCommaOrSemicolon.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeCommaOrSemicolon.java new file mode 100644 index 000000000..a45f40caa --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeCommaOrSemicolon.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* Generated By:JJTree: Do not edit this line. DynamicSerDeCommaOrSemicolon.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeCommaOrSemicolon extends SimpleNode { + public DynamicSerDeCommaOrSemicolon(int id) { + super(id); + } + + public DynamicSerDeCommaOrSemicolon(thrift_grammar p, int id) { + super(p, id); + } + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConst.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConst.java new file mode 100644 index 000000000..175d05863 --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConst.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* Generated By:JJTree: Do not edit this line. 
DynamicSerDeConst.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeConst extends SimpleNode { + public DynamicSerDeConst(int id) { + super(id); + } + + public DynamicSerDeConst(thrift_grammar p, int id) { + super(p, id); + } + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstList.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstList.java new file mode 100644 index 000000000..9342e0509 --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstList.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* Generated By:JJTree: Do not edit this line. DynamicSerDeConstList.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeConstList extends SimpleNode { + public DynamicSerDeConstList(int id) { + super(id); + } + + public DynamicSerDeConstList(thrift_grammar p, int id) { + super(p, id); + } + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstListContents.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstListContents.java new file mode 100644 index 000000000..50457bac9 --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstListContents.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* Generated By:JJTree: Do not edit this line. 
DynamicSerDeConstListContents.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeConstListContents extends SimpleNode { + public DynamicSerDeConstListContents(int id) { + super(id); + } + + public DynamicSerDeConstListContents(thrift_grammar p, int id) { + super(p, id); + } + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstMap.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstMap.java new file mode 100644 index 000000000..6beb757ab --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstMap.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* Generated By:JJTree: Do not edit this line. DynamicSerDeConstMap.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeConstMap extends SimpleNode { + public DynamicSerDeConstMap(int id) { + super(id); + } + + public DynamicSerDeConstMap(thrift_grammar p, int id) { + super(p, id); + } + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstMapContents.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstMapContents.java new file mode 100644 index 000000000..529571510 --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstMapContents.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* Generated By:JJTree: Do not edit this line. 
DynamicSerDeConstMapContents.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeConstMapContents extends SimpleNode { + public DynamicSerDeConstMapContents(int id) { + super(id); + } + + public DynamicSerDeConstMapContents(thrift_grammar p, int id) { + super(p, id); + } + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstValue.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstValue.java new file mode 100644 index 000000000..9006e9bb9 --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeConstValue.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* Generated By:JJTree: Do not edit this line. DynamicSerDeConstValue.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeConstValue extends SimpleNode { + public DynamicSerDeConstValue(int id) { + super(id); + } + + public DynamicSerDeConstValue(thrift_grammar p, int id) { + super(p, id); + } + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeDefinition.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeDefinition.java new file mode 100644 index 000000000..3ada5bf87 --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeDefinition.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* Generated By:JJTree: Do not edit this line. 
DynamicSerDeDefinition.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeDefinition extends SimpleNode { + public DynamicSerDeDefinition(int id) { + super(id); + } + + public DynamicSerDeDefinition(thrift_grammar p, int id) { + super(p, id); + } + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeDefinitionType.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeDefinitionType.java new file mode 100644 index 000000000..fbce1662e --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeDefinitionType.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* Generated By:JJTree: Do not edit this line. DynamicSerDeDefinitionType.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeDefinitionType extends SimpleNode { + public DynamicSerDeDefinitionType(int id) { + super(id); + } + + public DynamicSerDeDefinitionType(thrift_grammar p, int id) { + super(p, id); + } + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeEnum.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeEnum.java new file mode 100644 index 000000000..685dbbf32 --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeEnum.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* Generated By:JJTree: Do not edit this line. 
DynamicSerDeEnum.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeEnum extends SimpleNode { + public DynamicSerDeEnum(int id) { + super(id); + } + + public DynamicSerDeEnum(thrift_grammar p, int id) { + super(p, id); + } + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeEnumDef.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeEnumDef.java new file mode 100644 index 000000000..9c7c2c6e7 --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeEnumDef.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* Generated By:JJTree: Do not edit this line. DynamicSerDeEnumDef.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeEnumDef extends SimpleNode { + public DynamicSerDeEnumDef(int id) { + super(id); + } + + public DynamicSerDeEnumDef(thrift_grammar p, int id) { + super(p, id); + } + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeEnumDefList.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeEnumDefList.java new file mode 100644 index 000000000..ed84579b4 --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeEnumDefList.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* Generated By:JJTree: Do not edit this line. 
DynamicSerDeEnumDefList.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeEnumDefList extends SimpleNode { + public DynamicSerDeEnumDefList(int id) { + super(id); + } + + public DynamicSerDeEnumDefList(thrift_grammar p, int id) { + super(p, id); + } + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeExtends.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeExtends.java new file mode 100644 index 000000000..2bb174879 --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeExtends.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* Generated By:JJTree: Do not edit this line. DynamicSerDeExtends.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeExtends extends SimpleNode { + public DynamicSerDeExtends(int id) { + super(id); + } + + public DynamicSerDeExtends(thrift_grammar p, int id) { + super(p, id); + } + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldRequiredness.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldRequiredness.java new file mode 100644 index 000000000..e6f8cf4cb --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldRequiredness.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* Generated By:JJTree: Do not edit this line. 
DynamicSerDeFieldRequiredness.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeFieldRequiredness extends SimpleNode { + public DynamicSerDeFieldRequiredness(int id) { + super(id); + } + + public DynamicSerDeFieldRequiredness(thrift_grammar p, int id) { + super(p, id); + } + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldValue.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldValue.java new file mode 100644 index 000000000..523ddd910 --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldValue.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* Generated By:JJTree: Do not edit this line. DynamicSerDeFieldValue.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeFieldValue extends SimpleNode { + public DynamicSerDeFieldValue(int id) { + super(id); + } + + public DynamicSerDeFieldValue(thrift_grammar p, int id) { + super(p, id); + } + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFlagArgs.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFlagArgs.java new file mode 100644 index 000000000..42fb70d11 --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFlagArgs.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* Generated By:JJTree: Do not edit this line. 
DynamicSerDeFlagArgs.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeFlagArgs extends SimpleNode { + public DynamicSerDeFlagArgs(int id) { + super(id); + } + + public DynamicSerDeFlagArgs(thrift_grammar p, int id) { + super(p, id); + } + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFunctionType.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFunctionType.java new file mode 100644 index 000000000..20faa0b89 --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFunctionType.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* Generated By:JJTree: Do not edit this line. DynamicSerDeFunctionType.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeFunctionType extends SimpleNode { + public DynamicSerDeFunctionType(int id) { + super(id); + } + + public DynamicSerDeFunctionType(thrift_grammar p, int id) { + super(p, id); + } + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeHeader.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeHeader.java new file mode 100644 index 000000000..46511d765 --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeHeader.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* Generated By:JJTree: Do not edit this line. 
DynamicSerDeHeader.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeHeader extends SimpleNode { + public DynamicSerDeHeader(int id) { + super(id); + } + + public DynamicSerDeHeader(thrift_grammar p, int id) { + super(p, id); + } + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeHeaderList.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeHeaderList.java new file mode 100644 index 000000000..1aa2b7cc0 --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeHeaderList.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* Generated By:JJTree: Do not edit this line. DynamicSerDeHeaderList.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeHeaderList extends SimpleNode { + public DynamicSerDeHeaderList(int id) { + super(id); + } + + public DynamicSerDeHeaderList(thrift_grammar p, int id) { + super(p, id); + } + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeInclude.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeInclude.java new file mode 100644 index 000000000..68a1e0447 --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeInclude.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* Generated By:JJTree: Do not edit this line. 
DynamicSerDeInclude.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeInclude extends SimpleNode { + public DynamicSerDeInclude(int id) { + super(id); + } + + public DynamicSerDeInclude(thrift_grammar p, int id) { + super(p, id); + } + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeNamespace.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeNamespace.java new file mode 100644 index 000000000..9213406c3 --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeNamespace.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* Generated By:JJTree: Do not edit this line. DynamicSerDeNamespace.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeNamespace extends SimpleNode { + public DynamicSerDeNamespace(int id) { + super(id); + } + + public DynamicSerDeNamespace(thrift_grammar p, int id) { + super(p, id); + } + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeSenum.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeSenum.java new file mode 100644 index 000000000..ceac7f318 --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeSenum.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* Generated By:JJTree: Do not edit this line. 
DynamicSerDeSenum.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeSenum extends SimpleNode { + public DynamicSerDeSenum(int id) { + super(id); + } + + public DynamicSerDeSenum(thrift_grammar p, int id) { + super(p, id); + } + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeSenumDef.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeSenumDef.java new file mode 100644 index 000000000..4483364e9 --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeSenumDef.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* Generated By:JJTree: Do not edit this line. DynamicSerDeSenumDef.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeSenumDef extends SimpleNode { + public DynamicSerDeSenumDef(int id) { + super(id); + } + + public DynamicSerDeSenumDef(thrift_grammar p, int id) { + super(p, id); + } + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeSenumDefList.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeSenumDefList.java new file mode 100644 index 000000000..73210b310 --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeSenumDefList.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* Generated By:JJTree: Do not edit this line. 
DynamicSerDeSenumDefList.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeSenumDefList extends SimpleNode { + public DynamicSerDeSenumDefList(int id) { + super(id); + } + + public DynamicSerDeSenumDefList(thrift_grammar p, int id) { + super(p, id); + } + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeService.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeService.java new file mode 100644 index 000000000..e95a75845 --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeService.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* Generated By:JJTree: Do not edit this line. DynamicSerDeService.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeService extends SimpleNode { + public DynamicSerDeService(int id) { + super(id); + } + + public DynamicSerDeService(thrift_grammar p, int id) { + super(p, id); + } + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeStart.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeStart.java new file mode 100644 index 000000000..890d69a37 --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeStart.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* Generated By:JJTree: Do not edit this line. 
DynamicSerDeStart.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeStart extends SimpleNode { + public DynamicSerDeStart(int id) { + super(id); + } + + public DynamicSerDeStart(thrift_grammar p, int id) { + super(p, id); + } + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeThrows.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeThrows.java new file mode 100644 index 000000000..f705143ba --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeThrows.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* Generated By:JJTree: Do not edit this line. DynamicSerDeThrows.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeThrows extends SimpleNode { + public DynamicSerDeThrows(int id) { + super(id); + } + + public DynamicSerDeThrows(thrift_grammar p, int id) { + super(p, id); + } + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeDefinition.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeDefinition.java new file mode 100644 index 000000000..8415be883 --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeDefinition.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* Generated By:JJTree: Do not edit this line. 
DynamicSerDeTypeDefinition.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeTypeDefinition extends SimpleNode { + public DynamicSerDeTypeDefinition(int id) { + super(id); + } + + public DynamicSerDeTypeDefinition(thrift_grammar p, int id) { + super(p, id); + } + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeUnflagArgs.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeUnflagArgs.java new file mode 100644 index 000000000..618c90dbc --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeUnflagArgs.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* Generated By:JJTree: Do not edit this line. DynamicSerDeUnflagArgs.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeUnflagArgs extends SimpleNode { + public DynamicSerDeUnflagArgs(int id) { + super(id); + } + + public DynamicSerDeUnflagArgs(thrift_grammar p, int id) { + super(p, id); + } + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeXception.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeXception.java new file mode 100644 index 000000000..8fca7a09b --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeXception.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* Generated By:JJTree: Do not edit this line. 
DynamicSerDeXception.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class DynamicSerDeXception extends SimpleNode { + public DynamicSerDeXception(int id) { + super(id); + } + + public DynamicSerDeXception(thrift_grammar p, int id) { + super(p, id); + } + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/JJTthrift_grammarState.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/JJTthrift_grammarState.java new file mode 100644 index 000000000..aff4a0b75 --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/JJTthrift_grammarState.java @@ -0,0 +1,123 @@ +/* Generated By:JJTree: Do not edit this line. /data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/JJTthrift_grammarState.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +class JJTthrift_grammarState { + private java.util.Stack nodes; + private java.util.Stack marks; + + private int sp; // number of nodes on stack + private int mk; // current mark + private boolean node_created; + + JJTthrift_grammarState() { + nodes = new java.util.Stack(); + marks = new java.util.Stack(); + sp = 0; + mk = 0; + } + + /* Determines whether the current node was actually closed and + pushed. This should only be called in the final user action of a + node scope. */ + boolean nodeCreated() { + return node_created; + } + + /* Call this to reinitialize the node stack. It is called + automatically by the parser's ReInit() method. */ + void reset() { + nodes.removeAllElements(); + marks.removeAllElements(); + sp = 0; + mk = 0; + } + + /* Returns the root node of the AST. It only makes sense to call + this after a successful parse. */ + Node rootNode() { + return (Node)nodes.elementAt(0); + } + + /* Pushes a node on to the stack. */ + void pushNode(Node n) { + nodes.push(n); + ++sp; + } + + /* Returns the node on the top of the stack, and remove it from the + stack. */ + Node popNode() { + if (--sp < mk) { + mk = ((Integer)marks.pop()).intValue(); + } + return (Node)nodes.pop(); + } + + /* Returns the node currently on the top of the stack. */ + Node peekNode() { + return (Node)nodes.peek(); + } + + /* Returns the number of children on the stack in the current node + scope. */ + int nodeArity() { + return sp - mk; + } + + + void clearNodeScope(Node n) { + while (sp > mk) { + popNode(); + } + mk = ((Integer)marks.pop()).intValue(); + } + + + void openNodeScope(Node n) { + marks.push(new Integer(mk)); + mk = sp; + n.jjtOpen(); + } + + + /* A definite node is constructed from a specified number of + children. That number of nodes are popped from the stack and + made the children of the definite node. Then the definite node + is pushed on to the stack. */ + void closeNodeScope(Node n, int num) { + mk = ((Integer)marks.pop()).intValue(); + while (num-- > 0) { + Node c = popNode(); + c.jjtSetParent(n); + n.jjtAddChild(c, num); + } + n.jjtClose(); + pushNode(n); + node_created = true; + } + + + /* A conditional node is constructed if its condition is true. All + the nodes that have been pushed since the node was opened are + made children of the the conditional node, which is then pushed + on to the stack. If the condition is false the node is not + constructed and they are left on the stack. 
*/ + void closeNodeScope(Node n, boolean condition) { + if (condition) { + int a = nodeArity(); + mk = ((Integer)marks.pop()).intValue(); + while (a-- > 0) { + Node c = popNode(); + c.jjtSetParent(n); + n.jjtAddChild(c, a); + } + n.jjtClose(); + pushNode(n); + node_created = true; + } else { + mk = ((Integer)marks.pop()).intValue(); + node_created = false; + } + } +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/Node.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/Node.java new file mode 100644 index 000000000..517447da5 --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/Node.java @@ -0,0 +1,34 @@ +/* Generated By:JJTree: Do not edit this line. Node.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +/* All AST nodes must implement this interface. It provides basic + machinery for constructing the parent and child relationships + between nodes. */ + +public interface Node { + + /** This method is called after the node has been made the current + node. It indicates that child nodes can now be added to it. */ + public void jjtOpen(); + + /** This method is called after all the child nodes have been + added. */ + public void jjtClose(); + + /** This pair of methods are used to inform the node of its + parent. */ + public void jjtSetParent(Node n); + public Node jjtGetParent(); + + /** This method tells the node to add its argument to the node's + list of children. */ + public void jjtAddChild(Node n, int i); + + /** This method returns a child node. The children are numbered + from zero, left to right. */ + public Node jjtGetChild(int i); + + /** Return the number of children the node has. */ + public int jjtGetNumChildren(); +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/ParseException.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/ParseException.java new file mode 100644 index 000000000..bc833a13a --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/ParseException.java @@ -0,0 +1,192 @@ +/* Generated By:JavaCC: Do not edit this line. ParseException.java Version 3.0 */ +package org.apache.hadoop.hive.serde2.dynamic_type; + +/** + * This exception is thrown when parse errors are encountered. + * You can explicitly create objects of this exception type by + * calling the method generateParseException in the generated + * parser. + * + * You can modify this class to customize your error reporting + * mechanisms so long as you retain the public fields. + */ +public class ParseException extends Exception { + + /** + * This constructor is used by the method "generateParseException" + * in the generated parser. Calling this constructor generates + * a new object of this type with the fields "currentToken", + * "expectedTokenSequences", and "tokenImage" set. The boolean + * flag "specialConstructor" is also set to true to indicate that + * this constructor was used to create this object. 
+ * This constructor calls its super class with the empty string + * to force the "toString" method of parent class "Throwable" to + * print the error message in the form: + * ParseException: + */ + public ParseException(Token currentTokenVal, + int[][] expectedTokenSequencesVal, + String[] tokenImageVal + ) + { + super(""); + specialConstructor = true; + currentToken = currentTokenVal; + expectedTokenSequences = expectedTokenSequencesVal; + tokenImage = tokenImageVal; + } + + /** + * The following constructors are for use by you for whatever + * purpose you can think of. Constructing the exception in this + * manner makes the exception behave in the normal way - i.e., as + * documented in the class "Throwable". The fields "errorToken", + * "expectedTokenSequences", and "tokenImage" do not contain + * relevant information. The JavaCC generated code does not use + * these constructors. + */ + + public ParseException() { + super(); + specialConstructor = false; + } + + public ParseException(String message) { + super(message); + specialConstructor = false; + } + + /** + * This variable determines which constructor was used to create + * this object and thereby affects the semantics of the + * "getMessage" method (see below). + */ + protected boolean specialConstructor; + + /** + * This is the last token that has been consumed successfully. If + * this object has been created due to a parse error, the token + * followng this token will (therefore) be the first error token. + */ + public Token currentToken; + + /** + * Each entry in this array is an array of integers. Each array + * of integers represents a sequence of tokens (by their ordinal + * values) that is expected at this point of the parse. + */ + public int[][] expectedTokenSequences; + + /** + * This is a reference to the "tokenImage" array of the generated + * parser within which the parse error occurred. This array is + * defined in the generated ...Constants interface. + */ + public String[] tokenImage; + + /** + * This method has the standard behavior when this object has been + * created using the standard constructors. Otherwise, it uses + * "currentToken" and "expectedTokenSequences" to generate a parse + * error message and returns it. If this object has been created + * due to a parse error, and you do not catch it (it gets thrown + * from the parser), then this method is called during the printing + * of the final stack trace, and hence the correct error message + * gets displayed. + */ + public String getMessage() { + if (!specialConstructor) { + return super.getMessage(); + } + StringBuffer expected = new StringBuffer(); + int maxSize = 0; + for (int i = 0; i < expectedTokenSequences.length; i++) { + if (maxSize < expectedTokenSequences[i].length) { + maxSize = expectedTokenSequences[i].length; + } + for (int j = 0; j < expectedTokenSequences[i].length; j++) { + expected.append(tokenImage[expectedTokenSequences[i][j]]).append(" "); + } + if (expectedTokenSequences[i][expectedTokenSequences[i].length - 1] != 0) { + expected.append("..."); + } + expected.append(eol).append(" "); + } + String retval = "Encountered \""; + Token tok = currentToken.next; + for (int i = 0; i < maxSize; i++) { + if (i != 0) retval += " "; + if (tok.kind == 0) { + retval += tokenImage[0]; + break; + } + retval += add_escapes(tok.image); + tok = tok.next; + } + retval += "\" at line " + currentToken.next.beginLine + ", column " + currentToken.next.beginColumn; + retval += "." 
+ eol; + if (expectedTokenSequences.length == 1) { + retval += "Was expecting:" + eol + " "; + } else { + retval += "Was expecting one of:" + eol + " "; + } + retval += expected.toString(); + return retval; + } + + /** + * The end of line string for this machine. + */ + protected String eol = System.getProperty("line.separator", "\n"); + + /** + * Used to convert raw characters to their escaped version + * when these raw version cannot be used as part of an ASCII + * string literal. + */ + protected String add_escapes(String str) { + StringBuffer retval = new StringBuffer(); + char ch; + for (int i = 0; i < str.length(); i++) { + switch (str.charAt(i)) + { + case 0 : + continue; + case '\b': + retval.append("\\b"); + continue; + case '\t': + retval.append("\\t"); + continue; + case '\n': + retval.append("\\n"); + continue; + case '\f': + retval.append("\\f"); + continue; + case '\r': + retval.append("\\r"); + continue; + case '\"': + retval.append("\\\""); + continue; + case '\'': + retval.append("\\\'"); + continue; + case '\\': + retval.append("\\\\"); + continue; + default: + if ((ch = str.charAt(i)) < 0x20 || ch > 0x7e) { + String s = "0000" + Integer.toString(ch, 16); + retval.append("\\u" + s.substring(s.length() - 4, s.length())); + } else { + retval.append(ch); + } + continue; + } + } + return retval.toString(); + } + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/SimpleCharStream.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/SimpleCharStream.java new file mode 100644 index 000000000..9d45dbc73 --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/SimpleCharStream.java @@ -0,0 +1,439 @@ +/* Generated By:JavaCC: Do not edit this line. SimpleCharStream.java Version 4.0 */ +package org.apache.hadoop.hive.serde2.dynamic_type; + +/** + * An implementation of interface CharStream, where the stream is assumed to + * contain only ASCII characters (without unicode processing). 
+ */ + +public class SimpleCharStream +{ + public static final boolean staticFlag = false; + int bufsize; + int available; + int tokenBegin; + public int bufpos = -1; + protected int bufline[]; + protected int bufcolumn[]; + + protected int column = 0; + protected int line = 1; + + protected boolean prevCharIsCR = false; + protected boolean prevCharIsLF = false; + + protected java.io.Reader inputStream; + + protected char[] buffer; + protected int maxNextCharInd = 0; + protected int inBuf = 0; + protected int tabSize = 8; + + protected void setTabSize(int i) { tabSize = i; } + protected int getTabSize(int i) { return tabSize; } + + + protected void ExpandBuff(boolean wrapAround) + { + char[] newbuffer = new char[bufsize + 2048]; + int newbufline[] = new int[bufsize + 2048]; + int newbufcolumn[] = new int[bufsize + 2048]; + + try + { + if (wrapAround) + { + System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin); + System.arraycopy(buffer, 0, newbuffer, + bufsize - tokenBegin, bufpos); + buffer = newbuffer; + + System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin); + System.arraycopy(bufline, 0, newbufline, bufsize - tokenBegin, bufpos); + bufline = newbufline; + + System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin); + System.arraycopy(bufcolumn, 0, newbufcolumn, bufsize - tokenBegin, bufpos); + bufcolumn = newbufcolumn; + + maxNextCharInd = (bufpos += (bufsize - tokenBegin)); + } + else + { + System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin); + buffer = newbuffer; + + System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin); + bufline = newbufline; + + System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin); + bufcolumn = newbufcolumn; + + maxNextCharInd = (bufpos -= tokenBegin); + } + } + catch (Throwable t) + { + throw new Error(t.getMessage()); + } + + + bufsize += 2048; + available = bufsize; + tokenBegin = 0; + } + + protected void FillBuff() throws java.io.IOException + { + if (maxNextCharInd == available) + { + if (available == bufsize) + { + if (tokenBegin > 2048) + { + bufpos = maxNextCharInd = 0; + available = tokenBegin; + } + else if (tokenBegin < 0) + bufpos = maxNextCharInd = 0; + else + ExpandBuff(false); + } + else if (available > tokenBegin) + available = bufsize; + else if ((tokenBegin - available) < 2048) + ExpandBuff(true); + else + available = tokenBegin; + } + + int i; + try { + if ((i = inputStream.read(buffer, maxNextCharInd, + available - maxNextCharInd)) == -1) + { + inputStream.close(); + throw new java.io.IOException(); + } + else + maxNextCharInd += i; + return; + } + catch(java.io.IOException e) { + --bufpos; + backup(0); + if (tokenBegin == -1) + tokenBegin = bufpos; + throw e; + } + } + + public char BeginToken() throws java.io.IOException + { + tokenBegin = -1; + char c = readChar(); + tokenBegin = bufpos; + + return c; + } + + protected void UpdateLineColumn(char c) + { + column++; + + if (prevCharIsLF) + { + prevCharIsLF = false; + line += (column = 1); + } + else if (prevCharIsCR) + { + prevCharIsCR = false; + if (c == '\n') + { + prevCharIsLF = true; + } + else + line += (column = 1); + } + + switch (c) + { + case '\r' : + prevCharIsCR = true; + break; + case '\n' : + prevCharIsLF = true; + break; + case '\t' : + column--; + column += (tabSize - (column % tabSize)); + break; + default : + break; + } + + bufline[bufpos] = line; + bufcolumn[bufpos] = column; + } + + public char readChar() throws java.io.IOException + { + if 
(inBuf > 0) + { + --inBuf; + + if (++bufpos == bufsize) + bufpos = 0; + + return buffer[bufpos]; + } + + if (++bufpos >= maxNextCharInd) + FillBuff(); + + char c = buffer[bufpos]; + + UpdateLineColumn(c); + return (c); + } + + /** + * @deprecated + * @see #getEndColumn + */ + + public int getColumn() { + return bufcolumn[bufpos]; + } + + /** + * @deprecated + * @see #getEndLine + */ + + public int getLine() { + return bufline[bufpos]; + } + + public int getEndColumn() { + return bufcolumn[bufpos]; + } + + public int getEndLine() { + return bufline[bufpos]; + } + + public int getBeginColumn() { + return bufcolumn[tokenBegin]; + } + + public int getBeginLine() { + return bufline[tokenBegin]; + } + + public void backup(int amount) { + + inBuf += amount; + if ((bufpos -= amount) < 0) + bufpos += bufsize; + } + + public SimpleCharStream(java.io.Reader dstream, int startline, + int startcolumn, int buffersize) + { + inputStream = dstream; + line = startline; + column = startcolumn - 1; + + available = bufsize = buffersize; + buffer = new char[buffersize]; + bufline = new int[buffersize]; + bufcolumn = new int[buffersize]; + } + + public SimpleCharStream(java.io.Reader dstream, int startline, + int startcolumn) + { + this(dstream, startline, startcolumn, 4096); + } + + public SimpleCharStream(java.io.Reader dstream) + { + this(dstream, 1, 1, 4096); + } + public void ReInit(java.io.Reader dstream, int startline, + int startcolumn, int buffersize) + { + inputStream = dstream; + line = startline; + column = startcolumn - 1; + + if (buffer == null || buffersize != buffer.length) + { + available = bufsize = buffersize; + buffer = new char[buffersize]; + bufline = new int[buffersize]; + bufcolumn = new int[buffersize]; + } + prevCharIsLF = prevCharIsCR = false; + tokenBegin = inBuf = maxNextCharInd = 0; + bufpos = -1; + } + + public void ReInit(java.io.Reader dstream, int startline, + int startcolumn) + { + ReInit(dstream, startline, startcolumn, 4096); + } + + public void ReInit(java.io.Reader dstream) + { + ReInit(dstream, 1, 1, 4096); + } + public SimpleCharStream(java.io.InputStream dstream, String encoding, int startline, + int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException + { + this(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize); + } + + public SimpleCharStream(java.io.InputStream dstream, int startline, + int startcolumn, int buffersize) + { + this(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize); + } + + public SimpleCharStream(java.io.InputStream dstream, String encoding, int startline, + int startcolumn) throws java.io.UnsupportedEncodingException + { + this(dstream, encoding, startline, startcolumn, 4096); + } + + public SimpleCharStream(java.io.InputStream dstream, int startline, + int startcolumn) + { + this(dstream, startline, startcolumn, 4096); + } + + public SimpleCharStream(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException + { + this(dstream, encoding, 1, 1, 4096); + } + + public SimpleCharStream(java.io.InputStream dstream) + { + this(dstream, 1, 1, 4096); + } + + public void ReInit(java.io.InputStream dstream, String encoding, int startline, + int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException + { + ReInit(encoding == null ? 
new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize); + } + + public void ReInit(java.io.InputStream dstream, int startline, + int startcolumn, int buffersize) + { + ReInit(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize); + } + + public void ReInit(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException + { + ReInit(dstream, encoding, 1, 1, 4096); + } + + public void ReInit(java.io.InputStream dstream) + { + ReInit(dstream, 1, 1, 4096); + } + public void ReInit(java.io.InputStream dstream, String encoding, int startline, + int startcolumn) throws java.io.UnsupportedEncodingException + { + ReInit(dstream, encoding, startline, startcolumn, 4096); + } + public void ReInit(java.io.InputStream dstream, int startline, + int startcolumn) + { + ReInit(dstream, startline, startcolumn, 4096); + } + public String GetImage() + { + if (bufpos >= tokenBegin) + return new String(buffer, tokenBegin, bufpos - tokenBegin + 1); + else + return new String(buffer, tokenBegin, bufsize - tokenBegin) + + new String(buffer, 0, bufpos + 1); + } + + public char[] GetSuffix(int len) + { + char[] ret = new char[len]; + + if ((bufpos + 1) >= len) + System.arraycopy(buffer, bufpos - len + 1, ret, 0, len); + else + { + System.arraycopy(buffer, bufsize - (len - bufpos - 1), ret, 0, + len - bufpos - 1); + System.arraycopy(buffer, 0, ret, len - bufpos - 1, bufpos + 1); + } + + return ret; + } + + public void Done() + { + buffer = null; + bufline = null; + bufcolumn = null; + } + + /** + * Method to adjust line and column numbers for the start of a token. + */ + public void adjustBeginLineColumn(int newLine, int newCol) + { + int start = tokenBegin; + int len; + + if (bufpos >= tokenBegin) + { + len = bufpos - tokenBegin + inBuf + 1; + } + else + { + len = bufsize - tokenBegin + bufpos + 1 + inBuf; + } + + int i = 0, j = 0, k = 0; + int nextColDiff = 0, columnDiff = 0; + + while (i < len && + bufline[j = start % bufsize] == bufline[k = ++start % bufsize]) + { + bufline[j] = newLine; + nextColDiff = columnDiff + bufcolumn[k] - bufcolumn[j]; + bufcolumn[j] = newCol + columnDiff; + columnDiff = nextColDiff; + i++; + } + + if (i < len) + { + bufline[j] = newLine++; + bufcolumn[j] = newCol + columnDiff; + + while (i++ < len) + { + if (bufline[j = start % bufsize] != bufline[++start % bufsize]) + bufline[j] = newLine++; + else + bufline[j] = newLine; + } + } + + line = bufline[j]; + column = bufcolumn[j]; + } + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/SimpleNode.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/SimpleNode.java new file mode 100644 index 000000000..f3f6ec426 --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/SimpleNode.java @@ -0,0 +1,72 @@ +/* Generated By:JJTree: Do not edit this line. 
SimpleNode.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class SimpleNode implements Node { + protected Node parent; + protected Node[] children; + protected int id; + protected thrift_grammar parser; + + public SimpleNode(int i) { + id = i; + } + + public SimpleNode(thrift_grammar p, int i) { + this(i); + parser = p; + } + + public void jjtOpen() { + } + + public void jjtClose() { + } + + public void jjtSetParent(Node n) { parent = n; } + public Node jjtGetParent() { return parent; } + + public void jjtAddChild(Node n, int i) { + if (children == null) { + children = new Node[i + 1]; + } else if (i >= children.length) { + Node c[] = new Node[i + 1]; + System.arraycopy(children, 0, c, 0, children.length); + children = c; + } + children[i] = n; + } + + public Node jjtGetChild(int i) { + return children[i]; + } + + public int jjtGetNumChildren() { + return (children == null) ? 0 : children.length; + } + + /* You can override these two methods in subclasses of SimpleNode to + customize the way the node appears when the tree is dumped. If + your output uses more than one line you should override + toString(String), otherwise overriding toString() is probably all + you need to do. */ + + public String toString() { return thrift_grammarTreeConstants.jjtNodeName[id]; } + public String toString(String prefix) { return prefix + toString(); } + + /* Override this method if you want to customize how the node dumps + out its children. */ + + public void dump(String prefix) { + System.out.println(toString(prefix)); + if (children != null) { + for (int i = 0; i < children.length; ++i) { + SimpleNode n = (SimpleNode)children[i]; + if (n != null) { + n.dump(prefix + " "); + } + } + } + } +} + diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/Token.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/Token.java new file mode 100644 index 000000000..f14539525 --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/Token.java @@ -0,0 +1,81 @@ +/* Generated By:JavaCC: Do not edit this line. Token.java Version 3.0 */ +package org.apache.hadoop.hive.serde2.dynamic_type; + +/** + * Describes the input token stream. + */ + +public class Token { + + /** + * An integer that describes the kind of this token. This numbering + * system is determined by JavaCCParser, and a table of these numbers is + * stored in the file ...Constants.java. + */ + public int kind; + + /** + * beginLine and beginColumn describe the position of the first character + * of this token; endLine and endColumn describe the position of the + * last character of this token. + */ + public int beginLine, beginColumn, endLine, endColumn; + + /** + * The string image of the token. + */ + public String image; + + /** + * A reference to the next regular (non-special) token from the input + * stream. If this is the last token from the input stream, or if the + * token manager has not read tokens beyond this one, this field is + * set to null. This is true only if this token is also a regular + * token. Otherwise, see below for a description of the contents of + * this field. + */ + public Token next; + + /** + * This field is used to access special tokens that occur prior to this + * token, but after the immediately preceding regular (non-special) token. + * If there are no such special tokens, this field is set to null. 
+ * When there is more than one such special token, this field refers + * to the last of these special tokens, which in turn refers to the next + * previous special token through its specialToken field, and so on + * until the first special token (whose specialToken field is null). + * The next fields of special tokens refer to other special tokens that + * immediately follow them (without an intervening regular token). If there + * is no such token, this field is null. + */ + public Token specialToken; + + /** + * Returns the image. + */ + public String toString() + { + return image; + } + + /** + * Returns a new Token object, by default. However, if you want, you + * can create and return subclass objects based on the value of ofKind. + * Simply add the cases to the switch for all those special cases. + * For example, if you have a subclass of Token called IDToken that + * you want to create if ofKind is ID, simply add something like : + * + * case MyParserConstants.ID : return new IDToken(); + * + * to the following switch statement. Then you can cast the matchedToken + * variable to the appropriate type and use it in your lexical actions. + */ + public static final Token newToken(int ofKind) + { + switch(ofKind) + { + default : return new Token(); + } + } + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/TokenMgrError.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/TokenMgrError.java new file mode 100644 index 000000000..ef1afcd1c --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/TokenMgrError.java @@ -0,0 +1,133 @@ +/* Generated By:JavaCC: Do not edit this line. TokenMgrError.java Version 3.0 */ +package org.apache.hadoop.hive.serde2.dynamic_type; + +public class TokenMgrError extends Error +{ + /* + * Ordinals for various reasons why an Error of this type can be thrown. + */ + + /** + * Lexical error occurred. + */ + static final int LEXICAL_ERROR = 0; + + /** + * An attempt was made to create a second instance of a static token manager. + */ + static final int STATIC_LEXER_ERROR = 1; + + /** + * Tried to change to an invalid lexical state. + */ + static final int INVALID_LEXICAL_STATE = 2; + + /** + * Detected (and bailed out of) an infinite loop in the token manager. + */ + static final int LOOP_DETECTED = 3; + + /** + * Indicates the reason why the exception is thrown. It will have + * one of the above 4 values. 
+ */ + int errorCode; + + /** + * Replaces unprintable characters by their escaped (or unicode escaped) + * equivalents in the given string. + */ + protected static final String addEscapes(String str) { + StringBuffer retval = new StringBuffer(); + char ch; + for (int i = 0; i < str.length(); i++) { + switch (str.charAt(i)) + { + case 0 : + continue; + case '\b': + retval.append("\\b"); + continue; + case '\t': + retval.append("\\t"); + continue; + case '\n': + retval.append("\\n"); + continue; + case '\f': + retval.append("\\f"); + continue; + case '\r': + retval.append("\\r"); + continue; + case '\"': + retval.append("\\\""); + continue; + case '\'': + retval.append("\\\'"); + continue; + case '\\': + retval.append("\\\\"); + continue; + default: + if ((ch = str.charAt(i)) < 0x20 || ch > 0x7e) { + String s = "0000" + Integer.toString(ch, 16); + retval.append("\\u" + s.substring(s.length() - 4, s.length())); + } else { + retval.append(ch); + } + continue; + } + } + return retval.toString(); + } + + /** + * Returns a detailed message for the Error when it is thrown by the + * token manager to indicate a lexical error. + * Parameters : + * EOFSeen : indicates if EOF caused the lexical error + * curLexState : lexical state in which this error occurred + * errorLine : line number when the error occurred + * errorColumn : column number when the error occurred + * errorAfter : prefix that was seen before this error occurred + * curChar : the offending character + * Note: You can customize the lexical error message by modifying this method. + */ + protected static String LexicalError(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, char curChar) { + return("Lexical error at line " + + errorLine + ", column " + + errorColumn + ". Encountered: " + + (EOFSeen ? " " : ("\"" + addEscapes(String.valueOf(curChar)) + "\"") + " (" + (int)curChar + "), ") + + "after : \"" + addEscapes(errorAfter) + "\""); + } + + /** + * You can also modify the body of this method to customize your error messages. + * For example, cases like LOOP_DETECTED and INVALID_LEXICAL_STATE are not + * of end-users' concern, so you can return something like : + * + * "Internal Error : Please file a bug report .... " + * + * from this method for such cases in the release version of your parser. + */ + public String getMessage() { + return super.getMessage(); + } + + /* + * Constructors of various flavors follow. + */ + + public TokenMgrError() { + } + + public TokenMgrError(String message, int reason) { + super(message); + errorCode = reason; + } + + public TokenMgrError(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, char curChar, int reason) { + this(LexicalError(EOFSeen, lexState, errorLine, errorColumn, errorAfter, curChar), reason); + } +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammar.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammar.java new file mode 100644 index 000000000..37cb1aacf --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammar.java @@ -0,0 +1,2290 @@ +/* Generated By:JJTree&JavaCC: Do not edit this line. 
thrift_grammar.java */ +package org.apache.hadoop.hive.serde2.dynamic_type; + +import java.util.*; +import java.io.*; +import java.net.*; +import com.facebook.thrift.protocol.*; +import com.facebook.thrift.transport.*; +import org.apache.hadoop.hive.serde2.dynamic_type.*; + +public class thrift_grammar/*@bgen(jjtree)*/implements thrift_grammarTreeConstants, thrift_grammarConstants {/*@bgen(jjtree)*/ + protected JJTthrift_grammarState jjtree = new JJTthrift_grammarState(); + private List include_path = null; + + // for computing the autogenerated field ids in thrift + private int field_val; + + // store types and tables + // separately because one cannot use a table (ie service.method) as a Struct like type. + protected Map types; + protected Map tables; + + // system include path + final private static String default_include_path[] = { "/usr/local/include","/usr/include","/usr/local/include/thrift/if","/usr/local/include/fb303/if" }; + + // need three params to differentiate between this and 2 param method auto generated since + // some calls in the autogenerated code use null param for 2nd param and thus ambiguous. + protected thrift_grammar(InputStream is, List include_path, boolean junk) { + this(is,null); + this.types = new HashMap () ; + this.tables = new HashMap () ; + this.include_path = include_path; + this.field_val = -1; + } + + // find the file on the include path + private static File findFile(String fname, List include_path) { + for(String path: include_path) { + final String full = path + "/" + fname; + File f = new File(full); + if(f.exists()) { + return f; + } + } + return null; + } + + public static void main(String args[]) { + String filename = null; + List include_path = new ArrayList(); + + for(String path: default_include_path) { + include_path.add(path); + } + for(int i = 0; i < args.length; i++) { + String arg = args[i]; + if(arg.equals("--include") && i + 1 < args.length) { + include_path.add(args[++i]); + } + if(arg.equals("--file") && i + 1 < args.length) { + filename = args[++i]; + } + } + + InputStream is = System.in; + if(filename != null) { + try { + is = new FileInputStream(findFile(filename, include_path)); + } catch(IOException e) { + } + } + thrift_grammar t = new thrift_grammar(is,include_path,false); + + try { + t.Start(); + } catch (Exception e) { + System.out.println("Parse error."); + System.out.println(e.getMessage()); + e.printStackTrace(); + } + } + + final public SimpleNode Start() throws ParseException { + /*@bgen(jjtree) Start */ + DynamicSerDeStart jjtn000 = new DynamicSerDeStart(JJTSTART); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + HeaderList(); + label_1: + while (true) { + Definition(); + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_const: + case tok_senum: + case tok_typedef: + case tok_struct: + case tok_exception: + case tok_service: + case tok_enum: + ; + break; + default: + jj_la1[0] = jj_gen; + break label_1; + } + } + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + 
final public SimpleNode HeaderList() throws ParseException { + /*@bgen(jjtree) HeaderList */ + DynamicSerDeHeaderList jjtn000 = new DynamicSerDeHeaderList(JJTHEADERLIST); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + label_2: + while (true) { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_namespace: + case tok_cpp_namespace: + case tok_cpp_include: + case tok_java_package: + case tok_cocoa_prefix: + case tok_csharp_namespace: + case tok_php_namespace: + case tok_py_module: + case tok_perl_package: + case tok_ruby_namespace: + case tok_smalltalk_category: + case tok_smalltalk_prefix: + case tok_xsd_namespace: + case tok_include: + ; + break; + default: + jj_la1[1] = jj_gen; + break label_2; + } + Header(); + } + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode Header() throws ParseException { + /*@bgen(jjtree) Header */ + DynamicSerDeHeader jjtn000 = new DynamicSerDeHeader(JJTHEADER); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_include: + Include(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_namespace: + case tok_cpp_namespace: + case tok_cpp_include: + case tok_java_package: + case tok_cocoa_prefix: + case tok_csharp_namespace: + case tok_php_namespace: + case tok_py_module: + case tok_perl_package: + case tok_ruby_namespace: + case tok_smalltalk_category: + case tok_smalltalk_prefix: + case tok_xsd_namespace: + Namespace(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + default: + jj_la1[2] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode Namespace() throws ParseException { + /*@bgen(jjtree) Namespace */ + DynamicSerDeNamespace jjtn000 = new DynamicSerDeNamespace(JJTNAMESPACE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_namespace: + jj_consume_token(tok_namespace); + jj_consume_token(IDENTIFIER); + jj_consume_token(IDENTIFIER); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_cpp_namespace: + jj_consume_token(tok_cpp_namespace); + jj_consume_token(IDENTIFIER); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_cpp_include: + jj_consume_token(tok_cpp_include); + 
jj_consume_token(tok_literal); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_php_namespace: + jj_consume_token(tok_php_namespace); + jj_consume_token(IDENTIFIER); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_py_module: + jj_consume_token(tok_py_module); + jj_consume_token(IDENTIFIER); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_perl_package: + jj_consume_token(tok_perl_package); + jj_consume_token(IDENTIFIER); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_ruby_namespace: + jj_consume_token(tok_ruby_namespace); + jj_consume_token(IDENTIFIER); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_smalltalk_category: + jj_consume_token(tok_smalltalk_category); + jj_consume_token(tok_st_identifier); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_smalltalk_prefix: + jj_consume_token(tok_smalltalk_prefix); + jj_consume_token(IDENTIFIER); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_java_package: + jj_consume_token(tok_java_package); + jj_consume_token(IDENTIFIER); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_cocoa_prefix: + jj_consume_token(tok_cocoa_prefix); + jj_consume_token(IDENTIFIER); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_xsd_namespace: + jj_consume_token(tok_xsd_namespace); + jj_consume_token(tok_literal); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_csharp_namespace: + jj_consume_token(tok_csharp_namespace); + jj_consume_token(IDENTIFIER); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + default: + jj_la1[3] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode Include() throws ParseException { + /*@bgen(jjtree) Include */ + DynamicSerDeInclude jjtn000 = new DynamicSerDeInclude(JJTINCLUDE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000);String fname; + boolean found = false; + try { + jj_consume_token(tok_include); + fname = jj_consume_token(tok_literal).image; + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + // bugbug somewhat fragile below substring expression + fname = fname.substring(1,fname.length() - 1); + + // try to find the file on the include path + File f = thrift_grammar.findFile(fname, this.include_path); + if(f != null) { + found = true; + try { + FileInputStream fis = new FileInputStream(f); + thrift_grammar t = new thrift_grammar(fis,this.include_path, false); + t.Start(); + fis.close(); + found = true; + // add in what we found to our type and table tables. 
+ this.tables.putAll(t.tables); + this.types.putAll(t.types); + } catch (Exception e) { + System.out.println("File: " + fname + " - Oops."); + System.out.println(e.getMessage()); + e.printStackTrace(); + } + } + if(!found) { + {if (true) throw new RuntimeException("include file not found: " + fname);} + } + {if (true) return jjtn000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode Definition() throws ParseException { + /*@bgen(jjtree) Definition */ + DynamicSerDeDefinition jjtn000 = new DynamicSerDeDefinition(JJTDEFINITION); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_const: + Const(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_service: + Service(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_senum: + case tok_typedef: + case tok_struct: + case tok_exception: + case tok_enum: + TypeDefinition(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + default: + jj_la1[4] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode TypeDefinition() throws ParseException { + /*@bgen(jjtree) TypeDefinition */ + DynamicSerDeTypeDefinition jjtn000 = new DynamicSerDeTypeDefinition(JJTTYPEDEFINITION); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_typedef: + Typedef(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_enum: + Enum(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_senum: + Senum(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_struct: + Struct(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_exception: + Xception(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + default: + jj_la1[5] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public DynamicSerDeTypedef Typedef() throws ParseException { + /*@bgen(jjtree) Typedef */ + DynamicSerDeTypedef jjtn000 = new DynamicSerDeTypedef(JJTTYPEDEF); + boolean 
jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_typedef); + DefinitionType(); + jjtn000.name = jj_consume_token(IDENTIFIER).image; + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + // store the type for later retrieval + this.types.put(jjtn000.name, jjtn000); + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + +// returning void because we ignore this production. + final public void CommaOrSemicolon() throws ParseException { + /*@bgen(jjtree) CommaOrSemicolon */ + DynamicSerDeCommaOrSemicolon jjtn000 = new DynamicSerDeCommaOrSemicolon(JJTCOMMAORSEMICOLON); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case 58: + jj_consume_token(58); + break; + case 59: + jj_consume_token(59); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + + break; + default: + jj_la1[6] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + } + + final public SimpleNode Enum() throws ParseException { + /*@bgen(jjtree) Enum */ + DynamicSerDeEnum jjtn000 = new DynamicSerDeEnum(JJTENUM); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_enum); + jj_consume_token(IDENTIFIER); + jj_consume_token(60); + EnumDefList(); + jj_consume_token(61); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode EnumDefList() throws ParseException { + /*@bgen(jjtree) EnumDefList */ + DynamicSerDeEnumDefList jjtn000 = new DynamicSerDeEnumDefList(JJTENUMDEFLIST); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + label_3: + while (true) { + EnumDef(); + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case IDENTIFIER: + ; + break; + default: + jj_la1[7] = jj_gen; + break label_3; + } + } + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode EnumDef() throws ParseException { + /*@bgen(jjtree) EnumDef */ 
+ DynamicSerDeEnumDef jjtn000 = new DynamicSerDeEnumDef(JJTENUMDEF); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(IDENTIFIER); + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case 62: + jj_consume_token(62); + jj_consume_token(tok_int_constant); + break; + default: + jj_la1[8] = jj_gen; + ; + } + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case 58: + case 59: + CommaOrSemicolon(); + break; + default: + jj_la1[9] = jj_gen; + ; + } + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode Senum() throws ParseException { + /*@bgen(jjtree) Senum */ + DynamicSerDeSenum jjtn000 = new DynamicSerDeSenum(JJTSENUM); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_senum); + jj_consume_token(IDENTIFIER); + jj_consume_token(60); + SenumDefList(); + jj_consume_token(61); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode SenumDefList() throws ParseException { + /*@bgen(jjtree) SenumDefList */ + DynamicSerDeSenumDefList jjtn000 = new DynamicSerDeSenumDefList(JJTSENUMDEFLIST); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + label_4: + while (true) { + SenumDef(); + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_literal: + ; + break; + default: + jj_la1[10] = jj_gen; + break label_4; + } + } + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode SenumDef() throws ParseException { + /*@bgen(jjtree) SenumDef */ + DynamicSerDeSenumDef jjtn000 = new DynamicSerDeSenumDef(JJTSENUMDEF); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_literal); + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case 58: + case 59: + CommaOrSemicolon(); + break; + default: + jj_la1[11] = jj_gen; + ; + } + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if 
(jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode Const() throws ParseException { + /*@bgen(jjtree) Const */ + DynamicSerDeConst jjtn000 = new DynamicSerDeConst(JJTCONST); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_const); + FieldType(); + jj_consume_token(IDENTIFIER); + jj_consume_token(62); + ConstValue(); + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case 58: + case 59: + CommaOrSemicolon(); + break; + default: + jj_la1[12] = jj_gen; + ; + } + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode ConstValue() throws ParseException { + /*@bgen(jjtree) ConstValue */ + DynamicSerDeConstValue jjtn000 = new DynamicSerDeConstValue(JJTCONSTVALUE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_int_constant: + jj_consume_token(tok_int_constant); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + + break; + case tok_double_constant: + jj_consume_token(tok_double_constant); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + + break; + case tok_literal: + jj_consume_token(tok_literal); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + + break; + case IDENTIFIER: + jj_consume_token(IDENTIFIER); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + + break; + case 63: + ConstList(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + + break; + case 60: + ConstMap(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + default: + jj_la1[13] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode ConstList() throws ParseException { + /*@bgen(jjtree) ConstList */ + DynamicSerDeConstList jjtn000 = new DynamicSerDeConstList(JJTCONSTLIST); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(63); + ConstListContents(); + jj_consume_token(64); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } catch 
(Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode ConstListContents() throws ParseException { + /*@bgen(jjtree) ConstListContents */ + DynamicSerDeConstListContents jjtn000 = new DynamicSerDeConstListContents(JJTCONSTLISTCONTENTS); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + label_5: + while (true) { + ConstValue(); + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case 58: + case 59: + CommaOrSemicolon(); + break; + default: + jj_la1[14] = jj_gen; + ; + } + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_int_constant: + case tok_double_constant: + case IDENTIFIER: + case tok_literal: + case 60: + case 63: + ; + break; + default: + jj_la1[15] = jj_gen; + break label_5; + } + } + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode ConstMap() throws ParseException { + /*@bgen(jjtree) ConstMap */ + DynamicSerDeConstMap jjtn000 = new DynamicSerDeConstMap(JJTCONSTMAP); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(60); + ConstMapContents(); + jj_consume_token(61); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode ConstMapContents() throws ParseException { + /*@bgen(jjtree) ConstMapContents */ + DynamicSerDeConstMapContents jjtn000 = new DynamicSerDeConstMapContents(JJTCONSTMAPCONTENTS); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_int_constant: + case tok_double_constant: + case IDENTIFIER: + case tok_literal: + case 60: + case 63: + label_6: + while (true) { + ConstValue(); + jj_consume_token(65); + ConstValue(); + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case 58: + case 59: + CommaOrSemicolon(); + break; + default: + jj_la1[16] = jj_gen; + ; + } + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_int_constant: + case tok_double_constant: + case IDENTIFIER: + case tok_literal: + case 60: + case 63: + ; + break; + default: + jj_la1[17] = jj_gen; + break label_6; + } + } + 
jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + + break; + default: + jj_la1[18] = jj_gen; + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public DynamicSerDeStruct Struct() throws ParseException { + /*@bgen(jjtree) Struct */ + DynamicSerDeStruct jjtn000 = new DynamicSerDeStruct(JJTSTRUCT); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_struct); + jjtn000.name = jj_consume_token(IDENTIFIER).image; + jj_consume_token(60); + FieldList(); + jj_consume_token(61); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + this.types.put(jjtn000.name,jjtn000); + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode Xception() throws ParseException { + /*@bgen(jjtree) Xception */ + DynamicSerDeXception jjtn000 = new DynamicSerDeXception(JJTXCEPTION); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_exception); + jj_consume_token(IDENTIFIER); + jj_consume_token(60); + FieldList(); + jj_consume_token(61); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode Service() throws ParseException { + /*@bgen(jjtree) Service */ + DynamicSerDeService jjtn000 = new DynamicSerDeService(JJTSERVICE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_service); + jj_consume_token(IDENTIFIER); + Extends(); + jj_consume_token(60); + FlagArgs(); + label_7: + while (true) { + Function(); + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_void: + case tok_bool: + case tok_i16: + case tok_i32: + case tok_i64: + case tok_double: + case tok_string: + case tok_map: + case tok_list: + case tok_set: + case tok_async: + case IDENTIFIER: + ; + break; + default: + jj_la1[19] = jj_gen; + break label_7; + } + } + UnflagArgs(); + jj_consume_token(61); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + // at some point, these should be inserted as a "db" + {if (true) return jjtn000;} + } 
catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode FlagArgs() throws ParseException { + /*@bgen(jjtree) FlagArgs */ + DynamicSerDeFlagArgs jjtn000 = new DynamicSerDeFlagArgs(JJTFLAGARGS); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode UnflagArgs() throws ParseException { + /*@bgen(jjtree) UnflagArgs */ + DynamicSerDeUnflagArgs jjtn000 = new DynamicSerDeUnflagArgs(JJTUNFLAGARGS); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode Extends() throws ParseException { + /*@bgen(jjtree) Extends */ + DynamicSerDeExtends jjtn000 = new DynamicSerDeExtends(JJTEXTENDS); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_extends: + jj_consume_token(tok_extends); + jj_consume_token(IDENTIFIER); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + default: + jj_la1[20] = jj_gen; + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public DynamicSerDeFunction Function() throws ParseException { + /*@bgen(jjtree) Function */ + DynamicSerDeFunction jjtn000 = new DynamicSerDeFunction(JJTFUNCTION); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + Async(); + FunctionType(); + // the name of the function/table + jjtn000.name = jj_consume_token(IDENTIFIER).image; + jj_consume_token(66); + FieldList(); + jj_consume_token(67); + Throws(); + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case 58: + case 59: + CommaOrSemicolon(); + break; + default: + jj_la1[21] = jj_gen; + ; + } + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + this.tables.put(jjtn000.name, jjtn000); + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public void Async() throws ParseException { + /*@bgen(jjtree) Async */ + DynamicSerDeAsync jjtn000 = new DynamicSerDeAsync(JJTASYNC); + boolean jjtc000 = true; + 
jjtree.openNodeScope(jjtn000); + try { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_async: + jj_consume_token(tok_async); + break; + default: + jj_la1[22] = jj_gen; + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + + } + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + } + + final public void Throws() throws ParseException { + /*@bgen(jjtree) Throws */ + DynamicSerDeThrows jjtn000 = new DynamicSerDeThrows(JJTTHROWS); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_throws: + jj_consume_token(tok_throws); + jj_consume_token(66); + FieldList(); + jj_consume_token(67); + break; + default: + jj_la1[23] = jj_gen; + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + + } + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + } + +// nothing special - just use the DynamicSerDeFieldList's children methods to access the fields + final public DynamicSerDeFieldList FieldList() throws ParseException { + /*@bgen(jjtree) FieldList */ + DynamicSerDeFieldList jjtn000 = new DynamicSerDeFieldList(JJTFIELDLIST); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000);this.field_val = -1; + try { + label_8: + while (true) { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_bool: + case tok_i16: + case tok_i32: + case tok_i64: + case tok_double: + case tok_string: + case tok_map: + case tok_list: + case tok_set: + case tok_required: + case tok_optional: + case tok_int_constant: + case IDENTIFIER: + ; + break; + default: + jj_la1[24] = jj_gen; + break label_8; + } + Field(); + } + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public DynamicSerDeField Field() throws ParseException { + /*@bgen(jjtree) Field */ + DynamicSerDeField jjtn000 = new DynamicSerDeField(JJTFIELD); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000);String fidnum = ""; + String fid; + try { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_int_constant: + fidnum = jj_consume_token(tok_int_constant).image; + jj_consume_token(65); + break; + default: + jj_la1[25] = jj_gen; + ; + } + FieldRequiredness(); + FieldType(); + // the name of the field - not optional + jjtn000.name = jj_consume_token(IDENTIFIER).image; + FieldValue(); + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case 58: + case 59: + CommaOrSemicolon(); + break; + default: + jj_la1[26] = jj_gen; + ; + } + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + if(fidnum.length() > 0) { + int fidInt = Integer.valueOf(fidnum); + jjtn000.fieldid = fidInt; + } else { + jjtn000.fieldid = this.field_val--; + } + {if (true) 
return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode FieldRequiredness() throws ParseException { + /*@bgen(jjtree) FieldRequiredness */ + DynamicSerDeFieldRequiredness jjtn000 = new DynamicSerDeFieldRequiredness(JJTFIELDREQUIREDNESS); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_required: + jj_consume_token(tok_required); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_optional: + jj_consume_token(tok_optional); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + default: + jj_la1[27] = jj_gen; + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode FieldValue() throws ParseException { + /*@bgen(jjtree) FieldValue */ + DynamicSerDeFieldValue jjtn000 = new DynamicSerDeFieldValue(JJTFIELDVALUE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case 62: + jj_consume_token(62); + ConstValue(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + default: + jj_la1[28] = jj_gen; + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public SimpleNode DefinitionType() throws ParseException { + /*@bgen(jjtree) DefinitionType */ + DynamicSerDeDefinitionType jjtn000 = new DynamicSerDeDefinitionType(JJTDEFINITIONTYPE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_string: + TypeString(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_bool: + TypeBool(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_i16: + Typei16(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_i32: + Typei32(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_i64: + Typei64(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_double: + TypeDouble(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + 
break; + case tok_map: + TypeMap(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_set: + TypeSet(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_list: + TypeList(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + default: + jj_la1[29] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public void FunctionType() throws ParseException { + /*@bgen(jjtree) FunctionType */ + DynamicSerDeFunctionType jjtn000 = new DynamicSerDeFunctionType(JJTFUNCTIONTYPE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_bool: + case tok_i16: + case tok_i32: + case tok_i64: + case tok_double: + case tok_string: + case tok_map: + case tok_list: + case tok_set: + case IDENTIFIER: + FieldType(); + break; + case tok_void: + jj_consume_token(tok_void); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + + break; + default: + jj_la1[30] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + } + + final public DynamicSerDeFieldType FieldType() throws ParseException { + /*@bgen(jjtree) FieldType */ + DynamicSerDeFieldType jjtn000 = new DynamicSerDeFieldType(JJTFIELDTYPE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case tok_string: + TypeString(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_bool: + TypeBool(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_i16: + Typei16(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_i32: + Typei32(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_i64: + Typei64(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_double: + TypeDouble(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_map: + TypeMap(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_set: + TypeSet(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + case tok_list: + TypeList(); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return 
jjtn000;} + break; + case IDENTIFIER: + jjtn000.name = jj_consume_token(IDENTIFIER).image; + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + break; + default: + jj_la1[31] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public DynamicSerDeTypeString TypeString() throws ParseException { + /*@bgen(jjtree) TypeString */ + DynamicSerDeTypeString jjtn000 = new DynamicSerDeTypeString(JJTTYPESTRING); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_string); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public DynamicSerDeTypeByte TypeByte() throws ParseException { + /*@bgen(jjtree) TypeByte */ + DynamicSerDeTypeByte jjtn000 = new DynamicSerDeTypeByte(JJTTYPEBYTE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_byte); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public DynamicSerDeTypei16 Typei16() throws ParseException { + /*@bgen(jjtree) Typei16 */ + DynamicSerDeTypei16 jjtn000 = new DynamicSerDeTypei16(JJTTYPEI16); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_i16); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public DynamicSerDeTypei32 Typei32() throws ParseException { + /*@bgen(jjtree) Typei32 */ + DynamicSerDeTypei32 jjtn000 = new DynamicSerDeTypei32(JJTTYPEI32); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_i32); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public DynamicSerDeTypei64 Typei64() throws ParseException { + /*@bgen(jjtree) Typei64 */ + DynamicSerDeTypei64 jjtn000 = new DynamicSerDeTypei64(JJTTYPEI64); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_i64); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public DynamicSerDeTypeDouble TypeDouble() throws ParseException { + /*@bgen(jjtree) TypeDouble */ + DynamicSerDeTypeDouble jjtn000 = new DynamicSerDeTypeDouble(JJTTYPEDOUBLE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + 
jj_consume_token(tok_double); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public DynamicSerDeTypeBool TypeBool() throws ParseException { + /*@bgen(jjtree) TypeBool */ + DynamicSerDeTypeBool jjtn000 = new DynamicSerDeTypeBool(JJTTYPEBOOL); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_bool); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public DynamicSerDeTypeMap TypeMap() throws ParseException { + /*@bgen(jjtree) TypeMap */ + DynamicSerDeTypeMap jjtn000 = new DynamicSerDeTypeMap(JJTTYPEMAP); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_map); + jj_consume_token(68); + FieldType(); + jj_consume_token(58); + FieldType(); + jj_consume_token(69); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public DynamicSerDeTypeSet TypeSet() throws ParseException { + /*@bgen(jjtree) TypeSet */ + DynamicSerDeTypeSet jjtn000 = new DynamicSerDeTypeSet(JJTTYPESET); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_set); + jj_consume_token(68); + FieldType(); + jj_consume_token(69); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in function"); + } + + final public DynamicSerDeTypeList TypeList() throws ParseException { + /*@bgen(jjtree) TypeList */ + DynamicSerDeTypeList jjtn000 = new DynamicSerDeTypeList(JJTTYPELIST); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); + try { + jj_consume_token(tok_list); + jj_consume_token(68); + FieldType(); + jj_consume_token(69); + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + {if (true) return jjtn000;} + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + {if (true) throw (RuntimeException)jjte000;} + } + if (jjte000 instanceof ParseException) { + {if (true) throw (ParseException)jjte000;} + } + {if (true) throw (Error)jjte000;} + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } + throw new Error("Missing return statement in 
function"); + } + + public thrift_grammarTokenManager token_source; + SimpleCharStream jj_input_stream; + public Token token, jj_nt; + private int jj_ntk; + private int jj_gen; + final private int[] jj_la1 = new int[32]; + static private int[] jj_la1_0; + static private int[] jj_la1_1; + static private int[] jj_la1_2; + static { + jj_la1_0(); + jj_la1_1(); + jj_la1_2(); + } + private static void jj_la1_0() { + jj_la1_0 = new int[] {0x100,0xa3fee00,0xa3fee00,0x23fee00,0x100,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xb0000000,0x0,0x0,0x0,0x0,0xa0000000,0x0,0x0,0x0,0x0,0xa0000000,0xb0000000,0xa0000000,}; + } + private static void jj_la1_1() { + jj_la1_1 = new int[] {0x19c20,0x0,0x0,0x0,0x19c20,0x11c20,0xc000000,0x200000,0x40000000,0xc000000,0x1000000,0xc000000,0xc000000,0x91380000,0xc000000,0x91380000,0xc000000,0x91380000,0x91380000,0x2003cf,0x2000,0xc000000,0x200,0x4000,0x2e01cf,0x80000,0xc000000,0x60000,0x40000000,0x1cf,0x2001cf,0x2001cf,}; + } + private static void jj_la1_2() { + jj_la1_2 = new int[] {0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,}; + } + + public thrift_grammar(java.io.InputStream stream) { + this(stream, null); + } + public thrift_grammar(java.io.InputStream stream, String encoding) { + try { jj_input_stream = new SimpleCharStream(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); } + token_source = new thrift_grammarTokenManager(jj_input_stream); + token = new Token(); + jj_ntk = -1; + jj_gen = 0; + for (int i = 0; i < 32; i++) jj_la1[i] = -1; + } + + public void ReInit(java.io.InputStream stream) { + ReInit(stream, null); + } + public void ReInit(java.io.InputStream stream, String encoding) { + try { jj_input_stream.ReInit(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); } + token_source.ReInit(jj_input_stream); + token = new Token(); + jj_ntk = -1; + jjtree.reset(); + jj_gen = 0; + for (int i = 0; i < 32; i++) jj_la1[i] = -1; + } + + public thrift_grammar(java.io.Reader stream) { + jj_input_stream = new SimpleCharStream(stream, 1, 1); + token_source = new thrift_grammarTokenManager(jj_input_stream); + token = new Token(); + jj_ntk = -1; + jj_gen = 0; + for (int i = 0; i < 32; i++) jj_la1[i] = -1; + } + + public void ReInit(java.io.Reader stream) { + jj_input_stream.ReInit(stream, 1, 1); + token_source.ReInit(jj_input_stream); + token = new Token(); + jj_ntk = -1; + jjtree.reset(); + jj_gen = 0; + for (int i = 0; i < 32; i++) jj_la1[i] = -1; + } + + public thrift_grammar(thrift_grammarTokenManager tm) { + token_source = tm; + token = new Token(); + jj_ntk = -1; + jj_gen = 0; + for (int i = 0; i < 32; i++) jj_la1[i] = -1; + } + + public void ReInit(thrift_grammarTokenManager tm) { + token_source = tm; + token = new Token(); + jj_ntk = -1; + jjtree.reset(); + jj_gen = 0; + for (int i = 0; i < 32; i++) jj_la1[i] = -1; + } + + final private Token jj_consume_token(int kind) throws ParseException { + Token oldToken; + if ((oldToken = token).next != null) token = token.next; + else token = token.next = token_source.getNextToken(); + jj_ntk = -1; + if (token.kind == kind) { + jj_gen++; + return token; + } + token = oldToken; + jj_kind = kind; + throw generateParseException(); + } + + final public Token getNextToken() { + if (token.next != null) token = token.next; + else token = token.next = token_source.getNextToken(); + jj_ntk = -1; + jj_gen++; + return token; + } + + 
final public Token getToken(int index) { + Token t = token; + for (int i = 0; i < index; i++) { + if (t.next != null) t = t.next; + else t = t.next = token_source.getNextToken(); + } + return t; + } + + final private int jj_ntk() { + if ((jj_nt=token.next) == null) + return (jj_ntk = (token.next=token_source.getNextToken()).kind); + else + return (jj_ntk = jj_nt.kind); + } + + private java.util.Vector jj_expentries = new java.util.Vector(); + private int[] jj_expentry; + private int jj_kind = -1; + + public ParseException generateParseException() { + jj_expentries.removeAllElements(); + boolean[] la1tokens = new boolean[70]; + for (int i = 0; i < 70; i++) { + la1tokens[i] = false; + } + if (jj_kind >= 0) { + la1tokens[jj_kind] = true; + jj_kind = -1; + } + for (int i = 0; i < 32; i++) { + if (jj_la1[i] == jj_gen) { + for (int j = 0; j < 32; j++) { + if ((jj_la1_0[i] & (1< include_path = null; + + // for computing the autogenerated field ids in thrift + private int field_val; + + // store types and tables + // separately because one cannot use a table (ie service.method) as a Struct like type. + protected Map types; + protected Map tables; + + // system include path + final private static String default_include_path[] = { "/usr/local/include","/usr/include","/usr/local/include/thrift/if","/usr/local/include/fb303/if" }; + + // need three params to differentiate between this and 2 param method auto generated since + // some calls in the autogenerated code use null param for 2nd param and thus ambiguous. + protected thrift_grammar(InputStream is, List include_path, boolean junk) { + this(is,null); + this.types = new HashMap () ; + this.tables = new HashMap () ; + this.include_path = include_path; + this.field_val = -1; + } + + // find the file on the include path + private static File findFile(String fname, List include_path) { + for(String path: include_path) { + final String full = path + "/" + fname; + File f = new File(full); + if(f.exists()) { + return f; + } + } + return null; + } + + public static void main(String args[]) { + String filename = null; + List include_path = new ArrayList(); + + for(String path: default_include_path) { + include_path.add(path); + } + for(int i = 0; i < args.length; i++) { + String arg = args[i]; + if(arg.equals("--include") && i + 1 < args.length) { + include_path.add(args[++i]); + } + if(arg.equals("--file") && i + 1 < args.length) { + filename = args[++i]; + } + } + + InputStream is = System.in; + if(filename != null) { + try { + is = new FileInputStream(findFile(filename, include_path)); + } catch(IOException e) { + } + } + thrift_grammar t = new thrift_grammar(is,include_path,false); + + try { + t.Start(); + } catch (Exception e) { + System.out.println("Parse error."); + System.out.println(e.getMessage()); + e.printStackTrace(); + } + } +} + +PARSER_END(thrift_grammar) + + + +SKIP : +{ + " " +| "\t" +| "\n" +| "\r" +| <"#"(~["\n"])* ("\n"|"\r"|"\r\n")> +| <"//" (~["\n","\r"])* ("\n"|"\r"|"\r\n")> +| <"/*" (~["*"])* "*" (~["/"] (~["*"])* "*")* "/"> +} + + +/** + * HELPER DEFINITIONS, COMMENTS, CONSTANTS, AND WHATNOT + */ + +TOKEN: +{ +| + | +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| + | + | + | + | + | + | + | + | + | + | + | + | + +} + +TOKEN: { + + +| +)*"."()+(["e","E"](["+","-"])?()+)?> +| +(||"."|"_")*> +| +<#LETTER: (["a"-"z", "A"-"Z" ]) > +| +<#DIGIT: ["0"-"9"] > +| + +| + +} + + +SimpleNode Start() : {/*@bgen(jjtree) Start */ + DynamicSerDeStart jjtn000 = new DynamicSerDeStart(JJTSTART); + boolean jjtc000 
= true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) Start */ + try { +/*@egen*/ + HeaderList() (Definition())+/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode HeaderList() : {/*@bgen(jjtree) HeaderList */ + DynamicSerDeHeaderList jjtn000 = new DynamicSerDeHeaderList(JJTHEADERLIST); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) HeaderList */ + try { +/*@egen*/ + (Header())*/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ + +} + +SimpleNode Header() : {/*@bgen(jjtree) Header */ + DynamicSerDeHeader jjtn000 = new DynamicSerDeHeader(JJTHEADER); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) Header */ + try { +/*@egen*/ + Include()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +| Namespace()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode Namespace() : {/*@bgen(jjtree) Namespace */ + DynamicSerDeNamespace jjtn000 = new DynamicSerDeNamespace(JJTNAMESPACE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) Namespace */ + try { +/*@egen*/ + /*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{ + return jjtn000; +} +| + /*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{ + return jjtn000; +} +| + /*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{ + return jjtn000; +} +| + /*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{ + return jjtn000; +} +| + /*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{ + return jjtn000; +} +| + /*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{ + return jjtn000; +} +| + /*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{ + 
return jjtn000; +} +| + /*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{ + return jjtn000; +} +| + /*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{ + return jjtn000; +} +| + /*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{ + return jjtn000; +} +| + /*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{ + return jjtn000; +} +| + /*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{ + return jjtn000; +} +| + /*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{ + return jjtn000; +}/*@bgen(jjtree)*/ + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + + +SimpleNode Include() : {/*@bgen(jjtree) Include */ + DynamicSerDeInclude jjtn000 = new DynamicSerDeInclude(JJTINCLUDE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/ + String fname; + boolean found = false; +} +{/*@bgen(jjtree) Include */ + try { +/*@egen*/ + + fname=.image/*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{ + // bugbug somewhat fragile below substring expression + fname = fname.substring(1,fname.length() - 1); + + // try to find the file on the include path + File f = thrift_grammar.findFile(fname, this.include_path); + if(f != null) { + found = true; + try { + FileInputStream fis = new FileInputStream(f); + thrift_grammar t = new thrift_grammar(fis,this.include_path, false); + t.Start(); + fis.close(); + found = true; + // add in what we found to our type and table tables. + this.tables.putAll(t.tables); + this.types.putAll(t.types); + } catch (Exception e) { + System.out.println("File: " + fname + " - Oops."); + System.out.println(e.getMessage()); + e.printStackTrace(); + } + } + if(!found) { + throw new RuntimeException("include file not found: " + fname); + } + return jjtn000; +}/*@bgen(jjtree)*/ + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + + +SimpleNode Definition() : {/*@bgen(jjtree) Definition */ + DynamicSerDeDefinition jjtn000 = new DynamicSerDeDefinition(JJTDEFINITION); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) Definition */ + try { +/*@egen*/ + Const()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +| Service()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +| TypeDefinition()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode TypeDefinition() : {/*@bgen(jjtree) TypeDefinition */ + DynamicSerDeTypeDefinition jjtn000 = new DynamicSerDeTypeDefinition(JJTTYPEDEFINITION); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) TypeDefinition */ + try { 
+/*@egen*/ + Typedef()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +| Enum()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +| Senum()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +| Struct()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +| Xception()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ + +} + +DynamicSerDeTypedef Typedef() : {/*@bgen(jjtree) Typedef */ + DynamicSerDeTypedef jjtn000 = new DynamicSerDeTypedef(JJTTYPEDEF); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) Typedef */ + try { +/*@egen*/ + + DefinitionType() + jjtn000.name = .image/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + // store the type for later retrieval + this.types.put(jjtn000.name, jjtn000); + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + + +// returning void because we ignore this production. 
+void CommaOrSemicolon() : {/*@bgen(jjtree) CommaOrSemicolon */ + DynamicSerDeCommaOrSemicolon jjtn000 = new DynamicSerDeCommaOrSemicolon(JJTCOMMAORSEMICOLON); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) CommaOrSemicolon */ + try { +/*@egen*/ + "," +| + ";"/*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{ +}/*@bgen(jjtree)*/ + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode Enum() : {/*@bgen(jjtree) Enum */ + DynamicSerDeEnum jjtn000 = new DynamicSerDeEnum(JJTENUM); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) Enum */ + try { +/*@egen*/ + "{" EnumDefList() "}"/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode EnumDefList() : {/*@bgen(jjtree) EnumDefList */ + DynamicSerDeEnumDefList jjtn000 = new DynamicSerDeEnumDefList(JJTENUMDEFLIST); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) EnumDefList */ + try { +/*@egen*/ + (EnumDef())+/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode EnumDef() : {/*@bgen(jjtree) EnumDef */ + DynamicSerDeEnumDef jjtn000 = new DynamicSerDeEnumDef(JJTENUMDEF); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) EnumDef */ + try { +/*@egen*/ + ["=" ] [CommaOrSemicolon()]/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode Senum() : {/*@bgen(jjtree) Senum */ + DynamicSerDeSenum jjtn000 = new DynamicSerDeSenum(JJTSENUM); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) Senum */ + try { +/*@egen*/ + "{" SenumDefList() "}"/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) 
{ + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode SenumDefList() : {/*@bgen(jjtree) SenumDefList */ + DynamicSerDeSenumDefList jjtn000 = new DynamicSerDeSenumDefList(JJTSENUMDEFLIST); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) SenumDefList */ + try { +/*@egen*/ + (SenumDef())+/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode SenumDef() : {/*@bgen(jjtree) SenumDef */ + DynamicSerDeSenumDef jjtn000 = new DynamicSerDeSenumDef(JJTSENUMDEF); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) SenumDef */ + try { +/*@egen*/ + [CommaOrSemicolon()]/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + + +SimpleNode Const() : {/*@bgen(jjtree) Const */ + DynamicSerDeConst jjtn000 = new DynamicSerDeConst(JJTCONST); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) Const */ + try { +/*@egen*/ + FieldType() "=" ConstValue() [CommaOrSemicolon()]/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode ConstValue() : {/*@bgen(jjtree) ConstValue */ + DynamicSerDeConstValue jjtn000 = new DynamicSerDeConstValue(JJTCONSTVALUE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) ConstValue */ + try { +/*@egen*/ + /*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + } +| /*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + } +| /*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + } +| /*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + } +| ConstList()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + } 
+| ConstMap()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode ConstList() : {/*@bgen(jjtree) ConstList */ + DynamicSerDeConstList jjtn000 = new DynamicSerDeConstList(JJTCONSTLIST); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) ConstList */ + try { +/*@egen*/ + "[" ConstListContents() "]"/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode ConstListContents() : {/*@bgen(jjtree) ConstListContents */ + DynamicSerDeConstListContents jjtn000 = new DynamicSerDeConstListContents(JJTCONSTLISTCONTENTS); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) ConstListContents */ + try { +/*@egen*/ + (ConstValue() [CommaOrSemicolon()])+/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode ConstMap() : {/*@bgen(jjtree) ConstMap */ + DynamicSerDeConstMap jjtn000 = new DynamicSerDeConstMap(JJTCONSTMAP); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) ConstMap */ + try { +/*@egen*/ + "{" ConstMapContents() "}"/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode ConstMapContents() : {/*@bgen(jjtree) ConstMapContents */ + DynamicSerDeConstMapContents jjtn000 = new DynamicSerDeConstMapContents(JJTCONSTMAPCONTENTS); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) ConstMapContents */ + try { +/*@egen*/ + (ConstValue() ":" ConstValue() [CommaOrSemicolon()])+/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + 
jjtc000 = false; + } +/*@egen*/ + { + } +|/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +DynamicSerDeStruct Struct() : {/*@bgen(jjtree) Struct */ + DynamicSerDeStruct jjtn000 = new DynamicSerDeStruct(JJTSTRUCT); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/ + +} +{/*@bgen(jjtree) Struct */ + try { +/*@egen*/ + + jjtn000.name = .image + "{" + FieldList() + "}"/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + this.types.put(jjtn000.name,jjtn000); + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + + +SimpleNode Xception() : {/*@bgen(jjtree) Xception */ + DynamicSerDeXception jjtn000 = new DynamicSerDeXception(JJTXCEPTION); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) Xception */ + try { +/*@egen*/ + "{" FieldList() "}"/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + + +SimpleNode Service() : {/*@bgen(jjtree) Service */ + DynamicSerDeService jjtn000 = new DynamicSerDeService(JJTSERVICE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) Service */ + try { +/*@egen*/ + + + Extends() + "{" + FlagArgs() + (Function())+ + UnflagArgs() + "}"/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + // at some point, these should be inserted as a "db" + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode FlagArgs() : {/*@bgen(jjtree) FlagArgs */ + DynamicSerDeFlagArgs jjtn000 = new DynamicSerDeFlagArgs(JJTFLAGARGS); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) FlagArgs */ + try { +/*@egen*//*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, 
true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode UnflagArgs() : {/*@bgen(jjtree) UnflagArgs */ + DynamicSerDeUnflagArgs jjtn000 = new DynamicSerDeUnflagArgs(JJTUNFLAGARGS); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) UnflagArgs */ + try { +/*@egen*//*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode Extends() : {/*@bgen(jjtree) Extends */ + DynamicSerDeExtends jjtn000 = new DynamicSerDeExtends(JJTEXTENDS); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) Extends */ + try { +/*@egen*/ + /*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +|/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + + +DynamicSerDeFunction Function() : {/*@bgen(jjtree) Function */ + DynamicSerDeFunction jjtn000 = new DynamicSerDeFunction(JJTFUNCTION); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) Function */ + try { +/*@egen*/ + // metastore ignores async and type + Async() + FunctionType() + + // the name of the function/table + jjtn000.name = .image + "(" + FieldList() + ")" + Throws() + [CommaOrSemicolon()]/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + + { + this.tables.put(jjtn000.name, jjtn000); + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +void Async() : {/*@bgen(jjtree) Async */ + DynamicSerDeAsync jjtn000 = new DynamicSerDeAsync(JJTASYNC); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) Async */ + try { +/*@egen*/ + +|/*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{}/*@bgen(jjtree)*/ + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +void Throws() : {/*@bgen(jjtree) Throws */ + DynamicSerDeThrows jjtn000 = new DynamicSerDeThrows(JJTTHROWS); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) Throws */ + try { +/*@egen*/ + "(" FieldList() ")" +|/*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{}/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + + +// nothing 
special - just use the DynamicSerDeFieldList's children methods to access the fields +DynamicSerDeFieldList FieldList() : {/*@bgen(jjtree) FieldList */ + DynamicSerDeFieldList jjtn000 = new DynamicSerDeFieldList(JJTFIELDLIST); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/ + this.field_val = -1; +} +{/*@bgen(jjtree) FieldList */ + try { +/*@egen*/ + (Field())*/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + + +DynamicSerDeField Field() : {/*@bgen(jjtree) Field */ + DynamicSerDeField jjtn000 = new DynamicSerDeField(JJTFIELD); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/ + + String fidnum = ""; + String fid; +} +{/*@bgen(jjtree) Field */ + try { +/*@egen*/ + + // parse the field id which is optional + [fidnum=.image ":"] + + // is this field required or optional? default is optional + FieldRequiredness() + + // field type - obviously not optional + FieldType() + + // the name of the field - not optional + jjtn000.name = .image + + // does it have = some value? + FieldValue() + + // take it or leave it + [CommaOrSemicolon()]/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + + { + if(fidnum.length() > 0) { + int fidInt = Integer.valueOf(fidnum); + jjtn000.fieldid = fidInt; + } else { + jjtn000.fieldid = this.field_val--; + } + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + + + +SimpleNode FieldRequiredness() : {/*@bgen(jjtree) FieldRequiredness */ + DynamicSerDeFieldRequiredness jjtn000 = new DynamicSerDeFieldRequiredness(JJTFIELDREQUIREDNESS); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) FieldRequiredness */ + try { +/*@egen*/ + /*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +| /*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +|/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode FieldValue() : {/*@bgen(jjtree) FieldValue */ + DynamicSerDeFieldValue jjtn000 = new DynamicSerDeFieldValue(JJTFIELDVALUE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) FieldValue */ + try { +/*@egen*/ + "=" + ConstValue()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +|/*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); 
+ jjtc000 = false; +} +/*@egen*/ +{ + return jjtn000; +}/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +SimpleNode DefinitionType() : {/*@bgen(jjtree) DefinitionType */ + DynamicSerDeDefinitionType jjtn000 = new DynamicSerDeDefinitionType(JJTDEFINITIONTYPE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) DefinitionType */ + try { +/*@egen*/ +// BaseType() xxx + TypeString()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +| TypeBool()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +| Typei16()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +| Typei32()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +| Typei64()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +| TypeDouble()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +| TypeMap()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +| TypeSet()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +| TypeList()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +void FunctionType() : {/*@bgen(jjtree) FunctionType */ + DynamicSerDeFunctionType jjtn000 = new DynamicSerDeFunctionType(JJTFUNCTIONTYPE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) FunctionType */ + try { +/*@egen*/ + FieldType() +| /*@bgen(jjtree)*/ +{ + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; +} +/*@egen*/ +{}/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +DynamicSerDeFieldType FieldType() : {/*@bgen(jjtree) FieldType */ + DynamicSerDeFieldType jjtn000 = new DynamicSerDeFieldType(JJTFIELDTYPE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/ +} + +{/*@bgen(jjtree) FieldType */ + try { +/*@egen*/ + 
TypeString()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +| TypeBool()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +| Typei16()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +| Typei32()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +| Typei64()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +| TypeDouble()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +| + TypeMap()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +| + TypeSet()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +| + TypeList()/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + } +| + jjtn000.name = .image/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +DynamicSerDeTypeString TypeString() : {/*@bgen(jjtree) TypeString */ + DynamicSerDeTypeString jjtn000 = new DynamicSerDeTypeString(JJTTYPESTRING); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) TypeString */ + try { +/*@egen*/ + /*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +DynamicSerDeTypeByte TypeByte() : {/*@bgen(jjtree) TypeByte */ + DynamicSerDeTypeByte jjtn000 = new DynamicSerDeTypeByte(JJTTYPEBYTE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/ +} +{/*@bgen(jjtree) TypeByte */ + try { +/*@egen*/ + /*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +DynamicSerDeTypei16 Typei16() : {/*@bgen(jjtree) Typei16 */ + DynamicSerDeTypei16 jjtn000 = new DynamicSerDeTypei16(JJTTYPEI16); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/ +} +{/*@bgen(jjtree) Typei16 */ + try { +/*@egen*/ + /*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +DynamicSerDeTypei32 Typei32() : {/*@bgen(jjtree) Typei32 */ + DynamicSerDeTypei32 jjtn000 = new DynamicSerDeTypei32(JJTTYPEI32); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) Typei32 */ + try { +/*@egen*/ + 
/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +DynamicSerDeTypei64 Typei64() : {/*@bgen(jjtree) Typei64 */ + DynamicSerDeTypei64 jjtn000 = new DynamicSerDeTypei64(JJTTYPEI64); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) Typei64 */ + try { +/*@egen*/ + /*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +DynamicSerDeTypeDouble TypeDouble() : {/*@bgen(jjtree) TypeDouble */ + DynamicSerDeTypeDouble jjtn000 = new DynamicSerDeTypeDouble(JJTTYPEDOUBLE); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) TypeDouble */ + try { +/*@egen*/ + /*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +DynamicSerDeTypeBool TypeBool() : {/*@bgen(jjtree) TypeBool */ + DynamicSerDeTypeBool jjtn000 = new DynamicSerDeTypeBool(JJTTYPEBOOL); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) TypeBool */ + try { +/*@egen*/ + /*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +DynamicSerDeTypeMap TypeMap() : {/*@bgen(jjtree) TypeMap */ + DynamicSerDeTypeMap jjtn000 = new DynamicSerDeTypeMap(JJTTYPEMAP); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) TypeMap */ + try { +/*@egen*/ + + "<" + FieldType() + "," + FieldType() + ">"/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +DynamicSerDeTypeSet TypeSet() : {/*@bgen(jjtree) TypeSet */ + DynamicSerDeTypeSet jjtn000 = new DynamicSerDeTypeSet(JJTTYPESET); + boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) TypeSet */ + try { +/*@egen*/ + + "<" + + FieldType() + + ">"/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} + +DynamicSerDeTypeList TypeList() : {/*@bgen(jjtree) TypeList */ + DynamicSerDeTypeList jjtn000 = new DynamicSerDeTypeList(JJTTYPELIST); + 
boolean jjtc000 = true; + jjtree.openNodeScope(jjtn000); +/*@egen*/} +{/*@bgen(jjtree) TypeList */ + try { +/*@egen*/ + + "<" + + FieldType() + + ">"/*@bgen(jjtree)*/ + { + jjtree.closeNodeScope(jjtn000, true); + jjtc000 = false; + } +/*@egen*/ + { + return jjtn000; + }/*@bgen(jjtree)*/ + } catch (Throwable jjte000) { + if (jjtc000) { + jjtree.clearNodeScope(jjtn000); + jjtc000 = false; + } else { + jjtree.popNode(); + } + if (jjte000 instanceof RuntimeException) { + throw (RuntimeException)jjte000; + } + if (jjte000 instanceof ParseException) { + throw (ParseException)jjte000; + } + throw (Error)jjte000; + } finally { + if (jjtc000) { + jjtree.closeNodeScope(jjtn000, true); + } + } +/*@egen*/ +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammarConstants.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammarConstants.java new file mode 100644 index 000000000..b8c7bf8ae --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammarConstants.java @@ -0,0 +1,133 @@ +/* Generated By:JJTree&JavaCC: Do not edit this line. thrift_grammarConstants.java */ +package org.apache.hadoop.hive.serde2.dynamic_type; + +public interface thrift_grammarConstants { + + int EOF = 0; + int tok_const = 8; + int tok_namespace = 9; + int tok_cpp_namespace = 10; + int tok_cpp_include = 11; + int tok_cpp_type = 12; + int tok_java_package = 13; + int tok_cocoa_prefix = 14; + int tok_csharp_namespace = 15; + int tok_php_namespace = 16; + int tok_py_module = 17; + int tok_perl_package = 18; + int tok_ruby_namespace = 19; + int tok_smalltalk_category = 20; + int tok_smalltalk_prefix = 21; + int tok_xsd_all = 22; + int tok_xsd_optional = 23; + int tok_xsd_nillable = 24; + int tok_xsd_namespace = 25; + int tok_xsd_attrs = 26; + int tok_include = 27; + int tok_void = 28; + int tok_bool = 29; + int tok_byte = 30; + int tok_i16 = 31; + int tok_i32 = 32; + int tok_i64 = 33; + int tok_double = 34; + int tok_string = 35; + int tok_slist = 36; + int tok_senum = 37; + int tok_map = 38; + int tok_list = 39; + int tok_set = 40; + int tok_async = 41; + int tok_typedef = 42; + int tok_struct = 43; + int tok_exception = 44; + int tok_extends = 45; + int tok_throws = 46; + int tok_service = 47; + int tok_enum = 48; + int tok_required = 49; + int tok_optional = 50; + int tok_int_constant = 51; + int tok_double_constant = 52; + int IDENTIFIER = 53; + int LETTER = 54; + int DIGIT = 55; + int tok_literal = 56; + int tok_st_identifier = 57; + + int DEFAULT = 0; + + String[] tokenImage = { + "", + "\" \"", + "\"\\t\"", + "\"\\n\"", + "\"\\r\"", + "", + "", + "", + "\"const\"", + "\"namespace\"", + "\"cpp_namespace\"", + "\"cpp_include\"", + "\"cpp_type\"", + "\"java_package\"", + "\"cocoa_prefix\"", + "\"csharp_namespace\"", + "\"php_namespace\"", + "\"py_module\"", + "\"perl_package\"", + "\"ruby_namespace\"", + "\"smalltalk_category\"", + "\"smalltalk_prefix\"", + "\"xsd_all\"", + "\"xsd_optional\"", + "\"xsd_nillable\"", + "\"xsd_namespace\"", + "\"xsd_attrs\"", + "\"include\"", + "\"void\"", + "\"bool\"", + "\"byte\"", + "\"i16\"", + "\"i32\"", + "\"i64\"", + "\"double\"", + "\"string\"", + "\"slist\"", + "\"senum\"", + "\"map\"", + "\"list\"", + "\"set\"", + "\"async\"", + "\"typedef\"", + "\"struct\"", + "\"exception\"", + "\"extends\"", + "\"throws\"", + "\"service\"", + "\"enum\"", + "\"required\"", + "\"optional\"", + "", + "", + "", + "", + "", + "", + "", + "\",\"", + "\";\"", + "\"{\"", + "\"}\"", + "\"=\"", + "\"[\"", + "\"]\"", 
+ "\":\"", + "\"(\"", + "\")\"", + "\"<\"", + "\">\"", + }; + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammarTokenManager.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammarTokenManager.java new file mode 100644 index 000000000..d03a56ef4 --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammarTokenManager.java @@ -0,0 +1,1455 @@ +/* Generated By:JJTree&JavaCC: Do not edit this line. thrift_grammarTokenManager.java */ +package org.apache.hadoop.hive.serde2.dynamic_type; +import java.util.*; +import java.io.*; +import java.net.*; +import com.facebook.thrift.protocol.*; +import com.facebook.thrift.transport.*; +import org.apache.hadoop.hive.serde2.dynamic_type.*; + +public class thrift_grammarTokenManager implements thrift_grammarConstants +{ + public java.io.PrintStream debugStream = System.out; + public void setDebugStream(java.io.PrintStream ds) { debugStream = ds; } +private final int jjStopStringLiteralDfa_0(int pos, long active0, long active1) +{ + switch (pos) + { + case 0: + if ((active0 & 0x7ffffffffff00L) != 0L) + { + jjmatchedKind = 53; + return 35; + } + return -1; + case 1: + if ((active0 & 0x7ffffffffff00L) != 0L) + { + jjmatchedKind = 53; + jjmatchedPos = 1; + return 35; + } + return -1; + case 2: + if ((active0 & 0x14380000000L) != 0L) + return 35; + if ((active0 & 0x7febc7fffff00L) != 0L) + { + jjmatchedKind = 53; + jjmatchedPos = 2; + return 35; + } + return -1; + case 3: + if ((active0 & 0x1008070000000L) != 0L) + return 35; + if ((active0 & 0x6fe3c0fffff00L) != 0L) + { + jjmatchedKind = 53; + jjmatchedPos = 3; + return 35; + } + return -1; + case 4: + if ((active0 & 0x23000000100L) != 0L) + return 35; + if ((active0 & 0x6fc0c0ffffe00L) != 0L) + { + jjmatchedKind = 53; + jjmatchedPos = 4; + return 35; + } + return -1; + case 5: + if ((active0 & 0x480c00000000L) != 0L) + return 35; + if ((active0 & 0x6b4000ffffe00L) != 0L) + { + jjmatchedKind = 53; + jjmatchedPos = 5; + return 35; + } + return -1; + case 6: + if ((active0 & 0xa40008400000L) != 0L) + return 35; + if ((active0 & 0x6100007bffe00L) != 0L) + { + jjmatchedKind = 53; + jjmatchedPos = 6; + return 35; + } + return -1; + case 7: + if ((active0 & 0x6000000001000L) != 0L) + return 35; + if ((active0 & 0x100007bfee00L) != 0L) + { + jjmatchedKind = 53; + jjmatchedPos = 7; + return 35; + } + return -1; + case 8: + if ((active0 & 0x3bdec00L) != 0L) + { + jjmatchedKind = 53; + jjmatchedPos = 8; + return 35; + } + if ((active0 & 0x100004020200L) != 0L) + return 35; + return -1; + case 9: + if ((active0 & 0x3bdec00L) != 0L) + { + jjmatchedKind = 53; + jjmatchedPos = 9; + return 35; + } + return -1; + case 10: + if ((active0 & 0x800L) != 0L) + return 35; + if ((active0 & 0x3bde400L) != 0L) + { + jjmatchedKind = 53; + jjmatchedPos = 10; + return 35; + } + return -1; + case 11: + if ((active0 & 0x1846000L) != 0L) + return 35; + if ((active0 & 0x2398400L) != 0L) + { + jjmatchedKind = 53; + jjmatchedPos = 11; + return 35; + } + return -1; + case 12: + if ((active0 & 0x2010400L) != 0L) + return 35; + if ((active0 & 0x388000L) != 0L) + { + jjmatchedKind = 53; + jjmatchedPos = 12; + return 35; + } + return -1; + case 13: + if ((active0 & 0x80000L) != 0L) + return 35; + if ((active0 & 0x308000L) != 0L) + { + jjmatchedKind = 53; + jjmatchedPos = 13; + return 35; + } + return -1; + case 14: + if ((active0 & 0x308000L) != 0L) + { + jjmatchedKind = 53; + jjmatchedPos = 14; + return 35; + } + return -1; + case 15: + if 
((active0 & 0x208000L) != 0L) + return 35; + if ((active0 & 0x100000L) != 0L) + { + jjmatchedKind = 53; + jjmatchedPos = 15; + return 35; + } + return -1; + case 16: + if ((active0 & 0x100000L) != 0L) + { + jjmatchedKind = 53; + jjmatchedPos = 16; + return 35; + } + return -1; + default : + return -1; + } +} +private final int jjStartNfa_0(int pos, long active0, long active1) +{ + return jjMoveNfa_0(jjStopStringLiteralDfa_0(pos, active0, active1), pos + 1); +} +private final int jjStopAtPos(int pos, int kind) +{ + jjmatchedKind = kind; + jjmatchedPos = pos; + return pos + 1; +} +private final int jjStartNfaWithStates_0(int pos, int kind, int state) +{ + jjmatchedKind = kind; + jjmatchedPos = pos; + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { return pos + 1; } + return jjMoveNfa_0(state, pos + 1); +} +private final int jjMoveStringLiteralDfa0_0() +{ + switch(curChar) + { + case 40: + return jjStopAtPos(0, 66); + case 41: + return jjStopAtPos(0, 67); + case 44: + return jjStopAtPos(0, 58); + case 58: + return jjStopAtPos(0, 65); + case 59: + return jjStopAtPos(0, 59); + case 60: + return jjStopAtPos(0, 68); + case 61: + return jjStopAtPos(0, 62); + case 62: + return jjStopAtPos(0, 69); + case 91: + return jjStopAtPos(0, 63); + case 93: + return jjStopAtPos(0, 64); + case 97: + return jjMoveStringLiteralDfa1_0(0x20000000000L); + case 98: + return jjMoveStringLiteralDfa1_0(0x60000000L); + case 99: + return jjMoveStringLiteralDfa1_0(0xdd00L); + case 100: + return jjMoveStringLiteralDfa1_0(0x400000000L); + case 101: + return jjMoveStringLiteralDfa1_0(0x1300000000000L); + case 105: + return jjMoveStringLiteralDfa1_0(0x388000000L); + case 106: + return jjMoveStringLiteralDfa1_0(0x2000L); + case 108: + return jjMoveStringLiteralDfa1_0(0x8000000000L); + case 109: + return jjMoveStringLiteralDfa1_0(0x4000000000L); + case 110: + return jjMoveStringLiteralDfa1_0(0x200L); + case 111: + return jjMoveStringLiteralDfa1_0(0x4000000000000L); + case 112: + return jjMoveStringLiteralDfa1_0(0x70000L); + case 114: + return jjMoveStringLiteralDfa1_0(0x2000000080000L); + case 115: + return jjMoveStringLiteralDfa1_0(0x893800300000L); + case 116: + return jjMoveStringLiteralDfa1_0(0x440000000000L); + case 118: + return jjMoveStringLiteralDfa1_0(0x10000000L); + case 120: + return jjMoveStringLiteralDfa1_0(0x7c00000L); + case 123: + return jjStopAtPos(0, 60); + case 125: + return jjStopAtPos(0, 61); + default : + return jjMoveNfa_0(0, 0); + } +} +private final int jjMoveStringLiteralDfa1_0(long active0) +{ + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(0, active0, 0L); + return 1; + } + switch(curChar) + { + case 49: + return jjMoveStringLiteralDfa2_0(active0, 0x80000000L); + case 51: + return jjMoveStringLiteralDfa2_0(active0, 0x100000000L); + case 54: + return jjMoveStringLiteralDfa2_0(active0, 0x200000000L); + case 97: + return jjMoveStringLiteralDfa2_0(active0, 0x4000002200L); + case 101: + return jjMoveStringLiteralDfa2_0(active0, 0x2812000040000L); + case 104: + return jjMoveStringLiteralDfa2_0(active0, 0x400000010000L); + case 105: + return jjMoveStringLiteralDfa2_0(active0, 0x8000000000L); + case 108: + return jjMoveStringLiteralDfa2_0(active0, 0x1000000000L); + case 109: + return jjMoveStringLiteralDfa2_0(active0, 0x300000L); + case 110: + return jjMoveStringLiteralDfa2_0(active0, 0x1000008000000L); + case 111: + return jjMoveStringLiteralDfa2_0(active0, 0x430004100L); + case 112: + return 
jjMoveStringLiteralDfa2_0(active0, 0x4000000001c00L); + case 115: + return jjMoveStringLiteralDfa2_0(active0, 0x20007c08000L); + case 116: + return jjMoveStringLiteralDfa2_0(active0, 0x80800000000L); + case 117: + return jjMoveStringLiteralDfa2_0(active0, 0x80000L); + case 120: + return jjMoveStringLiteralDfa2_0(active0, 0x300000000000L); + case 121: + return jjMoveStringLiteralDfa2_0(active0, 0x40040020000L); + default : + break; + } + return jjStartNfa_0(0, active0, 0L); +} +private final int jjMoveStringLiteralDfa2_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjStartNfa_0(0, old0, 0L); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(1, active0, 0L); + return 2; + } + switch(curChar) + { + case 50: + if ((active0 & 0x100000000L) != 0L) + return jjStartNfaWithStates_0(2, 32, 35); + break; + case 52: + if ((active0 & 0x200000000L) != 0L) + return jjStartNfaWithStates_0(2, 33, 35); + break; + case 54: + if ((active0 & 0x80000000L) != 0L) + return jjStartNfaWithStates_0(2, 31, 35); + break; + case 95: + return jjMoveStringLiteralDfa3_0(active0, 0x20000L); + case 97: + return jjMoveStringLiteralDfa3_0(active0, 0x300000L); + case 98: + return jjMoveStringLiteralDfa3_0(active0, 0x80000L); + case 99: + return jjMoveStringLiteralDfa3_0(active0, 0x100008004000L); + case 100: + return jjMoveStringLiteralDfa3_0(active0, 0x7c00000L); + case 104: + return jjMoveStringLiteralDfa3_0(active0, 0x8000L); + case 105: + return jjMoveStringLiteralDfa3_0(active0, 0x1010000000L); + case 109: + return jjMoveStringLiteralDfa3_0(active0, 0x200L); + case 110: + return jjMoveStringLiteralDfa3_0(active0, 0x2000000100L); + case 111: + return jjMoveStringLiteralDfa3_0(active0, 0x20000000L); + case 112: + if ((active0 & 0x4000000000L) != 0L) + return jjStartNfaWithStates_0(2, 38, 35); + return jjMoveStringLiteralDfa3_0(active0, 0x40000011c00L); + case 113: + return jjMoveStringLiteralDfa3_0(active0, 0x2000000000000L); + case 114: + return jjMoveStringLiteralDfa3_0(active0, 0xc80800040000L); + case 115: + return jjMoveStringLiteralDfa3_0(active0, 0x8000000000L); + case 116: + if ((active0 & 0x10000000000L) != 0L) + return jjStartNfaWithStates_0(2, 40, 35); + return jjMoveStringLiteralDfa3_0(active0, 0x4200040000000L); + case 117: + return jjMoveStringLiteralDfa3_0(active0, 0x1000400000000L); + case 118: + return jjMoveStringLiteralDfa3_0(active0, 0x2000L); + case 121: + return jjMoveStringLiteralDfa3_0(active0, 0x20000000000L); + default : + break; + } + return jjStartNfa_0(1, active0, 0L); +} +private final int jjMoveStringLiteralDfa3_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjStartNfa_0(1, old0, 0L); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(2, active0, 0L); + return 3; + } + switch(curChar) + { + case 95: + return jjMoveStringLiteralDfa4_0(active0, 0x7c11c00L); + case 97: + return jjMoveStringLiteralDfa4_0(active0, 0xa000L); + case 98: + return jjMoveStringLiteralDfa4_0(active0, 0x400000000L); + case 100: + if ((active0 & 0x10000000L) != 0L) + return jjStartNfaWithStates_0(3, 28, 35); + break; + case 101: + if ((active0 & 0x40000000L) != 0L) + return jjStartNfaWithStates_0(3, 30, 35); + return jjMoveStringLiteralDfa4_0(active0, 0x340000000200L); + case 105: + return jjMoveStringLiteralDfa4_0(active0, 0x4000800000000L); + case 108: + if ((active0 & 0x20000000L) != 0L) + return jjStartNfaWithStates_0(3, 29, 35); + return 
jjMoveStringLiteralDfa4_0(active0, 0x8340000L); + case 109: + if ((active0 & 0x1000000000000L) != 0L) + return jjStartNfaWithStates_0(3, 48, 35); + return jjMoveStringLiteralDfa4_0(active0, 0x20000L); + case 110: + return jjMoveStringLiteralDfa4_0(active0, 0x20000000000L); + case 111: + return jjMoveStringLiteralDfa4_0(active0, 0x400000004000L); + case 115: + return jjMoveStringLiteralDfa4_0(active0, 0x1000000100L); + case 116: + if ((active0 & 0x8000000000L) != 0L) + return jjStartNfaWithStates_0(3, 39, 35); + break; + case 117: + return jjMoveStringLiteralDfa4_0(active0, 0x2082000000000L); + case 118: + return jjMoveStringLiteralDfa4_0(active0, 0x800000000000L); + case 121: + return jjMoveStringLiteralDfa4_0(active0, 0x80000L); + default : + break; + } + return jjStartNfa_0(2, active0, 0L); +} +private final int jjMoveStringLiteralDfa4_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjStartNfa_0(2, old0, 0L); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(3, active0, 0L); + return 4; + } + switch(curChar) + { + case 95: + return jjMoveStringLiteralDfa5_0(active0, 0xc2000L); + case 97: + return jjMoveStringLiteralDfa5_0(active0, 0x4404000L); + case 99: + if ((active0 & 0x20000000000L) != 0L) + return jjStartNfaWithStates_0(4, 41, 35); + return jjMoveStringLiteralDfa5_0(active0, 0x80000000000L); + case 100: + return jjMoveStringLiteralDfa5_0(active0, 0x40000000000L); + case 105: + return jjMoveStringLiteralDfa5_0(active0, 0x2800000000800L); + case 108: + return jjMoveStringLiteralDfa5_0(active0, 0x400300000L); + case 109: + if ((active0 & 0x2000000000L) != 0L) + return jjStartNfaWithStates_0(4, 37, 35); + break; + case 110: + return jjMoveStringLiteralDfa5_0(active0, 0x200803010400L); + case 111: + return jjMoveStringLiteralDfa5_0(active0, 0x4000000820000L); + case 112: + return jjMoveStringLiteralDfa5_0(active0, 0x100000000000L); + case 114: + return jjMoveStringLiteralDfa5_0(active0, 0x8000L); + case 115: + return jjMoveStringLiteralDfa5_0(active0, 0x200L); + case 116: + if ((active0 & 0x100L) != 0L) + return jjStartNfaWithStates_0(4, 8, 35); + else if ((active0 & 0x1000000000L) != 0L) + return jjStartNfaWithStates_0(4, 36, 35); + return jjMoveStringLiteralDfa5_0(active0, 0x1000L); + case 117: + return jjMoveStringLiteralDfa5_0(active0, 0x8000000L); + case 119: + return jjMoveStringLiteralDfa5_0(active0, 0x400000000000L); + default : + break; + } + return jjStartNfa_0(3, active0, 0L); +} +private final int jjMoveStringLiteralDfa5_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjStartNfa_0(3, old0, 0L); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(4, active0, 0L); + return 5; + } + switch(curChar) + { + case 95: + return jjMoveStringLiteralDfa6_0(active0, 0x4000L); + case 97: + return jjMoveStringLiteralDfa6_0(active0, 0x2010400L); + case 99: + return jjMoveStringLiteralDfa6_0(active0, 0x800000000000L); + case 100: + return jjMoveStringLiteralDfa6_0(active0, 0x200008020000L); + case 101: + if ((active0 & 0x400000000L) != 0L) + return jjStartNfaWithStates_0(5, 34, 35); + return jjMoveStringLiteralDfa6_0(active0, 0x40000000000L); + case 103: + if ((active0 & 0x800000000L) != 0L) + return jjStartNfaWithStates_0(5, 35, 35); + break; + case 105: + return jjMoveStringLiteralDfa6_0(active0, 0x1000000L); + case 108: + return jjMoveStringLiteralDfa6_0(active0, 0x400000L); + case 110: + return jjMoveStringLiteralDfa6_0(active0, 
0x4000000080800L); + case 112: + return jjMoveStringLiteralDfa6_0(active0, 0x84a200L); + case 114: + return jjMoveStringLiteralDfa6_0(active0, 0x2000000000000L); + case 115: + if ((active0 & 0x400000000000L) != 0L) + return jjStartNfaWithStates_0(5, 46, 35); + break; + case 116: + if ((active0 & 0x80000000000L) != 0L) + return jjStartNfaWithStates_0(5, 43, 35); + return jjMoveStringLiteralDfa6_0(active0, 0x100004300000L); + case 121: + return jjMoveStringLiteralDfa6_0(active0, 0x1000L); + default : + break; + } + return jjStartNfa_0(4, active0, 0L); +} +private final int jjMoveStringLiteralDfa6_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjStartNfa_0(4, old0, 0L); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(5, active0, 0L); + return 6; + } + switch(curChar) + { + case 95: + return jjMoveStringLiteralDfa7_0(active0, 0x8000L); + case 97: + return jjMoveStringLiteralDfa7_0(active0, 0x40000003c2200L); + case 99: + return jjMoveStringLiteralDfa7_0(active0, 0x800L); + case 101: + if ((active0 & 0x8000000L) != 0L) + return jjStartNfaWithStates_0(6, 27, 35); + else if ((active0 & 0x800000000000L) != 0L) + return jjStartNfaWithStates_0(6, 47, 35); + return jjMoveStringLiteralDfa7_0(active0, 0x2000000000000L); + case 102: + if ((active0 & 0x40000000000L) != 0L) + return jjStartNfaWithStates_0(6, 42, 35); + break; + case 105: + return jjMoveStringLiteralDfa7_0(active0, 0x100000000000L); + case 108: + if ((active0 & 0x400000L) != 0L) + return jjStartNfaWithStates_0(6, 22, 35); + return jjMoveStringLiteralDfa7_0(active0, 0x1000000L); + case 109: + return jjMoveStringLiteralDfa7_0(active0, 0x2010400L); + case 112: + return jjMoveStringLiteralDfa7_0(active0, 0x5000L); + case 115: + if ((active0 & 0x200000000000L) != 0L) + return jjStartNfaWithStates_0(6, 45, 35); + break; + case 116: + return jjMoveStringLiteralDfa7_0(active0, 0x4800000L); + case 117: + return jjMoveStringLiteralDfa7_0(active0, 0x20000L); + default : + break; + } + return jjStartNfa_0(5, active0, 0L); +} +private final int jjMoveStringLiteralDfa7_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjStartNfa_0(5, old0, 0L); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(6, active0, 0L); + return 7; + } + switch(curChar) + { + case 99: + return jjMoveStringLiteralDfa8_0(active0, 0x42200L); + case 100: + if ((active0 & 0x2000000000000L) != 0L) + return jjStartNfaWithStates_0(7, 49, 35); + break; + case 101: + if ((active0 & 0x1000L) != 0L) + return jjStartNfaWithStates_0(7, 12, 35); + return jjMoveStringLiteralDfa8_0(active0, 0x2010400L); + case 105: + return jjMoveStringLiteralDfa8_0(active0, 0x800000L); + case 108: + if ((active0 & 0x4000000000000L) != 0L) + return jjStartNfaWithStates_0(7, 50, 35); + return jjMoveStringLiteralDfa8_0(active0, 0x1320800L); + case 109: + return jjMoveStringLiteralDfa8_0(active0, 0x80000L); + case 110: + return jjMoveStringLiteralDfa8_0(active0, 0x8000L); + case 111: + return jjMoveStringLiteralDfa8_0(active0, 0x100000000000L); + case 114: + return jjMoveStringLiteralDfa8_0(active0, 0x4004000L); + default : + break; + } + return jjStartNfa_0(6, active0, 0L); +} +private final int jjMoveStringLiteralDfa8_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjStartNfa_0(6, old0, 0L); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(7, active0, 0L); + return 8; + } + 
switch(curChar) + { + case 97: + return jjMoveStringLiteralDfa9_0(active0, 0x1008000L); + case 101: + if ((active0 & 0x200L) != 0L) + return jjStartNfaWithStates_0(8, 9, 35); + else if ((active0 & 0x20000L) != 0L) + return jjStartNfaWithStates_0(8, 17, 35); + return jjMoveStringLiteralDfa9_0(active0, 0x84000L); + case 107: + return jjMoveStringLiteralDfa9_0(active0, 0x342000L); + case 110: + if ((active0 & 0x100000000000L) != 0L) + return jjStartNfaWithStates_0(8, 44, 35); + break; + case 111: + return jjMoveStringLiteralDfa9_0(active0, 0x800000L); + case 115: + if ((active0 & 0x4000000L) != 0L) + return jjStartNfaWithStates_0(8, 26, 35); + return jjMoveStringLiteralDfa9_0(active0, 0x2010400L); + case 117: + return jjMoveStringLiteralDfa9_0(active0, 0x800L); + default : + break; + } + return jjStartNfa_0(7, active0, 0L); +} +private final int jjMoveStringLiteralDfa9_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjStartNfa_0(7, old0, 0L); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(8, active0, 0L); + return 9; + } + switch(curChar) + { + case 95: + return jjMoveStringLiteralDfa10_0(active0, 0x300000L); + case 97: + return jjMoveStringLiteralDfa10_0(active0, 0x42000L); + case 98: + return jjMoveStringLiteralDfa10_0(active0, 0x1000000L); + case 100: + return jjMoveStringLiteralDfa10_0(active0, 0x800L); + case 102: + return jjMoveStringLiteralDfa10_0(active0, 0x4000L); + case 109: + return jjMoveStringLiteralDfa10_0(active0, 0x8000L); + case 110: + return jjMoveStringLiteralDfa10_0(active0, 0x800000L); + case 112: + return jjMoveStringLiteralDfa10_0(active0, 0x2010400L); + case 115: + return jjMoveStringLiteralDfa10_0(active0, 0x80000L); + default : + break; + } + return jjStartNfa_0(8, active0, 0L); +} +private final int jjMoveStringLiteralDfa10_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjStartNfa_0(8, old0, 0L); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(9, active0, 0L); + return 10; + } + switch(curChar) + { + case 97: + return jjMoveStringLiteralDfa11_0(active0, 0x2810400L); + case 99: + return jjMoveStringLiteralDfa11_0(active0, 0x100000L); + case 101: + if ((active0 & 0x800L) != 0L) + return jjStartNfaWithStates_0(10, 11, 35); + return jjMoveStringLiteralDfa11_0(active0, 0x8000L); + case 103: + return jjMoveStringLiteralDfa11_0(active0, 0x42000L); + case 105: + return jjMoveStringLiteralDfa11_0(active0, 0x4000L); + case 108: + return jjMoveStringLiteralDfa11_0(active0, 0x1000000L); + case 112: + return jjMoveStringLiteralDfa11_0(active0, 0x280000L); + default : + break; + } + return jjStartNfa_0(9, active0, 0L); +} +private final int jjMoveStringLiteralDfa11_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjStartNfa_0(9, old0, 0L); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(10, active0, 0L); + return 11; + } + switch(curChar) + { + case 97: + return jjMoveStringLiteralDfa12_0(active0, 0x180000L); + case 99: + return jjMoveStringLiteralDfa12_0(active0, 0x2010400L); + case 101: + if ((active0 & 0x2000L) != 0L) + return jjStartNfaWithStates_0(11, 13, 35); + else if ((active0 & 0x40000L) != 0L) + return jjStartNfaWithStates_0(11, 18, 35); + else if ((active0 & 0x1000000L) != 0L) + return jjStartNfaWithStates_0(11, 24, 35); + break; + case 108: + if ((active0 & 0x800000L) != 0L) + return jjStartNfaWithStates_0(11, 23, 35); + break; 
+ case 114: + return jjMoveStringLiteralDfa12_0(active0, 0x200000L); + case 115: + return jjMoveStringLiteralDfa12_0(active0, 0x8000L); + case 120: + if ((active0 & 0x4000L) != 0L) + return jjStartNfaWithStates_0(11, 14, 35); + break; + default : + break; + } + return jjStartNfa_0(10, active0, 0L); +} +private final int jjMoveStringLiteralDfa12_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjStartNfa_0(10, old0, 0L); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(11, active0, 0L); + return 12; + } + switch(curChar) + { + case 99: + return jjMoveStringLiteralDfa13_0(active0, 0x80000L); + case 101: + if ((active0 & 0x400L) != 0L) + return jjStartNfaWithStates_0(12, 10, 35); + else if ((active0 & 0x10000L) != 0L) + return jjStartNfaWithStates_0(12, 16, 35); + else if ((active0 & 0x2000000L) != 0L) + return jjStartNfaWithStates_0(12, 25, 35); + return jjMoveStringLiteralDfa13_0(active0, 0x200000L); + case 112: + return jjMoveStringLiteralDfa13_0(active0, 0x8000L); + case 116: + return jjMoveStringLiteralDfa13_0(active0, 0x100000L); + default : + break; + } + return jjStartNfa_0(11, active0, 0L); +} +private final int jjMoveStringLiteralDfa13_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjStartNfa_0(11, old0, 0L); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(12, active0, 0L); + return 13; + } + switch(curChar) + { + case 97: + return jjMoveStringLiteralDfa14_0(active0, 0x8000L); + case 101: + if ((active0 & 0x80000L) != 0L) + return jjStartNfaWithStates_0(13, 19, 35); + return jjMoveStringLiteralDfa14_0(active0, 0x100000L); + case 102: + return jjMoveStringLiteralDfa14_0(active0, 0x200000L); + default : + break; + } + return jjStartNfa_0(12, active0, 0L); +} +private final int jjMoveStringLiteralDfa14_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjStartNfa_0(12, old0, 0L); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(13, active0, 0L); + return 14; + } + switch(curChar) + { + case 99: + return jjMoveStringLiteralDfa15_0(active0, 0x8000L); + case 103: + return jjMoveStringLiteralDfa15_0(active0, 0x100000L); + case 105: + return jjMoveStringLiteralDfa15_0(active0, 0x200000L); + default : + break; + } + return jjStartNfa_0(13, active0, 0L); +} +private final int jjMoveStringLiteralDfa15_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjStartNfa_0(13, old0, 0L); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(14, active0, 0L); + return 15; + } + switch(curChar) + { + case 101: + if ((active0 & 0x8000L) != 0L) + return jjStartNfaWithStates_0(15, 15, 35); + break; + case 111: + return jjMoveStringLiteralDfa16_0(active0, 0x100000L); + case 120: + if ((active0 & 0x200000L) != 0L) + return jjStartNfaWithStates_0(15, 21, 35); + break; + default : + break; + } + return jjStartNfa_0(14, active0, 0L); +} +private final int jjMoveStringLiteralDfa16_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjStartNfa_0(14, old0, 0L); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(15, active0, 0L); + return 16; + } + switch(curChar) + { + case 114: + return jjMoveStringLiteralDfa17_0(active0, 0x100000L); + default : + break; + } + return jjStartNfa_0(15, active0, 0L); +} +private final int 
jjMoveStringLiteralDfa17_0(long old0, long active0) +{ + if (((active0 &= old0)) == 0L) + return jjStartNfa_0(15, old0, 0L); + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { + jjStopStringLiteralDfa_0(16, active0, 0L); + return 17; + } + switch(curChar) + { + case 121: + if ((active0 & 0x100000L) != 0L) + return jjStartNfaWithStates_0(17, 20, 35); + break; + default : + break; + } + return jjStartNfa_0(16, active0, 0L); +} +private final void jjCheckNAdd(int state) +{ + if (jjrounds[state] != jjround) + { + jjstateSet[jjnewStateCnt++] = state; + jjrounds[state] = jjround; + } +} +private final void jjAddStates(int start, int end) +{ + do { + jjstateSet[jjnewStateCnt++] = jjnextStates[start]; + } while (start++ != end); +} +private final void jjCheckNAddTwoStates(int state1, int state2) +{ + jjCheckNAdd(state1); + jjCheckNAdd(state2); +} +private final void jjCheckNAddStates(int start, int end) +{ + do { + jjCheckNAdd(jjnextStates[start]); + } while (start++ != end); +} +private final void jjCheckNAddStates(int start) +{ + jjCheckNAdd(jjnextStates[start]); + jjCheckNAdd(jjnextStates[start + 1]); +} +static final long[] jjbitVec0 = { + 0x0L, 0x0L, 0xffffffffffffffffL, 0xffffffffffffffffL +}; +private final int jjMoveNfa_0(int startState, int curPos) +{ + int[] nextStates; + int startsAt = 0; + jjnewStateCnt = 35; + int i = 1; + jjstateSet[0] = startState; + int j, kind = 0x7fffffff; + for (;;) + { + if (++jjround == 0x7fffffff) + ReInitRounds(); + if (curChar < 64) + { + long l = 1L << curChar; + MatchLoop: do + { + switch(jjstateSet[--i]) + { + case 35: + if ((0x3ff600000000000L & l) != 0L) + { + if (kind > 57) + kind = 57; + jjCheckNAdd(15); + } + if ((0x3ff400000000000L & l) != 0L) + { + if (kind > 53) + kind = 53; + jjCheckNAdd(7); + } + break; + case 0: + if ((0x3ff000000000000L & l) != 0L) + { + if (kind > 51) + kind = 51; + jjCheckNAdd(5); + } + else if ((0x280000000000L & l) != 0L) + jjCheckNAddStates(0, 2); + else if (curChar == 47) + jjAddStates(3, 4); + else if (curChar == 39) + jjCheckNAddTwoStates(12, 13); + else if (curChar == 34) + jjCheckNAddTwoStates(9, 10); + else if (curChar == 35) + jjCheckNAddStates(5, 7); + if (curChar == 45) + { + if (kind > 57) + kind = 57; + jjCheckNAdd(15); + } + break; + case 1: + if ((0xfffffffffffffbffL & l) != 0L) + jjCheckNAddStates(5, 7); + break; + case 2: + if ((0x2400L & l) != 0L && kind > 5) + kind = 5; + break; + case 3: + if (curChar == 10 && kind > 5) + kind = 5; + break; + case 4: + if (curChar == 13) + jjstateSet[jjnewStateCnt++] = 3; + break; + case 5: + if ((0x3ff000000000000L & l) == 0L) + break; + if (kind > 51) + kind = 51; + jjCheckNAdd(5); + break; + case 7: + if ((0x3ff400000000000L & l) == 0L) + break; + if (kind > 53) + kind = 53; + jjCheckNAdd(7); + break; + case 8: + if (curChar == 34) + jjCheckNAddTwoStates(9, 10); + break; + case 9: + if ((0xfffffffbffffffffL & l) != 0L) + jjCheckNAddTwoStates(9, 10); + break; + case 10: + if (curChar == 34 && kind > 56) + kind = 56; + break; + case 11: + if (curChar == 39) + jjCheckNAddTwoStates(12, 13); + break; + case 12: + if ((0xffffff7fffffffffL & l) != 0L) + jjCheckNAddTwoStates(12, 13); + break; + case 13: + if (curChar == 39 && kind > 56) + kind = 56; + break; + case 14: + if (curChar != 45) + break; + if (kind > 57) + kind = 57; + jjCheckNAdd(15); + break; + case 15: + if ((0x3ff600000000000L & l) == 0L) + break; + if (kind > 57) + kind = 57; + jjCheckNAdd(15); + break; + case 16: + if (curChar == 47) + jjAddStates(3, 4); + break; + case 17: + if 
(curChar == 47) + jjCheckNAddStates(8, 10); + break; + case 18: + if ((0xffffffffffffdbffL & l) != 0L) + jjCheckNAddStates(8, 10); + break; + case 19: + if ((0x2400L & l) != 0L && kind > 6) + kind = 6; + break; + case 20: + if (curChar == 10 && kind > 6) + kind = 6; + break; + case 21: + if (curChar == 13) + jjstateSet[jjnewStateCnt++] = 20; + break; + case 22: + if (curChar == 42) + jjCheckNAddTwoStates(23, 24); + break; + case 23: + if ((0xfffffbffffffffffL & l) != 0L) + jjCheckNAddTwoStates(23, 24); + break; + case 24: + if (curChar == 42) + jjAddStates(11, 12); + break; + case 25: + if ((0xffff7fffffffffffL & l) != 0L) + jjCheckNAddTwoStates(26, 24); + break; + case 26: + if ((0xfffffbffffffffffL & l) != 0L) + jjCheckNAddTwoStates(26, 24); + break; + case 27: + if (curChar == 47 && kind > 7) + kind = 7; + break; + case 28: + if ((0x280000000000L & l) != 0L) + jjCheckNAddStates(0, 2); + break; + case 29: + if ((0x3ff000000000000L & l) != 0L) + jjCheckNAddTwoStates(29, 30); + break; + case 30: + if (curChar == 46) + jjCheckNAdd(31); + break; + case 31: + if ((0x3ff000000000000L & l) == 0L) + break; + if (kind > 52) + kind = 52; + jjCheckNAddTwoStates(31, 32); + break; + case 33: + if ((0x280000000000L & l) != 0L) + jjCheckNAdd(34); + break; + case 34: + if ((0x3ff000000000000L & l) == 0L) + break; + if (kind > 52) + kind = 52; + jjCheckNAdd(34); + break; + default : break; + } + } while(i != startsAt); + } + else if (curChar < 128) + { + long l = 1L << (curChar & 077); + MatchLoop: do + { + switch(jjstateSet[--i]) + { + case 35: + if ((0x7fffffe87fffffeL & l) != 0L) + { + if (kind > 57) + kind = 57; + jjCheckNAdd(15); + } + if ((0x7fffffe87fffffeL & l) != 0L) + { + if (kind > 53) + kind = 53; + jjCheckNAdd(7); + } + break; + case 0: + if ((0x7fffffe07fffffeL & l) != 0L) + { + if (kind > 57) + kind = 57; + jjCheckNAdd(15); + } + if ((0x7fffffe07fffffeL & l) != 0L) + { + if (kind > 53) + kind = 53; + jjCheckNAdd(7); + } + break; + case 1: + jjAddStates(5, 7); + break; + case 6: + if ((0x7fffffe07fffffeL & l) == 0L) + break; + if (kind > 53) + kind = 53; + jjCheckNAdd(7); + break; + case 7: + if ((0x7fffffe87fffffeL & l) == 0L) + break; + if (kind > 53) + kind = 53; + jjCheckNAdd(7); + break; + case 9: + jjAddStates(13, 14); + break; + case 12: + jjAddStates(15, 16); + break; + case 14: + if ((0x7fffffe07fffffeL & l) == 0L) + break; + if (kind > 57) + kind = 57; + jjCheckNAdd(15); + break; + case 15: + if ((0x7fffffe87fffffeL & l) == 0L) + break; + if (kind > 57) + kind = 57; + jjCheckNAdd(15); + break; + case 18: + jjAddStates(8, 10); + break; + case 23: + jjCheckNAddTwoStates(23, 24); + break; + case 25: + case 26: + jjCheckNAddTwoStates(26, 24); + break; + case 32: + if ((0x2000000020L & l) != 0L) + jjAddStates(17, 18); + break; + default : break; + } + } while(i != startsAt); + } + else + { + int i2 = (curChar & 0xff) >> 6; + long l2 = 1L << (curChar & 077); + MatchLoop: do + { + switch(jjstateSet[--i]) + { + case 1: + if ((jjbitVec0[i2] & l2) != 0L) + jjAddStates(5, 7); + break; + case 9: + if ((jjbitVec0[i2] & l2) != 0L) + jjAddStates(13, 14); + break; + case 12: + if ((jjbitVec0[i2] & l2) != 0L) + jjAddStates(15, 16); + break; + case 18: + if ((jjbitVec0[i2] & l2) != 0L) + jjAddStates(8, 10); + break; + case 23: + if ((jjbitVec0[i2] & l2) != 0L) + jjCheckNAddTwoStates(23, 24); + break; + case 25: + case 26: + if ((jjbitVec0[i2] & l2) != 0L) + jjCheckNAddTwoStates(26, 24); + break; + default : break; + } + } while(i != startsAt); + } + if (kind != 0x7fffffff) + { + jjmatchedKind = 
kind; + jjmatchedPos = curPos; + kind = 0x7fffffff; + } + ++curPos; + if ((i = jjnewStateCnt) == (startsAt = 35 - (jjnewStateCnt = startsAt))) + return curPos; + try { curChar = input_stream.readChar(); } + catch(java.io.IOException e) { return curPos; } + } +} +static final int[] jjnextStates = { + 5, 29, 30, 17, 22, 1, 2, 4, 18, 19, 21, 25, 27, 9, 10, 12, + 13, 33, 34, +}; +public static final String[] jjstrLiteralImages = { +"", null, null, null, null, null, null, null, "\143\157\156\163\164", +"\156\141\155\145\163\160\141\143\145", "\143\160\160\137\156\141\155\145\163\160\141\143\145", +"\143\160\160\137\151\156\143\154\165\144\145", "\143\160\160\137\164\171\160\145", +"\152\141\166\141\137\160\141\143\153\141\147\145", "\143\157\143\157\141\137\160\162\145\146\151\170", +"\143\163\150\141\162\160\137\156\141\155\145\163\160\141\143\145", "\160\150\160\137\156\141\155\145\163\160\141\143\145", +"\160\171\137\155\157\144\165\154\145", "\160\145\162\154\137\160\141\143\153\141\147\145", +"\162\165\142\171\137\156\141\155\145\163\160\141\143\145", "\163\155\141\154\154\164\141\154\153\137\143\141\164\145\147\157\162\171", +"\163\155\141\154\154\164\141\154\153\137\160\162\145\146\151\170", "\170\163\144\137\141\154\154", +"\170\163\144\137\157\160\164\151\157\156\141\154", "\170\163\144\137\156\151\154\154\141\142\154\145", +"\170\163\144\137\156\141\155\145\163\160\141\143\145", "\170\163\144\137\141\164\164\162\163", "\151\156\143\154\165\144\145", +"\166\157\151\144", "\142\157\157\154", "\142\171\164\145", "\151\61\66", "\151\63\62", +"\151\66\64", "\144\157\165\142\154\145", "\163\164\162\151\156\147", +"\163\154\151\163\164", "\163\145\156\165\155", "\155\141\160", "\154\151\163\164", "\163\145\164", +"\141\163\171\156\143", "\164\171\160\145\144\145\146", "\163\164\162\165\143\164", +"\145\170\143\145\160\164\151\157\156", "\145\170\164\145\156\144\163", "\164\150\162\157\167\163", +"\163\145\162\166\151\143\145", "\145\156\165\155", "\162\145\161\165\151\162\145\144", +"\157\160\164\151\157\156\141\154", null, null, null, null, null, null, null, "\54", "\73", "\173", "\175", "\75", +"\133", "\135", "\72", "\50", "\51", "\74", "\76", }; +public static final String[] lexStateNames = { + "DEFAULT", +}; +static final long[] jjtoToken = { + 0xff3fffffffffff01L, 0x3fL, +}; +static final long[] jjtoSkip = { + 0xfeL, 0x0L, +}; +protected SimpleCharStream input_stream; +private final int[] jjrounds = new int[35]; +private final int[] jjstateSet = new int[70]; +protected char curChar; +public thrift_grammarTokenManager(SimpleCharStream stream){ + if (SimpleCharStream.staticFlag) + throw new Error("ERROR: Cannot use a static CharStream class with a non-static lexical analyzer."); + input_stream = stream; +} +public thrift_grammarTokenManager(SimpleCharStream stream, int lexState){ + this(stream); + SwitchTo(lexState); +} +public void ReInit(SimpleCharStream stream) +{ + jjmatchedPos = jjnewStateCnt = 0; + curLexState = defaultLexState; + input_stream = stream; + ReInitRounds(); +} +private final void ReInitRounds() +{ + int i; + jjround = 0x80000001; + for (i = 35; i-- > 0;) + jjrounds[i] = 0x80000000; +} +public void ReInit(SimpleCharStream stream, int lexState) +{ + ReInit(stream); + SwitchTo(lexState); +} +public void SwitchTo(int lexState) +{ + if (lexState >= 1 || lexState < 0) + throw new TokenMgrError("Error: Ignoring invalid lexical state : " + lexState + ". 
State unchanged.", TokenMgrError.INVALID_LEXICAL_STATE); + else + curLexState = lexState; +} + +protected Token jjFillToken() +{ + Token t = Token.newToken(jjmatchedKind); + t.kind = jjmatchedKind; + String im = jjstrLiteralImages[jjmatchedKind]; + t.image = (im == null) ? input_stream.GetImage() : im; + t.beginLine = input_stream.getBeginLine(); + t.beginColumn = input_stream.getBeginColumn(); + t.endLine = input_stream.getEndLine(); + t.endColumn = input_stream.getEndColumn(); + return t; +} + +int curLexState = 0; +int defaultLexState = 0; +int jjnewStateCnt; +int jjround; +int jjmatchedPos; +int jjmatchedKind; + +public Token getNextToken() +{ + int kind; + Token specialToken = null; + Token matchedToken; + int curPos = 0; + + EOFLoop : + for (;;) + { + try + { + curChar = input_stream.BeginToken(); + } + catch(java.io.IOException e) + { + jjmatchedKind = 0; + matchedToken = jjFillToken(); + return matchedToken; + } + + try { input_stream.backup(0); + while (curChar <= 32 && (0x100002600L & (1L << curChar)) != 0L) + curChar = input_stream.BeginToken(); + } + catch (java.io.IOException e1) { continue EOFLoop; } + jjmatchedKind = 0x7fffffff; + jjmatchedPos = 0; + curPos = jjMoveStringLiteralDfa0_0(); + if (jjmatchedKind != 0x7fffffff) + { + if (jjmatchedPos + 1 < curPos) + input_stream.backup(curPos - jjmatchedPos - 1); + if ((jjtoToken[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L) + { + matchedToken = jjFillToken(); + return matchedToken; + } + else + { + continue EOFLoop; + } + } + int error_line = input_stream.getEndLine(); + int error_column = input_stream.getEndColumn(); + String error_after = null; + boolean EOFSeen = false; + try { input_stream.readChar(); input_stream.backup(1); } + catch (java.io.IOException e1) { + EOFSeen = true; + error_after = curPos <= 1 ? "" : input_stream.GetImage(); + if (curChar == '\n' || curChar == '\r') { + error_line++; + error_column = 0; + } + else + error_column++; + } + if (!EOFSeen) { + input_stream.backup(1); + error_after = curPos <= 1 ? "" : input_stream.GetImage(); + } + throw new TokenMgrError(EOFSeen, curLexState, error_line, error_column, error_after, curChar, TokenMgrError.LEXICAL_ERROR); + } +} + +} diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammarTreeConstants.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammarTreeConstants.java new file mode 100644 index 000000000..8960e8e9a --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammarTreeConstants.java @@ -0,0 +1,105 @@ +/* Generated By:JJTree: Do not edit this line. 
/data/users/zshao/tubbs-svnroot/projects/hadoop/trunk/VENDOR/hadoop-0.17/src/contrib/hive/serde/src/gen-java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammarTreeConstants.java */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +public interface thrift_grammarTreeConstants +{ + public int JJTSTART = 0; + public int JJTHEADERLIST = 1; + public int JJTHEADER = 2; + public int JJTNAMESPACE = 3; + public int JJTINCLUDE = 4; + public int JJTDEFINITION = 5; + public int JJTTYPEDEFINITION = 6; + public int JJTTYPEDEF = 7; + public int JJTCOMMAORSEMICOLON = 8; + public int JJTENUM = 9; + public int JJTENUMDEFLIST = 10; + public int JJTENUMDEF = 11; + public int JJTSENUM = 12; + public int JJTSENUMDEFLIST = 13; + public int JJTSENUMDEF = 14; + public int JJTCONST = 15; + public int JJTCONSTVALUE = 16; + public int JJTCONSTLIST = 17; + public int JJTCONSTLISTCONTENTS = 18; + public int JJTCONSTMAP = 19; + public int JJTCONSTMAPCONTENTS = 20; + public int JJTSTRUCT = 21; + public int JJTXCEPTION = 22; + public int JJTSERVICE = 23; + public int JJTFLAGARGS = 24; + public int JJTUNFLAGARGS = 25; + public int JJTEXTENDS = 26; + public int JJTFUNCTION = 27; + public int JJTASYNC = 28; + public int JJTTHROWS = 29; + public int JJTFIELDLIST = 30; + public int JJTFIELD = 31; + public int JJTFIELDREQUIREDNESS = 32; + public int JJTFIELDVALUE = 33; + public int JJTDEFINITIONTYPE = 34; + public int JJTFUNCTIONTYPE = 35; + public int JJTFIELDTYPE = 36; + public int JJTTYPESTRING = 37; + public int JJTTYPEBYTE = 38; + public int JJTTYPEI16 = 39; + public int JJTTYPEI32 = 40; + public int JJTTYPEI64 = 41; + public int JJTTYPEDOUBLE = 42; + public int JJTTYPEBOOL = 43; + public int JJTTYPEMAP = 44; + public int JJTTYPESET = 45; + public int JJTTYPELIST = 46; + + + public String[] jjtNodeName = { + "Start", + "HeaderList", + "Header", + "Namespace", + "Include", + "Definition", + "TypeDefinition", + "Typedef", + "CommaOrSemicolon", + "Enum", + "EnumDefList", + "EnumDef", + "Senum", + "SenumDefList", + "SenumDef", + "Const", + "ConstValue", + "ConstList", + "ConstListContents", + "ConstMap", + "ConstMapContents", + "Struct", + "Xception", + "Service", + "FlagArgs", + "UnflagArgs", + "Extends", + "Function", + "Async", + "Throws", + "FieldList", + "Field", + "FieldRequiredness", + "FieldValue", + "DefinitionType", + "FunctionType", + "FieldType", + "TypeString", + "TypeByte", + "Typei16", + "Typei32", + "Typei64", + "TypeDouble", + "TypeBool", + "TypeMap", + "TypeSet", + "TypeList", + }; +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/thrift/Complex.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/thrift/test/Complex.java similarity index 61% rename from ql/src/java/org/apache/hadoop/hive/ql/thrift/Complex.java rename to serde/src/gen-java/org/apache/hadoop/hive/serde2/thrift/test/Complex.java index 223d010bd..b2999d923 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/thrift/Complex.java +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/thrift/test/Complex.java @@ -1,27 +1,9 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - /** * Autogenerated by Thrift * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING */ -package org.apache.hadoop.hive.ql.thrift; +package org.apache.hadoop.hive.serde2.thrift.test; import java.util.List; import java.util.ArrayList; @@ -36,20 +18,20 @@ public class Complex implements TBase, java.io.Serializable { public int aint; - public String astring; + public String aString; public List lint; - public List lstring; - public List lintstring; - public Map mstringstring; + public List lString; + public List lintString; + public Map mStringString; public final Isset __isset = new Isset(); public static final class Isset implements java.io.Serializable { public boolean aint = false; - public boolean astring = false; + public boolean aString = false; public boolean lint = false; - public boolean lstring = false; - public boolean lintstring = false; - public boolean mstringstring = false; + public boolean lString = false; + public boolean lintString = false; + public boolean mStringString = false; } public Complex() { @@ -57,25 +39,25 @@ public Complex() { public Complex( int aint, - String astring, + String aString, List lint, - List lstring, - List lintstring, - Map mstringstring) + List lString, + List lintString, + Map mStringString) { this(); this.aint = aint; this.__isset.aint = true; - this.astring = astring; - this.__isset.astring = true; + this.aString = aString; + this.__isset.aString = true; this.lint = lint; this.__isset.lint = true; - this.lstring = lstring; - this.__isset.lstring = true; - this.lintstring = lintstring; - this.__isset.lintstring = true; - this.mstringstring = mstringstring; - this.__isset.mstringstring = true; + this.lString = lString; + this.__isset.lString = true; + this.lintString = lintString; + this.__isset.lintString = true; + this.mStringString = mStringString; + this.__isset.mStringString = true; } public boolean equals(Object that) { @@ -99,12 +81,12 @@ public boolean equals(Complex that) { return false; } - boolean this_present_astring = true && (this.astring != null); - boolean that_present_astring = true && (that.astring != null); - if (this_present_astring || that_present_astring) { - if (!(this_present_astring && that_present_astring)) + boolean this_present_aString = true && (this.aString != null); + boolean that_present_aString = true && (that.aString != null); + if (this_present_aString || that_present_aString) { + if (!(this_present_aString && that_present_aString)) return false; - if (!this.astring.equals(that.astring)) + if (!this.aString.equals(that.aString)) return false; } @@ -117,30 +99,30 @@ public boolean equals(Complex that) { return false; } - boolean this_present_lstring = true && (this.lstring != null); - boolean that_present_lstring = true && (that.lstring != null); - if (this_present_lstring || that_present_lstring) { - if (!(this_present_lstring && that_present_lstring)) + boolean this_present_lString = true && (this.lString != null); + boolean that_present_lString = true && (that.lString != null); + if (this_present_lString || that_present_lString) { + if (!(this_present_lString && 
that_present_lString)) return false; - if (!this.lstring.equals(that.lstring)) + if (!this.lString.equals(that.lString)) return false; } - boolean this_present_lintstring = true && (this.lintstring != null); - boolean that_present_lintstring = true && (that.lintstring != null); - if (this_present_lintstring || that_present_lintstring) { - if (!(this_present_lintstring && that_present_lintstring)) + boolean this_present_lintString = true && (this.lintString != null); + boolean that_present_lintString = true && (that.lintString != null); + if (this_present_lintString || that_present_lintString) { + if (!(this_present_lintString && that_present_lintString)) return false; - if (!this.lintstring.equals(that.lintstring)) + if (!this.lintString.equals(that.lintString)) return false; } - boolean this_present_mstringstring = true && (this.mstringstring != null); - boolean that_present_mstringstring = true && (that.mstringstring != null); - if (this_present_mstringstring || that_present_mstringstring) { - if (!(this_present_mstringstring && that_present_mstringstring)) + boolean this_present_mStringString = true && (this.mStringString != null); + boolean that_present_mStringString = true && (that.mStringString != null); + if (this_present_mStringString || that_present_mStringString) { + if (!(this_present_mStringString && that_present_mStringString)) return false; - if (!this.mstringstring.equals(that.mstringstring)) + if (!this.mStringString.equals(that.mStringString)) return false; } @@ -172,8 +154,8 @@ public void read(TProtocol iprot) throws TException { break; case 2: if (field.type == TType.STRING) { - this.astring = iprot.readString(); - this.__isset.astring = true; + this.aString = iprot.readString(); + this.__isset.aString = true; } else { TProtocolUtil.skip(iprot, field.type); } @@ -200,16 +182,16 @@ public void read(TProtocol iprot) throws TException { if (field.type == TType.LIST) { { TList _list3 = iprot.readListBegin(); - this.lstring = new ArrayList(_list3.size); + this.lString = new ArrayList(_list3.size); for (int _i4 = 0; _i4 < _list3.size; ++_i4) { String _elem5 = null; _elem5 = iprot.readString(); - this.lstring.add(_elem5); + this.lString.add(_elem5); } iprot.readListEnd(); } - this.__isset.lstring = true; + this.__isset.lString = true; } else { TProtocolUtil.skip(iprot, field.type); } @@ -218,17 +200,17 @@ public void read(TProtocol iprot) throws TException { if (field.type == TType.LIST) { { TList _list6 = iprot.readListBegin(); - this.lintstring = new ArrayList(_list6.size); + this.lintString = new ArrayList(_list6.size); for (int _i7 = 0; _i7 < _list6.size; ++_i7) { IntString _elem8 = new IntString(); _elem8 = new IntString(); _elem8.read(iprot); - this.lintstring.add(_elem8); + this.lintString.add(_elem8); } iprot.readListEnd(); } - this.__isset.lintstring = true; + this.__isset.lintString = true; } else { TProtocolUtil.skip(iprot, field.type); } @@ -237,18 +219,18 @@ public void read(TProtocol iprot) throws TException { if (field.type == TType.MAP) { { TMap _map9 = iprot.readMapBegin(); - this.mstringstring = new HashMap(2*_map9.size); + this.mStringString = new HashMap(2*_map9.size); for (int _i10 = 0; _i10 < _map9.size; ++_i10) { String _key11; String _val12; _key11 = iprot.readString(); _val12 = iprot.readString(); - this.mstringstring.put(_key11, _val12); + this.mStringString.put(_key11, _val12); } iprot.readMapEnd(); } - this.__isset.mstringstring = true; + this.__isset.mStringString = true; } else { TProtocolUtil.skip(iprot, field.type); } @@ -272,12 +254,12 @@ 
public void write(TProtocol oprot) throws TException { oprot.writeFieldBegin(field); oprot.writeI32(this.aint); oprot.writeFieldEnd(); - if (this.astring != null) { - field.name = "astring"; + if (this.aString != null) { + field.name = "aString"; field.type = TType.STRING; field.id = 2; oprot.writeFieldBegin(field); - oprot.writeString(this.astring); + oprot.writeString(this.aString); oprot.writeFieldEnd(); } if (this.lint != null) { @@ -294,44 +276,44 @@ public void write(TProtocol oprot) throws TException { } oprot.writeFieldEnd(); } - if (this.lstring != null) { - field.name = "lstring"; + if (this.lString != null) { + field.name = "lString"; field.type = TType.LIST; field.id = 4; oprot.writeFieldBegin(field); { - oprot.writeListBegin(new TList(TType.STRING, this.lstring.size())); - for (String _iter14 : this.lstring) { + oprot.writeListBegin(new TList(TType.STRING, this.lString.size())); + for (String _iter14 : this.lString) { oprot.writeString(_iter14); } oprot.writeListEnd(); } oprot.writeFieldEnd(); } - if (this.lintstring != null) { - field.name = "lintstring"; + if (this.lintString != null) { + field.name = "lintString"; field.type = TType.LIST; field.id = 5; oprot.writeFieldBegin(field); { - oprot.writeListBegin(new TList(TType.STRUCT, this.lintstring.size())); - for (IntString _iter15 : this.lintstring) { + oprot.writeListBegin(new TList(TType.STRUCT, this.lintString.size())); + for (IntString _iter15 : this.lintString) { _iter15.write(oprot); } oprot.writeListEnd(); } oprot.writeFieldEnd(); } - if (this.mstringstring != null) { - field.name = "mstringstring"; + if (this.mStringString != null) { + field.name = "mStringString"; field.type = TType.MAP; field.id = 6; oprot.writeFieldBegin(field); { - oprot.writeMapBegin(new TMap(TType.STRING, TType.STRING, this.mstringstring.size())); - for (String _iter16 : this.mstringstring.keySet()) { + oprot.writeMapBegin(new TMap(TType.STRING, TType.STRING, this.mStringString.size())); + for (String _iter16 : this.mStringString.keySet()) { oprot.writeString(_iter16); - oprot.writeString(this.mstringstring.get(_iter16)); + oprot.writeString(this.mStringString.get(_iter16)); } oprot.writeMapEnd(); } @@ -345,16 +327,16 @@ public String toString() { StringBuilder sb = new StringBuilder("Complex("); sb.append("aint:"); sb.append(this.aint); - sb.append(",astring:"); - sb.append(this.astring); + sb.append(",aString:"); + sb.append(this.aString); sb.append(",lint:"); sb.append(this.lint); - sb.append(",lstring:"); - sb.append(this.lstring); - sb.append(",lintstring:"); - sb.append(this.lintstring); - sb.append(",mstringstring:"); - sb.append(this.mstringstring); + sb.append(",lString:"); + sb.append(this.lString); + sb.append(",lintString:"); + sb.append(this.lintString); + sb.append(",mStringString:"); + sb.append(this.mStringString); sb.append(")"); return sb.toString(); } diff --git a/serde/src/gen-java/org/apache/hadoop/hive/serde2/thrift/test/Constants.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/thrift/test/Constants.java new file mode 100644 index 000000000..c8802ae8a --- /dev/null +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/thrift/test/Constants.java @@ -0,0 +1,18 @@ +/** + * Autogenerated by Thrift + * + * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING + */ +package org.apache.hadoop.hive.serde2.thrift.test; + +import java.util.List; +import java.util.ArrayList; +import java.util.Map; +import java.util.HashMap; +import java.util.Set; +import java.util.HashSet; +import com.facebook.thrift.*; + +public 
class Constants { + +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/thrift/IntString.java b/serde/src/gen-java/org/apache/hadoop/hive/serde2/thrift/test/IntString.java similarity index 65% rename from ql/src/java/org/apache/hadoop/hive/ql/thrift/IntString.java rename to serde/src/gen-java/org/apache/hadoop/hive/serde2/thrift/test/IntString.java index 6515cd2c9..29264ce03 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/thrift/IntString.java +++ b/serde/src/gen-java/org/apache/hadoop/hive/serde2/thrift/test/IntString.java @@ -1,27 +1,9 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - /** * Autogenerated by Thrift * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING */ -package org.apache.hadoop.hive.ql.thrift; +package org.apache.hadoop.hive.serde2.thrift.test; import java.util.List; import java.util.ArrayList; @@ -36,12 +18,12 @@ public class IntString implements TBase, java.io.Serializable { public int myint; - public String mystring; + public String myString; public final Isset __isset = new Isset(); public static final class Isset implements java.io.Serializable { public boolean myint = false; - public boolean mystring = false; + public boolean myString = false; } public IntString() { @@ -49,13 +31,13 @@ public IntString() { public IntString( int myint, - String mystring) + String myString) { this(); this.myint = myint; this.__isset.myint = true; - this.mystring = mystring; - this.__isset.mystring = true; + this.myString = myString; + this.__isset.myString = true; } public boolean equals(Object that) { @@ -79,12 +61,12 @@ public boolean equals(IntString that) { return false; } - boolean this_present_mystring = true && (this.mystring != null); - boolean that_present_mystring = true && (that.mystring != null); - if (this_present_mystring || that_present_mystring) { - if (!(this_present_mystring && that_present_mystring)) + boolean this_present_myString = true && (this.myString != null); + boolean that_present_myString = true && (that.myString != null); + if (this_present_myString || that_present_myString) { + if (!(this_present_myString && that_present_myString)) return false; - if (!this.mystring.equals(that.mystring)) + if (!this.myString.equals(that.myString)) return false; } @@ -116,8 +98,8 @@ public void read(TProtocol iprot) throws TException { break; case 2: if (field.type == TType.STRING) { - this.mystring = iprot.readString(); - this.__isset.mystring = true; + this.myString = iprot.readString(); + this.__isset.myString = true; } else { TProtocolUtil.skip(iprot, field.type); } @@ -141,12 +123,12 @@ public void write(TProtocol oprot) throws TException { oprot.writeFieldBegin(field); oprot.writeI32(this.myint); oprot.writeFieldEnd(); - if (this.mystring != null) { - field.name = "mystring"; + if 
(this.myString != null) { + field.name = "myString"; field.type = TType.STRING; field.id = 2; oprot.writeFieldBegin(field); - oprot.writeString(this.mystring); + oprot.writeString(this.myString); oprot.writeFieldEnd(); } oprot.writeFieldStop(); @@ -157,8 +139,8 @@ public String toString() { StringBuilder sb = new StringBuilder("IntString("); sb.append("myint:"); sb.append(this.myint); - sb.append(",mystring:"); - sb.append(this.mystring); + sb.append(",myString:"); + sb.append(this.myString); sb.append(")"); return sb.toString(); } diff --git a/serde/src/gen-py/serde/__init__.py b/serde/src/gen-py/org_apache_hadoop_hive_serde/__init__.py similarity index 100% rename from serde/src/gen-py/serde/__init__.py rename to serde/src/gen-py/org_apache_hadoop_hive_serde/__init__.py diff --git a/serde/src/gen-py/org_apache_hadoop_hive_serde/constants.py b/serde/src/gen-py/org_apache_hadoop_hive_serde/constants.py new file mode 100644 index 000000000..d4ff69dfa --- /dev/null +++ b/serde/src/gen-py/org_apache_hadoop_hive_serde/constants.py @@ -0,0 +1,64 @@ +# +# Autogenerated by Thrift +# +# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING +# + +from thrift.Thrift import * +from ttypes import * + +SERIALIZATION_LIB = 'serialization.lib' + +SERIALIZATION_CLASS = 'serialization.class' + +SERIALIZATION_FORMAT = 'serialization.format' + +SERIALIZATION_DDL = 'serialization.ddl' + +FIELD_DELIM = 'field.delim' + +COLLECTION_DELIM = 'colelction.delim' + +LINE_DELIM = 'line.delim' + +MAPKEY_DELIM = 'mapkey.delim' + +TINYINT_TYPE_NAME = 'tinyint' + +INT_TYPE_NAME = 'int' + +BIGINT_TYPE_NAME = 'bigint' + +FLOAT_TYPE_NAME = 'float' + +DOUBLE_TYPE_NAME = 'double' + +STRING_TYPE_NAME = 'string' + +DATE_TYPE_NAME = 'date' + +DATETIME_TYPE_NAME = 'datetime' + +TIMESTAMP_TYPE_NAME = 'timestamp' + +LIST_TYPE_NAME = 'array' + +MAP_TYPE_NAME = 'map' + +PrimitiveTypes = set([ + 'tinyint', + 'int', + 'bigint', + 'float', + 'double', + 'string', + 'date', + 'datetime', + 'timestamp', +]) + +CollectionTypes = set([ + 'array', + 'map', +]) + diff --git a/serde/src/gen-py/serde/ttypes.py b/serde/src/gen-py/org_apache_hadoop_hive_serde/ttypes.py similarity index 100% rename from serde/src/gen-py/serde/ttypes.py rename to serde/src/gen-py/org_apache_hadoop_hive_serde/ttypes.py diff --git a/serde/src/gen-py/serde/constants.py b/serde/src/gen-py/serde/constants.py deleted file mode 100644 index 166a1d9ca..000000000 --- a/serde/src/gen-py/serde/constants.py +++ /dev/null @@ -1,17 +0,0 @@ -# -# Autogenerated by Thrift -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# - -from thrift.Thrift import * -from ttypes import * - -SERIALIZATION_LIB = 'serialization.lib' - -SERIALIZATION_CLASS = 'serialization.class' - -SERIALIZATION_FORMAT = 'serialization.format' - -SERIALIZATION_DDL = 'serialization.ddl' - diff --git a/serde/src/java/org/apache/hadoop/hive/serde/ByteStreamTypedSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde/ByteStreamTypedSerDe.java index 5fee41b1d..d34be0d4e 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde/ByteStreamTypedSerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde/ByteStreamTypedSerDe.java @@ -48,6 +48,7 @@ public ByteStreamTypedSerDe(Class argType) throws SerDeException { bos = new ByteStream.Output(); bis = new ByteStream.Input(); cachedBw = new BytesWritable(); + json_serializer = new TSerializer(); } public Object deserialize(Writable field) throws SerDeException { diff --git a/serde/src/java/org/apache/hadoop/hive/serde/ReflectionSerDeField.java 
b/serde/src/java/org/apache/hadoop/hive/serde/ReflectionSerDeField.java index 789ca83d6..e44948d63 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde/ReflectionSerDeField.java +++ b/serde/src/java/org/apache/hadoop/hive/serde/ReflectionSerDeField.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hive.serde; import java.lang.reflect.*; +import java.util.HashMap; /** * The default implementation of Hive Field based on Java Reflection. @@ -35,6 +36,7 @@ public class ReflectionSerDeField implements SerDeField { protected Class _valueClass; protected Class _keyClass; + private static HashMap cacheFields = new HashMap(); public static boolean isClassPrimitive(Class c) { return ((c == String.class) || (c == Boolean.class) || @@ -46,7 +48,27 @@ public static boolean isClassPrimitive(Class c) { public ReflectionSerDeField(String className, String fieldName) throws SerDeException { try { _parentClass = Class.forName(className); - _field = _parentClass.getDeclaredField(fieldName); + + // hack for now. Get all the fields and do a case-insensitive search over them + // _field = _parentClass.getDeclaredField(fieldName); + Field[] allFields = cacheFields.get(className); + if (allFields == null) { + allFields = _parentClass.getDeclaredFields(); + cacheFields.put(className, allFields); + } + + boolean found = false; + for (Field f: allFields) { + if (f.getName().equalsIgnoreCase(fieldName)) { + _field = f; + found = true; + break; + } + } + + if (!found) + throw new SerDeException("Illegal class or member:"+className+"."+fieldName); + _isList = java.util.List.class.isAssignableFrom(_field.getType()); _isMap = java.util.Map.class.isAssignableFrom(_field.getType()); _class = _field.getType(); @@ -123,7 +145,7 @@ public Class getMapValueType() { } public String getName() { - return _field.getName(); + return _field.getName().toLowerCase(); } diff --git a/serde/src/java/org/apache/hadoop/hive/serde/SerDe.java b/serde/src/java/org/apache/hadoop/hive/serde/SerDe.java index 817c7f198..5e2735728 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde/SerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde/SerDe.java @@ -91,6 +91,4 @@ public interface SerDe { public SerDeField getFieldFromExpression(SerDeField parentField, String fieldExpression) throws SerDeException; - public String getShortName() ; - } diff --git a/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDe.java index 1b4ad1af9..aed0466c2 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde/dynamic_type/DynamicSerDe.java @@ -42,25 +42,6 @@ public class DynamicSerDe implements SerDe, Serializable { transient private TProtocol oprot_; transient private TProtocol iprot_; - public String getShortName() { - return shortName(); - } - - public static String shortName() { - return "dynamic_thrift"; - } - - static { - StackTraceElement[] sTrace = new Exception().getStackTrace(); - String className = sTrace[0].getClassName(); - try { - SerDeUtils.registerSerDe(shortName(), Class.forName(className)); - } catch(Exception e) { - throw new RuntimeException(e); - } - } - - public static final String META_TABLE_NAME = "name"; static public void main(String args[]) { diff --git a/serde/src/java/org/apache/hadoop/hive/serde/jute/JuteSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde/jute/JuteSerDe.java index fe040901a..04b12437e 100644 --- 
a/serde/src/java/org/apache/hadoop/hive/serde/jute/JuteSerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde/jute/JuteSerDe.java @@ -34,24 +34,6 @@ public class JuteSerDe implements SerDe { - public static String shortName() { - return "jute"; - } - - public String getShortName() { - return shortName(); - } - - static { - StackTraceElement[] sTrace = new Exception().getStackTrace(); - String className = sTrace[0].getClassName(); - try { - SerDeUtils.registerSerDe(shortName(), Class.forName(className)); - } catch(Exception e) { - throw new RuntimeException(e); - } - } - protected Class type; private static final Log LOG = LogFactory.getLog("hive.metastore"); public void initialize(Configuration job, Properties tbl) throws SerDeException { diff --git a/serde/src/java/org/apache/hadoop/hive/serde/simple_meta/MetadataTypedColumnsetSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde/simple_meta/MetadataTypedColumnsetSerDe.java index 268744e8f..d3af7ac87 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde/simple_meta/MetadataTypedColumnsetSerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde/simple_meta/MetadataTypedColumnsetSerDe.java @@ -37,25 +37,6 @@ public class MetadataTypedColumnsetSerDe extends ByteStreamTypedSerDe implement protected TIOStreamTransport outTransport, inTransport; protected TProtocol outProtocol, inProtocol; - public String getShortName() { - return shortName(); - } - - - public static String shortName() { - return "simple_meta"; - } - - static { - StackTraceElement[] sTrace = new Exception().getStackTrace(); - String className = sTrace[0].getClassName(); - try { - SerDeUtils.registerSerDe(shortName(), Class.forName(className)); - } catch(Exception e) { - throw new RuntimeException(e); - } - } - final public static String DefaultSeparator = "\001"; protected boolean inStreaming; @@ -92,7 +73,7 @@ public MetadataTypedColumnsetSerDe (Class argType, TProtocolFactory inFactory inTransport = new TIOStreamTransport(bis); outProtocol = outFactory.getProtocol(outTransport); inProtocol = inFactory.getProtocol(inTransport); - json_serializer = new TSerializer(new TJSONProtocol.Factory()); + json_serializer = new TSerializer(new TSimpleJSONProtocol.Factory()); } public void initialize(Configuration job, Properties tbl) throws SerDeException { diff --git a/serde/src/java/org/apache/hadoop/hive/serde/thrift/ThriftByteStreamTypedSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde/thrift/ThriftByteStreamTypedSerDe.java index e4818ad5e..a7220a45c 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde/thrift/ThriftByteStreamTypedSerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde/thrift/ThriftByteStreamTypedSerDe.java @@ -45,31 +45,13 @@ private void init(TProtocolFactory inFactory, TProtocolFactory outFactory) throw inTransport = new TIOStreamTransport(bis); outProtocol = outFactory.getProtocol(outTransport); inProtocol = inFactory.getProtocol(inTransport); - json_serializer = new TSerializer(new TJSONProtocol.Factory()); + json_serializer = new TSerializer(new TSimpleJSONProtocol.Factory()); } public void initialize(Configuration job, Properties tbl) throws SerDeException { throw new SerDeException("ThriftByteStreamTypedSerDe is still semi-abstract"); } - public static String shortName() { - return "thriftbytestream"; - } - - public String getShortName() { - return shortName(); - } - - static { - StackTraceElement[] sTrace = new Exception().getStackTrace(); - String className = sTrace[0].getClassName(); - try { - 
SerDeUtils.registerSerDe(shortName(), Class.forName(className)); - } catch(Exception e) { - throw new RuntimeException(e); - } - } - public ThriftByteStreamTypedSerDe(Class argType, TProtocolFactory inFactory, TProtocolFactory outFactory) throws SerDeException { super(argType); diff --git a/serde/src/java/org/apache/hadoop/hive/serde/thrift/ThriftSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde/thrift/ThriftSerDe.java index c66372e71..e015e2230 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde/thrift/ThriftSerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde/thrift/ThriftSerDe.java @@ -32,24 +32,6 @@ public class ThriftSerDe implements SerDe { private ThriftByteStreamTypedSerDe tsd; private boolean inStreaming; - public static String shortName() { - return "thrift"; - } - - public String getShortName() { - return shortName(); - } - - static { - StackTraceElement[] sTrace = new Exception().getStackTrace(); - String className = sTrace[0].getClassName(); - try { - SerDeUtils.registerSerDe(shortName(), Class.forName(className)); - } catch(Exception e) { - throw new RuntimeException(e); - } - } - public void initialize(Configuration job, Properties tbl) throws SerDeException { try { // both the classname and the protocol name are Table properties diff --git a/serde/src/java/org/apache/hadoop/hive/serde/thrift/ThriftSerDeField.java b/serde/src/java/org/apache/hadoop/hive/serde/thrift/ThriftSerDeField.java index 087d2073b..87d93059c 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde/thrift/ThriftSerDeField.java +++ b/serde/src/java/org/apache/hadoop/hive/serde/thrift/ThriftSerDeField.java @@ -20,6 +20,7 @@ import org.apache.hadoop.hive.serde.*; import java.lang.reflect.*; +import java.util.HashMap; /** * Thrift implementation of SerDeField @@ -32,12 +33,32 @@ public class ThriftSerDeField extends ReflectionSerDeField { private Class issetClass; private Field issetField; private Field fieldIssetField; + private static HashMap cacheFields = new HashMap(); public ThriftSerDeField(String className, String fieldName) throws SerDeException { super(className, fieldName); try { issetClass = Class.forName(className+"$Isset"); - fieldIssetField = issetClass.getDeclaredField(fieldName); + // fieldIssetField = issetClass.getDeclaredField(fieldName); + String name = issetClass.getName(); + Field[] allFields = cacheFields.get(name); + if (allFields == null) { + allFields = issetClass.getDeclaredFields(); + cacheFields.put(name, allFields); + } + + boolean found = false; + for (Field f: allFields) { + if (f.getName().equalsIgnoreCase(fieldName)) { + fieldIssetField = f; + found = true; + break; + } + } + + if (!found) + throw new SerDeException("Not a Thrift Class?"); + issetField = _parentClass.getDeclaredField("__isset"); } catch (Exception e) { throw (new SerDeException("Not a Thrift Class?", e)); diff --git a/serde/src/java/org/apache/hadoop/hive/serde/thrift/columnsetSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde/thrift/columnsetSerDe.java index cae19b237..617124266 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde/thrift/columnsetSerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde/thrift/columnsetSerDe.java @@ -18,36 +18,27 @@ package org.apache.hadoop.hive.serde.thrift; -import org.apache.hadoop.hive.serde.*; +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.util.ArrayList; +import java.util.Properties; -import com.facebook.thrift.TBase; -import com.facebook.thrift.TSerializer; -import 
com.facebook.thrift.protocol.*; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.io.*; -import java.util.*; -import java.io.*; +import org.apache.hadoop.hive.serde.ColumnSet; +import org.apache.hadoop.hive.serde.ComplexSerDeField; +import org.apache.hadoop.hive.serde.ExpressionUtils; +import org.apache.hadoop.hive.serde.ReflectionSerDeField; +import org.apache.hadoop.hive.serde.SerDe; +import org.apache.hadoop.hive.serde.SerDeException; +import org.apache.hadoop.hive.serde.SerDeField; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.io.Writable; + +import com.facebook.thrift.protocol.TBinaryProtocol; public class columnsetSerDe extends ThriftByteStreamTypedSerDe implements SerDe { - public String getShortName() { - return shortName(); - } - - public static String shortName() { - return "columnset"; - } - - static { - StackTraceElement[] sTrace = new Exception().getStackTrace(); - String className = sTrace[0].getClassName(); - try { - SerDeUtils.registerSerDe(shortName(), Class.forName(className)); - } catch(Exception e) { - throw new RuntimeException(e); - } - } - protected boolean inStreaming; private String separator; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/Deserializer.java b/serde/src/java/org/apache/hadoop/hive/serde2/Deserializer.java index f4bae75b7..84b9dc5c8 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/Deserializer.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/Deserializer.java @@ -58,6 +58,4 @@ public interface Deserializer { */ public ObjectInspector getObjectInspector() throws SerDeException; - public String getShortName(); - } diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java index 50c127bac..535bca9c4 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hive.serde2; -import java.io.UnsupportedEncodingException; import java.nio.charset.CharacterCodingException; import java.util.ArrayList; import java.util.Arrays; @@ -44,33 +43,24 @@ public class MetadataTypedColumnsetSerDe implements SerDe { public static final Log LOG = LogFactory.getLog(MetadataTypedColumnsetSerDe.class.getName()); - public String getShortName() { - return shortName(); - } - - - public static String shortName() { - return "simple_meta"; - } - static { StackTraceElement[] sTrace = new Exception().getStackTrace(); String className = sTrace[0].getClassName(); try { - SerDeUtils.registerSerDe(shortName(), Class.forName(className)); - // For backward compatibility: this class replaces the following class. - SerDeUtils.registerSerDe("org.apache.hadoop.hive.serde.simple_meta.MetadataTypedColumnsetSerDe", + // For backward compatibility: this class replaces the columnsetSerDe class. + SerDeUtils.registerSerDe("org.apache.hadoop.hive.serde.thrift.columnsetSerDe", Class.forName(className)); - } catch(Exception e) { + } catch(Exception e) { throw new RuntimeException(e); } } final public static String DefaultSeparator = "\001"; - private String separator; - // constant for now, will make it configurable later. 
- private String nullString = "\\N"; + + final public static String defaultNullString = "\\N"; + private String nullString; + private List columnNames; private ObjectInspector cachedObjectInspector; @@ -82,20 +72,36 @@ public MetadataTypedColumnsetSerDe() throws SerDeException { separator = DefaultSeparator; } - public void initialize(Configuration job, Properties tbl) throws SerDeException { - separator = DefaultSeparator; - String alt_sep = tbl.getProperty(Constants.SERIALIZATION_FORMAT); - if(alt_sep != null && alt_sep.length() > 0) { + private String getByteValue(String altValue, String defaultVal) { + if (altValue != null && altValue.length() > 0) { try { byte b [] = new byte[1]; - b[0] = Byte.valueOf(alt_sep).byteValue(); - separator = new String(b); + b[0] = Byte.valueOf(altValue).byteValue(); + return new String(b); } catch(NumberFormatException e) { - separator = alt_sep; + return altValue; } } + return defaultVal; + } + + public void initialize(Configuration job, Properties tbl) throws SerDeException { + String alt_sep = tbl.getProperty(Constants.SERIALIZATION_FORMAT); + separator = getByteValue(alt_sep, DefaultSeparator); + + String alt_null = tbl.getProperty(Constants.SERIALIZATION_NULL_FORMAT); + nullString = getByteValue(alt_null, defaultNullString); + String columnProperty = tbl.getProperty("columns"); - if (columnProperty == null || columnProperty.length() == 0) { + String serdeName = tbl.getProperty(Constants.SERIALIZATION_LIB); + // tables that were serialized with columnsetSerDe doesn't have metadata + // so this hack applies to all such tables + boolean columnsetSerDe = false; + if ((serdeName != null) && serdeName.equals("org.apache.hadoop.hive.serde.thrift.columnsetSerDe")) { + columnsetSerDe = true; + } + if (columnProperty == null || columnProperty.length() == 0 + || columnsetSerDe) { // Hack for tables with no columns // Treat it as a table with a single column called "col" cachedObjectInspector = ObjectInspectorFactory.getReflectionObjectInspector( @@ -104,9 +110,9 @@ public void initialize(Configuration job, Properties tbl) throws SerDeException columnNames = Arrays.asList(columnProperty.split(",")); cachedObjectInspector = MetadataListStructObjectInspector.getInstance(columnNames); } - LOG.info(getClass().getName() + ": initialized with columnNames: " + columnNames ); + LOG.debug(getClass().getName() + ": initialized with columnNames: " + columnNames + " and separator code=" + (int)separator.charAt(0) ); } - + public static Object deserialize(ColumnSet c, String row, String sep, String nullString) throws Exception { if (c.col == null) { c.col = new ArrayList(); diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java index e8bf59f57..c814d2c64 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java @@ -170,7 +170,7 @@ static void buildJSONString(StringBuilder sb, Object o, ObjectInspector oi) { switch(oi.getCategory()) { case PRIMITIVE: { if (o == null) { - sb.append("\\N"); + sb.append("null"); } else if (o instanceof String) { sb.append(QUOTE); sb.append(escapeString((String)o)); @@ -184,54 +184,66 @@ static void buildJSONString(StringBuilder sb, Object o, ObjectInspector oi) { break; } case LIST: { - sb.append(LBRACKET); ListObjectInspector loi = (ListObjectInspector)oi; ObjectInspector listElementObjectInspector = loi.getListElementObjectInspector(); List olist = loi.getList(o); - for (int i=0; 
i<olist.size(); i++) { - if (i>0) sb.append(COMMA); - buildJSONString(sb, olist.get(i), listElementObjectInspector); + if (olist == null) { + sb.append("null"); + } else { + sb.append(LBRACKET); + for (int i=0; i<olist.size(); i++) { + if (i>0) sb.append(COMMA); + buildJSONString(sb, olist.get(i), listElementObjectInspector); + } + sb.append(RBRACKET); } - sb.append(RBRACKET); break; } case MAP: { - sb.append(LBRACE); MapObjectInspector moi = (MapObjectInspector)oi; ObjectInspector mapKeyObjectInspector = moi.getMapKeyObjectInspector(); ObjectInspector mapValueObjectInspector = moi.getMapValueObjectInspector(); Map omap = moi.getMap(o); - boolean first = true; - for(Object entry : omap.entrySet()) { - if (first) { - first = false; - } else { - sb.append(COMMA); + if (omap == null) { + sb.append("null"); + } else { + sb.append(LBRACE); + boolean first = true; + for(Object entry : omap.entrySet()) { + if (first) { + first = false; + } else { + sb.append(COMMA); + } + Map.Entry e = (Map.Entry)entry; + buildJSONString(sb, e.getKey(), mapKeyObjectInspector); + sb.append(COLON); + buildJSONString(sb, e.getValue(), mapValueObjectInspector); } - Map.Entry e = (Map.Entry)entry; - buildJSONString(sb, e.getKey(), mapKeyObjectInspector); - sb.append(COLON); - buildJSONString(sb, e.getValue(), mapValueObjectInspector); + sb.append(RBRACE); } - sb.append(RBRACE); break; } case STRUCT: { - sb.append(LBRACE); StructObjectInspector soi = (StructObjectInspector)oi; List structFields = soi.getAllStructFieldRefs(); - for(int i=0; i<structFields.size(); i++) { - if (i>0) { - sb.append(COMMA); + if (structFields == null) { + sb.append("null"); + } else { + sb.append(LBRACE); + for(int i=0; i<structFields.size(); i++) { + if (i>0) { + sb.append(COMMA); + } + sb.append(QUOTE); + sb.append(structFields.get(i).getFieldName()); + sb.append(QUOTE); + sb.append(COLON); + buildJSONString(sb, soi.getStructFieldData(o, structFields.get(i)), + structFields.get(i).getFieldObjectInspector()); } - sb.append(QUOTE); - sb.append(structFields.get(i).getFieldName()); - sb.append(QUOTE); - sb.append(COLON); - buildJSONString(sb, soi.getStructFieldData(o, structFields.get(i)), - structFields.get(i).getFieldObjectInspector()); + sb.append(RBRACE); } - sb.append(RBRACE); break; } default: diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/Serializer.java b/serde/src/java/org/apache/hadoop/hive/serde2/Serializer.java index fb079a335..31e23a229 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/Serializer.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/Serializer.java @@ -54,5 +54,4 @@ public interface Serializer { */ public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException; - public String getShortName(); } diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/ThriftByteStreamTypedSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/ThriftByteStreamTypedSerDe.java index 83fd39bbf..5bef078af 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/ThriftByteStreamTypedSerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/ThriftByteStreamTypedSerDe.java @@ -22,9 +22,7 @@ import java.util.Properties; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; -import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.Writable; import com.facebook.thrift.TBase; @@ -48,15 +46,6 @@ public void initialize(Configuration job, Properties tbl) throws SerDeException throw new SerDeException("ThriftByteStreamTypedSerDe is still semi-abstract"); } - public static 
String shortName() { - return "thriftbytestream"; - } - - public String getShortName() { - return shortName(); - } - - public ThriftByteStreamTypedSerDe(Type objectType, TProtocolFactory inFactory, TProtocolFactory outFactory) throws SerDeException { super(objectType); diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/ThriftDeserializer.java b/serde/src/java/org/apache/hadoop/hive/serde2/ThriftDeserializer.java index a412b755d..bd2258f5b 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/ThriftDeserializer.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/ThriftDeserializer.java @@ -28,26 +28,6 @@ public class ThriftDeserializer implements Deserializer { - public static String shortName() { - return "thrift"; - } - - public String getShortName() { - return shortName(); - } - - static { - StackTraceElement[] sTrace = new Exception().getStackTrace(); - String className = sTrace[0].getClassName(); - try { - SerDeUtils.registerSerDe(shortName(), Class.forName(className)); - // For backward compatibility: this class replaces the following class. - SerDeUtils.registerSerDe("org.apache.hadoop.hive.serde.thrift.ThriftSerDe", Class.forName(className)); - } catch(Exception e) { - throw new RuntimeException(e); - } - } - private ThriftByteStreamTypedSerDe tsd; public ThriftDeserializer() { } diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java new file mode 100644 index 000000000..0a5aed817 --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java @@ -0,0 +1,186 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hive.serde.Constants; +import org.apache.hadoop.hive.serde2.*; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; + +import org.apache.hadoop.hive.serde2.thrift.ConfigurableTProtocol; + +import java.util.*; +import java.io.*; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.io.*; +import org.apache.hadoop.util.StringUtils; + +import com.facebook.thrift.protocol.*; +import com.facebook.thrift.transport.*; + +public class DynamicSerDe implements SerDe, Serializable { + + public static final Log LOG = LogFactory.getLog(DynamicSerDe.class.getName()); + + private String type_name; + private DynamicSerDeStructBase bt; + + public static final String META_TABLE_NAME = "name"; + + transient private thrift_grammar parse_tree; + transient protected ByteStream.Input bis_; + transient protected ByteStream.Output bos_; + + /** + * protocols are protected in case any of their properties need to be queried from another + * class in this package. For TCTLSeparatedProtocol for example, may want to query the separators. + */ + transient protected TProtocol oprot_; + transient protected TProtocol iprot_; + + TIOStreamTransport tios; + + public void initialize(Configuration job, Properties tbl) throws SerDeException { + try { + + String ddl = tbl.getProperty(Constants.SERIALIZATION_DDL); + type_name = tbl.getProperty(META_TABLE_NAME); + String protoName = tbl.getProperty(Constants.SERIALIZATION_FORMAT); + + if(protoName == null) { + protoName = "com.facebook.thrift.protocol.TBinaryProtocol"; + } + TProtocolFactory protFactory = TReflectionUtils.getProtocolFactoryByName(protoName); + bos_ = new ByteStream.Output(); + bis_ = new ByteStream.Input(); + tios = new TIOStreamTransport(bis_,bos_); + + oprot_ = protFactory.getProtocol(tios); + iprot_ = protFactory.getProtocol(tios); + + /** + * initialize the protocols + */ + + if(oprot_ instanceof org.apache.hadoop.hive.serde2.thrift.ConfigurableTProtocol) { + ((ConfigurableTProtocol)oprot_).initialize(job, tbl); + } + + if(iprot_ instanceof org.apache.hadoop.hive.serde2.thrift.ConfigurableTProtocol) { + ((ConfigurableTProtocol)iprot_).initialize(job, tbl); + } + + // in theory the include path should come from the configuration + List include_path = new ArrayList(); + include_path.add("."); + LOG.debug("ddl=" + ddl); + this.parse_tree = new thrift_grammar(new ByteArrayInputStream(ddl.getBytes()), include_path,false); + this.parse_tree.Start(); + + this.bt = (DynamicSerDeStructBase)this.parse_tree.types.get(type_name); + + if(this.bt == null) { + this.bt = (DynamicSerDeStructBase)this.parse_tree.tables.get(type_name); + } + + if(this.bt == null) { + throw new SerDeException("Could not lookup table type " + type_name + " in this ddl: " + ddl); + } + + this.bt.initialize(); + } catch (Exception e) { + System.out.println(StringUtils.stringifyException(e)); + throw new SerDeException(e); + } + } + + Object deserializeReuse = null; + public Object deserialize(Writable field) throws SerDeException { + try { + if (field instanceof Text) { + Text b = (Text)field; + bis_.reset(b.getBytes(), b.getLength()); + } else { + BytesWritable b = (BytesWritable)field; + bis_.reset(b.get(), b.getSize()); + } + deserializeReuse = this.bt.deserialize(deserializeReuse, iprot_); + return 
deserializeReuse; + } catch(Exception e) { + e.printStackTrace(); + throw new SerDeException(e); + } + } + + public static ObjectInspector dynamicSerDeStructBaseToObjectInspector(DynamicSerDeTypeBase bt) throws SerDeException { + if (bt.isList()) { + return ObjectInspectorFactory.getStandardListObjectInspector( + dynamicSerDeStructBaseToObjectInspector(((DynamicSerDeTypeList)bt).getElementType())); + } else if (bt.isMap()) { + DynamicSerDeTypeMap btMap = (DynamicSerDeTypeMap)bt; + return ObjectInspectorFactory.getStandardMapObjectInspector( + dynamicSerDeStructBaseToObjectInspector(btMap.getKeyType()), + dynamicSerDeStructBaseToObjectInspector(btMap.getValueType())); + } else if (bt.isPrimitive()) { + return ObjectInspectorFactory.getStandardPrimitiveObjectInspector(bt.getRealType()); + } else { + // Must be a struct + DynamicSerDeStructBase btStruct = (DynamicSerDeStructBase)bt; + DynamicSerDeFieldList fieldList = btStruct.getFieldList(); + DynamicSerDeField[] fields = fieldList.getChildren(); + ArrayList fieldNames = new ArrayList(fields.length); + ArrayList fieldObjectInspectors = new ArrayList(fields.length); + for(int i=0; i getSerializedClass() { + return BytesWritable.class; + } + + BytesWritable ret = new BytesWritable(); + @Override + public Writable serialize(Object obj, ObjectInspector objInspector) + throws SerDeException { + try { + bos_.reset(); + this.bt.serialize(obj, objInspector, oprot_); + oprot_.getTransport().flush(); + } catch(Exception e) { + e.printStackTrace(); + throw new SerDeException(e); + } + ret.set(bos_.getData(),0,bos_.getCount()); + return ret; + } +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeField.java b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeField.java new file mode 100644 index 000000000..465147288 --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeField.java @@ -0,0 +1,47 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +import org.apache.hadoop.hive.serde2.*; +import com.facebook.thrift.TException; +import com.facebook.thrift.protocol.TProtocol; + +public class DynamicSerDeField extends DynamicSerDeSimpleNode { + + + // production is: + // [this.fieldid :] Requiredness() FieldType() this.name FieldValue() [CommaOrSemicolon()] + + private final int FD_REQUIREDNESS = 0; + private final int FD_FIELD_TYPE = 1; + private final int FD_FIELD_VALUE =2; + + public DynamicSerDeFieldType getFieldType() { + return (DynamicSerDeFieldType)this.jjtGetChild(FD_FIELD_TYPE); + } + + public DynamicSerDeField(int i) { + super(i); + } + public DynamicSerDeField(thrift_grammar p, int i) { + super(p,i); + } + +} + diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldList.java b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldList.java new file mode 100644 index 000000000..dc0547a14 --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldList.java @@ -0,0 +1,214 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +import com.facebook.thrift.TException; +import com.facebook.thrift.TApplicationException; +import com.facebook.thrift.protocol.*; +import com.facebook.thrift.server.*; +import com.facebook.thrift.transport.*; +import java.util.*; +import java.io.*; +import org.apache.hadoop.hive.serde2.*; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.StructField; +import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; + +import java.lang.reflect.*; +import com.facebook.thrift.protocol.TType.*; + +public class DynamicSerDeFieldList extends DynamicSerDeSimpleNode implements Serializable { + + // private void writeObject(ObjectOutputStream out) throws IOException { + // out.writeObject(types_by_column_name); + // out.writeObject(ordered_types); + // } + + // production: Field()* + + // mapping of the fieldid to the field + private Map<Integer, DynamicSerDeTypeBase> types_by_id = null; + private Map<String, DynamicSerDeTypeBase> types_by_column_name = null; + private DynamicSerDeTypeBase ordered_types[] = null; + + private Map<String, Integer> ordered_column_id_by_name = null; + + public DynamicSerDeFieldList(int i) { + super(i); + } + + public DynamicSerDeFieldList(thrift_grammar p, int i) { + super(p,i); + } + + private DynamicSerDeField getField(int i) { + return (DynamicSerDeField)this.jjtGetChild(i); + } + + final public DynamicSerDeField [] getChildren() { + int size = this.jjtGetNumChildren(); + DynamicSerDeField result [] = new DynamicSerDeField[size]; + for(int i = 0; i < size; i++) { + result[i] = (DynamicSerDeField)this.jjtGetChild(i); + } + return result; + } + + private int getNumFields() { + return this.jjtGetNumChildren(); + } + + public void initialize() { + if(types_by_id == null) { + // multiple means of lookup + types_by_id = new HashMap<Integer, DynamicSerDeTypeBase> (); + types_by_column_name = new HashMap<String, DynamicSerDeTypeBase> (); + ordered_types = new DynamicSerDeTypeBase[this.jjtGetNumChildren()]; + ordered_column_id_by_name = new HashMap<String, Integer>(); + + // put them in and also roll them up while we're at it + // a Field contains a FieldType which in turn contains a type + for(int i = 0 ; i < this.jjtGetNumChildren(); i++) { + DynamicSerDeField mt = this.getField(i); + DynamicSerDeTypeBase type = mt.getFieldType().getMyType(); + type.fieldid = mt.fieldid; + type.name = mt.name; + + types_by_id.put(Integer.valueOf(mt.fieldid) , type); + types_by_column_name.put(mt.name, type); + ordered_types[i] = type; + ordered_column_id_by_name.put(mt.name, i); + } + } + } + + private DynamicSerDeTypeBase getFieldByFieldId(int i) { + return types_by_id.get(i); + } + + protected DynamicSerDeTypeBase getFieldByName(String fieldname) { + return types_by_column_name.get(fieldname); + } + + + public Object deserialize(Object reuse, TProtocol iprot) throws SerDeException, TException, IllegalAccessException { + ArrayList<Object> struct = null; + + if (reuse == null) { + struct = new ArrayList<Object>(this.getNumFields()); + for(int i=0; i<this.getNumFields(); i++) { + struct.add(null); + } + } else { + struct = (ArrayList<Object>) reuse; + assert(struct.size() == this.getNumFields()); + } + + // Read the fields. 
+ for(int i = 0; i < this.getNumFields(); i++) { + DynamicSerDeTypeBase mt = null; + TField field = null; + + if (thrift_mode) { + field = iprot.readFieldBegin(); + + if(field.type >= 0) { + if(field.type == TType.STOP) { + break; + } + mt = this.getFieldByFieldId(field.id); + if(mt == null) { + System.err.println("ERROR for fieldid: " + field.id + " system has no knowledge of this field which is of type : " + field.type); + TProtocolUtil.skip(iprot,field.type); + continue; + } + } + } + + // field.type < 0 means that this is a faked Thrift field, e.g., TControlSeparatedProtocol, which does not + // serialize the field id in the stream. As a result, the only way to get the field id is to fall back to + // the position "i". + // The intention of this hack (field.type < 0) is to make TControlSeparatedProtocol a real Thrift prototype, + // but there are a lot additional work to do to fulfill that, and that protocol inherently does not support + // versioning (adding/deleting fields). + int orderedId = -1; + if (!thrift_mode || field.type < 0) { + mt = this.ordered_types[i]; + // We don't need to lookup order_column_id_by_name because we know it must be "i". + orderedId = i; + } else { + // Set the correct position + orderedId = ordered_column_id_by_name.get(mt.name); + } + struct.set(orderedId, mt.deserialize(struct.get(orderedId), iprot)); + + if(thrift_mode) { + iprot.readFieldEnd(); + } + } + return struct; + } + + + TField field = new TField(); + public void serialize(Object o, ObjectInspector oi, TProtocol oprot) throws TException, SerDeException, NoSuchFieldException,IllegalAccessException { + + // Assuming the ObjectInspector represents exactly the same type as this struct. + // This assumption should be checked during query compile time. + assert(oi instanceof StructObjectInspector); + StructObjectInspector soi = (StructObjectInspector) oi; + + // For every field + List fields = soi.getAllStructFieldRefs(); + if (fields.size() != ordered_types.length) { + throw new SerDeException("Trying to serialize " + fields.size() + + " fields into a struct with " + ordered_types.length); + } + for (int i=0; i> with the String being the field name. + * + */ + public void serialize(Object o, ObjectInspector oi, TProtocol oprot) throws TException, SerDeException, NoSuchFieldException,IllegalAccessException { + if(thrift_mode) { + oprot.writeStructBegin(new TStruct(this.name)); + } + + fieldList.serialize(o, oi, oprot); + + if(thrift_mode) { + oprot.writeFieldStop(); + oprot.writeStructEnd(); + } + } + +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeBase.java b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeBase.java new file mode 100644 index 000000000..5998f03f9 --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeBase.java @@ -0,0 +1,63 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +import org.apache.hadoop.hive.serde2.*; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; + +import com.facebook.thrift.protocol.TProtocol; +import com.facebook.thrift.TException; +import java.io.Serializable; + +public abstract class DynamicSerDeTypeBase extends DynamicSerDeSimpleNode implements Serializable { + private static final long serialVersionUID = 1L; + + public DynamicSerDeTypeBase(int i) { + super(i); + } + + public DynamicSerDeTypeBase(thrift_grammar p, int i) { + super(p,i); + } + + public Class getRealType() throws SerDeException { + throw new SerDeException("Not implemented in base"); + } + + public Object get(Object obj) { + throw new RuntimeException("Not implemented in base"); + } + + public abstract Object deserialize(Object reuse, TProtocol iprot) throws SerDeException, TException, IllegalAccessException; + + public abstract void serialize(Object o, ObjectInspector oi, TProtocol oprot) throws TException, SerDeException, NoSuchFieldException,IllegalAccessException; + + public String toString() { + return "BAD"; + } + + public byte getType() { + return -1; + } + + public boolean isPrimitive() { return true; } + public boolean isList() { return false; } + public boolean isMap() { return false; } + +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeBool.java b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeBool.java new file mode 100644 index 000000000..090802970 --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeBool.java @@ -0,0 +1,64 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +import com.facebook.thrift.TException; +import com.facebook.thrift.protocol.*; +import org.apache.hadoop.hive.serde2.*; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; + +import com.facebook.thrift.protocol.TType; + +public class DynamicSerDeTypeBool extends DynamicSerDeTypeBase { + + // production is: bool + + public DynamicSerDeTypeBool(int i) { + super(i); + } + + public DynamicSerDeTypeBool(thrift_grammar p, int i) { + super(p, i); + } + + public String toString() { + return "bool"; + } + + @Override + public Object deserialize(Object reuse, TProtocol iprot) + throws SerDeException, TException, IllegalAccessException { + return Boolean.valueOf(iprot.readBool()); + } + + @Override + public void serialize(Object o, ObjectInspector oi, TProtocol oprot) + throws TException, SerDeException, NoSuchFieldException, + IllegalAccessException { + assert (oi.getCategory() == ObjectInspector.Category.PRIMITIVE); + assert (((PrimitiveObjectInspector) oi).getPrimitiveClass() + .equals(Boolean.class)); + oprot.writeBool((Boolean) o); + } + + public byte getType() { + return TType.BOOL; + } +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeByte.java b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeByte.java new file mode 100644 index 000000000..f4d926c7f --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeByte.java @@ -0,0 +1,64 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + + +import com.facebook.thrift.TException; +import com.facebook.thrift.protocol.*; +import org.apache.hadoop.hive.serde2.*; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; +import com.facebook.thrift.protocol.TType; + +public class DynamicSerDeTypeByte extends DynamicSerDeTypeBase { + + // production is: byte + + + public DynamicSerDeTypeByte(int i) { + super(i); + } + public DynamicSerDeTypeByte(thrift_grammar p, int i) { + super(p,i); + } + + public String toString() { return "byte"; } + + public Byte deserialize(TProtocol iprot) throws SerDeException, TException, IllegalAccessException { + return Byte.valueOf(iprot.readByte()); + } + public Object deserialize(Object reuse, TProtocol iprot) throws SerDeException, TException, IllegalAccessException { + return Byte.valueOf(iprot.readByte()); + } + + public void serialize(Object s, TProtocol oprot) throws TException, SerDeException, NoSuchFieldException,IllegalAccessException { + // bugbug need to use object of byte type!!! + oprot.writeByte((Byte)s); + } + + public void serialize(Object o, ObjectInspector oi, TProtocol oprot) throws TException, SerDeException, NoSuchFieldException,IllegalAccessException { + assert(oi.getCategory() == ObjectInspector.Category.PRIMITIVE); + assert(((PrimitiveObjectInspector)oi).getPrimitiveClass().equals(Byte.class)); + oprot.writeByte((Byte)o); + } + + public byte getType() { + return TType.BYTE; + } +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeDouble.java b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeDouble.java new file mode 100644 index 000000000..88dad317f --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeDouble.java @@ -0,0 +1,61 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +import com.facebook.thrift.TException; +import com.facebook.thrift.TApplicationException; +import com.facebook.thrift.protocol.*; +import com.facebook.thrift.server.*; +import com.facebook.thrift.transport.*; +import java.util.*; +import java.io.*; +import org.apache.hadoop.hive.serde2.*; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; + +import java.lang.reflect.*; +import com.facebook.thrift.protocol.TType; + +public class DynamicSerDeTypeDouble extends DynamicSerDeTypeBase { + + // production is: double + + public DynamicSerDeTypeDouble(int i) { + super(i); + } + public DynamicSerDeTypeDouble(thrift_grammar p, int i) { + super(p,i); + } + + public String toString() { return "double"; } + + public Object deserialize(Object reuse, TProtocol iprot) throws SerDeException, TException, IllegalAccessException { + return Double.valueOf(iprot.readDouble()); + } + + public void serialize(Object o, ObjectInspector oi, TProtocol oprot) throws TException, SerDeException, NoSuchFieldException,IllegalAccessException { + assert(oi.getCategory() == ObjectInspector.Category.PRIMITIVE); + assert(((PrimitiveObjectInspector)oi).getPrimitiveClass().equals(Double.class)); + oprot.writeDouble((Double)o); + } + + public byte getType() { + return TType.DOUBLE; + } +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeList.java b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeList.java new file mode 100644 index 000000000..17b138e57 --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeList.java @@ -0,0 +1,113 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +import com.facebook.thrift.TException; +import com.facebook.thrift.protocol.*; +import java.util.*; +import org.apache.hadoop.hive.serde.Constants; +import org.apache.hadoop.hive.serde2.*; +import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; + +import com.facebook.thrift.protocol.TType; + +public class DynamicSerDeTypeList extends DynamicSerDeTypeBase { + + public boolean isPrimitive() { return false; } + public boolean isList() { return true; } + + // production is: list + + static final private int FD_TYPE = 0; + + public Class getRealType() { + return java.util.ArrayList.class; + } + + public DynamicSerDeTypeList(int i) { + super(i); + } + public DynamicSerDeTypeList(thrift_grammar p, int i) { + super(p,i); + } + + public DynamicSerDeTypeBase getElementType() { + return (DynamicSerDeTypeBase)((DynamicSerDeFieldType)this.jjtGetChild(FD_TYPE)).getMyType(); + } + + public String toString() { + return Constants.LIST_TYPE_NAME + "<" + this.getElementType().toString() + ">"; + } + + @Override + public ArrayList deserialize(Object reuse, TProtocol iprot) throws SerDeException, TException, IllegalAccessException { + TList thelist = iprot.readListBegin(); + ArrayList deserializeReuse; + if (reuse != null) { + deserializeReuse = (ArrayList)reuse; + // Trim to the size needed + while (deserializeReuse.size() > thelist.size) { + deserializeReuse.remove(deserializeReuse.size()-1); + } + } else { + deserializeReuse = new ArrayList(); + } + deserializeReuse.ensureCapacity(thelist.size); + for(int i = 0; i < thelist.size; i++) { + if (i+1 > deserializeReuse.size()) { + deserializeReuse.add(this.getElementType().deserialize(null, iprot)); + } else { + deserializeReuse.set(i, + this.getElementType().deserialize(deserializeReuse.get(i), iprot)); + } + } + // in theory, the below call isn't needed in non thrift_mode, but let's not get too crazy + iprot.readListEnd(); + return deserializeReuse; + } + + @Override + public void serialize(Object o, ObjectInspector oi, TProtocol oprot) throws TException, SerDeException, NoSuchFieldException,IllegalAccessException { + ListObjectInspector loi = (ListObjectInspector)oi; + ObjectInspector elementObjectInspector = loi.getListElementObjectInspector(); + DynamicSerDeTypeBase mt = this.getElementType(); + + if (o instanceof List) { + List list = (List)o; + oprot.writeListBegin(new TList(mt.getType(),list.size())); + for (Object element: list) { + mt.serialize(element, elementObjectInspector, oprot); + } + } else { + Object[] list = (Object[])o; + oprot.writeListBegin(new TList(mt.getType(),list.length)); + for (Object element: list) { + mt.serialize(element, elementObjectInspector, oprot); + } + } + // in theory, the below call isn't needed in non thrift_mode, but let's not get too crazy + oprot.writeListEnd(); + } + + public byte getType() { + return TType.LIST; + } + +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeMap.java b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeMap.java new file mode 100644 index 000000000..e4455f4ea --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeMap.java @@ -0,0 +1,129 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +import com.facebook.thrift.TException; +import com.facebook.thrift.TApplicationException; +import com.facebook.thrift.protocol.*; +import com.facebook.thrift.server.*; +import com.facebook.thrift.transport.*; +import java.util.*; +import java.io.*; +import org.apache.hadoop.hive.serde2.*; +import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; + +import java.lang.reflect.*; +import com.facebook.thrift.protocol.TType; + +public class DynamicSerDeTypeMap extends DynamicSerDeTypeBase { + + public boolean isPrimitive() { return false; } + public boolean isMap() { return true;} + + // production is: Map + + private final byte FD_KEYTYPE = 0; + private final byte FD_VALUETYPE = 1; + + // returns Map + public Class getRealType() { + try { + Class c = this.getKeyType().getRealType(); + Class c2 = this.getValueType().getRealType(); + Object o = c.newInstance(); + Object o2 = c2.newInstance(); + Map l = Collections.singletonMap(o,o2); + return l.getClass(); + } catch (Exception e) { + e.printStackTrace(); + throw new RuntimeException(e); + } + } + + public DynamicSerDeTypeMap(int i) { + super(i); + } + + public DynamicSerDeTypeMap(thrift_grammar p, int i) { + super(p,i); + } + + public DynamicSerDeTypeBase getKeyType() { + return (DynamicSerDeTypeBase)((DynamicSerDeFieldType)this.jjtGetChild(FD_KEYTYPE)).getMyType(); + } + + public DynamicSerDeTypeBase getValueType() { + return (DynamicSerDeTypeBase)((DynamicSerDeFieldType)this.jjtGetChild(FD_VALUETYPE)).getMyType(); + } + + public String toString() { + return "map<" + this.getKeyType().toString() + "," + this.getValueType().toString() + ">"; + } + + public Map deserialize(Object reuse, TProtocol iprot) throws SerDeException, TException, IllegalAccessException { + HashMap deserializeReuse; + if (reuse != null) { + deserializeReuse = (HashMap)reuse; + deserializeReuse.clear(); + } else { + deserializeReuse = new HashMap(); + } + TMap themap = iprot.readMapBegin(); + for(int i = 0; i < themap.size; i++) { + Object key = this.getKeyType().deserialize(null, iprot); + Object value = this.getValueType().deserialize(null, iprot); + deserializeReuse.put(key,value); + } + + // in theory, the below call isn't needed in non thrift_mode, but let's not get too crazy + iprot.readMapEnd(); + return deserializeReuse; + } + + @Override + public void serialize(Object o, ObjectInspector oi, TProtocol oprot) + throws TException, SerDeException, NoSuchFieldException, + IllegalAccessException { + DynamicSerDeTypeBase keyType = this.getKeyType(); + DynamicSerDeTypeBase valueType = this.getValueType(); + + assert(oi.getCategory() == ObjectInspector.Category.MAP); + MapObjectInspector moi = (MapObjectInspector)oi; + ObjectInspector koi = 
moi.getMapKeyObjectInspector(); + ObjectInspector voi = moi.getMapValueObjectInspector(); + + Map map = moi.getMap(o); + oprot.writeMapBegin(new TMap(keyType.getType(),valueType.getType(),map.size())); + for(Iterator i = map.entrySet().iterator(); i.hasNext(); ) { + Map.Entry it = (Map.Entry)i.next(); + Object key = it.getKey(); + Object value = it.getValue(); + keyType.serialize(key, koi, oprot); + valueType.serialize(value, voi, oprot); + } + // in theory, the below call isn't needed in non thrift_mode, but let's not get too crazy + oprot.writeMapEnd(); + } + + public byte getType() { + return TType.MAP; + } +}; + diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeSet.java b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeSet.java new file mode 100644 index 000000000..d80016b5f --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeSet.java @@ -0,0 +1,115 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +import com.facebook.thrift.TException; +import com.facebook.thrift.TApplicationException; +import com.facebook.thrift.protocol.*; +import com.facebook.thrift.server.*; +import com.facebook.thrift.transport.*; +import java.util.*; +import java.io.*; +import org.apache.hadoop.hive.serde2.*; +import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; + +import java.lang.reflect.*; +import com.facebook.thrift.protocol.TType; + +public class DynamicSerDeTypeSet extends DynamicSerDeTypeBase { + + // production is: set + + static final private int FD_TYPE = 0; + + public DynamicSerDeTypeSet(int i) { + super(i); + } + public DynamicSerDeTypeSet(thrift_grammar p, int i) { + super(p,i); + } + + // returns Set + public Class getRealType() { + try { + Class c = this.getElementType().getRealType(); + Object o = c.newInstance(); + Set l = Collections.singleton(o); + return l.getClass(); + } catch (Exception e) { + e.printStackTrace(); + throw new RuntimeException(e); + } + } + + public DynamicSerDeTypeBase getElementType() { + return (DynamicSerDeTypeBase)((DynamicSerDeFieldType)this.jjtGetChild(FD_TYPE)).getMyType(); + } + + public String toString() { + return "set<" + this.getElementType().toString() + ">"; + } + + public byte getType() { + return TType.SET; + } + + /** NOTE: Set is not supported by Hive yet. 
+ */ + @Override + public Object deserialize(Object reuse, TProtocol iprot) + throws SerDeException, TException, IllegalAccessException { + TSet theset = iprot.readSetBegin(); + Set result; + if (reuse != null) { + result = (Set)reuse; + result.clear(); + } else { + result = new HashSet(); + } + for(int i = 0; i < theset.size; i++) { + Object elem = this.getElementType().deserialize(null, iprot); + result.add(elem); + } + // in theory, the below call isn't needed in non thrift_mode, but let's not get too crazy + iprot.readSetEnd(); + return result; + } + + /** NOTE: Set is not supported by Hive yet. + * The code uses ListObjectInspector right now. We need to change it to + * SetObjectInspector when that is done. + */ + @Override + public void serialize(Object o, ObjectInspector oi, TProtocol oprot) + throws TException, SerDeException, NoSuchFieldException, + IllegalAccessException { + + ListObjectInspector loi = (ListObjectInspector)oi; + + Set set = (Set)o; + DynamicSerDeTypeBase mt = this.getElementType(); + oprot.writeSetBegin(new TSet(mt.getType(),set.size())); + for(Object element: set) { + mt.serialize(element, loi.getListElementObjectInspector(), oprot); + } + // in theory, the below call isn't needed in non thrift_mode, but let's not get too crazy + oprot.writeSetEnd(); + } +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeString.java b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeString.java new file mode 100644 index 000000000..84968e1a0 --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeString.java @@ -0,0 +1,71 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +import com.facebook.thrift.TException; +import com.facebook.thrift.TApplicationException; +import com.facebook.thrift.protocol.*; +import com.facebook.thrift.server.*; +import com.facebook.thrift.transport.*; +import java.util.*; +import java.io.*; +import org.apache.hadoop.hive.serde2.*; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; + +import java.lang.reflect.*; +import com.facebook.thrift.protocol.TType.*; + +public class DynamicSerDeTypeString extends DynamicSerDeTypeBase { + + // production is: string + + public DynamicSerDeTypeString(int i) { + super(i); + } + public DynamicSerDeTypeString(thrift_grammar p, int i) { + super(p,i); + } + public Class getRealType() { return java.lang.String.class; } + + public String toString() { return "string"; } + + public String deserialize(TProtocol iprot) throws SerDeException, TException, IllegalAccessException { + return String.valueOf(iprot.readString()); + } + + @Override + public Object deserialize(Object reuse, TProtocol iprot) throws SerDeException, TException, IllegalAccessException { + return String.valueOf(iprot.readString()); + } + + public void serialize(Object s, TProtocol oprot) throws TException, SerDeException, NoSuchFieldException,IllegalAccessException { + oprot.writeString((String)s); + } + @Override + public void serialize(Object o, ObjectInspector oi, TProtocol oprot) throws TException, SerDeException, NoSuchFieldException,IllegalAccessException { + assert(oi.getCategory() == ObjectInspector.Category.PRIMITIVE); + assert(((PrimitiveObjectInspector)oi).getPrimitiveClass().equals(String.class)); + oprot.writeString((String)o); + } + + public byte getType() { + return TType.STRING; + } +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypedef.java b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypedef.java new file mode 100644 index 000000000..dbc8d923a --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypedef.java @@ -0,0 +1,81 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +import com.facebook.thrift.TException; +import com.facebook.thrift.TApplicationException; +import com.facebook.thrift.protocol.*; +import com.facebook.thrift.server.*; +import com.facebook.thrift.transport.*; +import java.util.*; +import java.io.*; +import org.apache.hadoop.hive.serde2.*; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; + +import java.lang.reflect.*; +import com.facebook.thrift.protocol.TType.*; + +public class DynamicSerDeTypedef extends DynamicSerDeTypeBase { + + // production is: typedef DefinitionType() this.name + + private final static int FD_DEFINITION_TYPE = 0; + + public DynamicSerDeTypedef(int i) { + super(i); + } + public DynamicSerDeTypedef(thrift_grammar p, int i) { + super(p,i); + } + + private DynamicSerDeSimpleNode getDefinitionType() { + return (DynamicSerDeSimpleNode)this.jjtGetChild(FD_DEFINITION_TYPE); + } + + + public DynamicSerDeTypeBase getMyType() { + DynamicSerDeSimpleNode child = this.getDefinitionType(); + DynamicSerDeTypeBase ret = (DynamicSerDeTypeBase)child.jjtGetChild(0); + return ret; + } + + public String toString() { + String result = "typedef " + this.name + "("; + result += this.getDefinitionType().toString(); + result += ")"; + return result; + } + + public byte getType() { + throw new RuntimeException("not implemented"); + } + + @Override + public Object deserialize(Object reuse, TProtocol iprot) + throws SerDeException, TException, IllegalAccessException { + throw new RuntimeException("not implemented"); + } + @Override + public void serialize(Object o, ObjectInspector oi, TProtocol oprot) + throws TException, SerDeException, NoSuchFieldException, + IllegalAccessException { + throw new RuntimeException("not implemented"); + } + +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypei16.java b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypei16.java new file mode 100644 index 000000000..fe8f8e379 --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypei16.java @@ -0,0 +1,65 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +import com.facebook.thrift.TException; +import com.facebook.thrift.TApplicationException; +import com.facebook.thrift.protocol.*; +import com.facebook.thrift.server.*; +import com.facebook.thrift.transport.*; +import java.util.*; +import java.io.*; +import org.apache.hadoop.hive.serde2.*; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; + +import java.lang.reflect.*; +import com.facebook.thrift.protocol.TType; + +public class DynamicSerDeTypei16 extends DynamicSerDeTypeBase { + + public Class getRealType() { return Integer.valueOf(2).getClass(); } + + // production is: i16 + + public DynamicSerDeTypei16(int i) { + super(i); + } + public DynamicSerDeTypei16(thrift_grammar p, int i) { + super(p,i); + } + + public String toString() { return "i16"; } + + @Override + public Object deserialize(Object reuse, TProtocol iprot) throws SerDeException, TException, IllegalAccessException { + return Integer.valueOf(iprot.readI16()); + } + + @Override + public void serialize(Object o, ObjectInspector oi, TProtocol oprot) throws TException, SerDeException, NoSuchFieldException,IllegalAccessException { + assert(oi.getCategory() == ObjectInspector.Category.PRIMITIVE); + assert(((PrimitiveObjectInspector)oi).getPrimitiveClass().equals(Short.class)); + oprot.writeI16((Short)o); + } + + public byte getType() { + return TType.I16; + } +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypei32.java b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypei32.java new file mode 100644 index 000000000..34000e74f --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypei32.java @@ -0,0 +1,67 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +import com.facebook.thrift.TException; +import com.facebook.thrift.TApplicationException; +import com.facebook.thrift.protocol.*; +import com.facebook.thrift.server.*; +import com.facebook.thrift.transport.*; +import java.util.*; +import java.io.*; +import org.apache.hadoop.hive.serde2.*; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; + +import java.lang.reflect.*; +import com.facebook.thrift.protocol.TType; + +public class DynamicSerDeTypei32 extends DynamicSerDeTypeBase { + + // production is: i32 + + public DynamicSerDeTypei32(int i) { + super(i); + } + public DynamicSerDeTypei32(thrift_grammar p, int i) { + super(p,i); + } + + public String toString() { return "i32"; } + + @Override + public Object deserialize(Object reuse, TProtocol iprot) throws SerDeException, TException, IllegalAccessException { + return Integer.valueOf(iprot.readI32()); + } + + @Override + public void serialize(Object o, ObjectInspector oi, TProtocol oprot) throws TException, SerDeException, NoSuchFieldException,IllegalAccessException { + assert(oi.getCategory() == ObjectInspector.Category.PRIMITIVE); + assert(((PrimitiveObjectInspector)oi).getPrimitiveClass().equals(Integer.class)); + oprot.writeI32((Integer)o); + } + + public Class getRealType() { return java.lang.Integer.class; } + public Integer getRealTypeInstance() { return Integer.valueOf(0); } + + + public byte getType() { + return TType.I32; + } +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypei64.java b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypei64.java new file mode 100644 index 000000000..67300e614 --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypei64.java @@ -0,0 +1,65 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.serde2.dynamic_type; + +import com.facebook.thrift.TException; +import com.facebook.thrift.TApplicationException; +import com.facebook.thrift.protocol.*; +import com.facebook.thrift.server.*; +import com.facebook.thrift.transport.*; +import java.util.*; +import java.io.*; +import org.apache.hadoop.hive.serde2.*; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; + +import java.lang.reflect.*; +import com.facebook.thrift.protocol.TType; + +public class DynamicSerDeTypei64 extends DynamicSerDeTypeBase { + + public Class getRealType() { return Long.valueOf(0).getClass(); } + + // production is: i64 + + public DynamicSerDeTypei64(int i) { + super(i); + } + public DynamicSerDeTypei64(thrift_grammar p, int i) { + super(p,i); + } + + public String toString() { return "i64"; } + + @Override + public Object deserialize(Object reuse, TProtocol iprot) throws SerDeException, TException, IllegalAccessException { + return Long.valueOf(iprot.readI64()); + } + + @Override + public void serialize(Object o, ObjectInspector oi, TProtocol oprot) throws TException, SerDeException, NoSuchFieldException,IllegalAccessException { + assert(oi.getCategory() == ObjectInspector.Category.PRIMITIVE); + assert(((PrimitiveObjectInspector)oi).getPrimitiveClass().equals(Long.class)); + oprot.writeI64((Long)o); + } + + public byte getType() { + return TType.I64; + } +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammar.jjt b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammar.jjt new file mode 100644 index 000000000..c3fe253ed --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammar.jjt @@ -0,0 +1,866 @@ +options { + MULTI=true; + STATIC = false; + NODE_PREFIX = "DynamicSerDe"; +} + + +PARSER_BEGIN(thrift_grammar) + +package org.apache.hadoop.hive.serde2.dynamic_type; + +import java.util.*; +import java.io.*; +import java.net.*; +import com.facebook.thrift.protocol.*; +import com.facebook.thrift.transport.*; +import org.apache.hadoop.hive.serde2.dynamic_type.*; + +public class thrift_grammar { + + private List include_path = null; + + // for computing the autogenerated field ids in thrift + private int field_val; + + // store types and tables + // separately because one cannot use a table (ie service.method) as a Struct like type. + protected Map types; + protected Map tables; + + // system include path + final private static String default_include_path[] = { "/usr/local/include","/usr/include","/usr/local/include/thrift/if","/usr/local/include/fb303/if" }; + + // need three params to differentiate between this and 2 param method auto generated since + // some calls in the autogenerated code use null param for 2nd param and thus ambiguous. 
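+    // e.g. main() below calls new thrift_grammar(is, include_path, false); the boolean is
+    // ignored and only serves to disambiguate this constructor from the generated 2-arg one.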
+ protected thrift_grammar(InputStream is, List include_path, boolean junk) { + this(is,null); + this.types = new HashMap () ; + this.tables = new HashMap () ; + this.include_path = include_path; + this.field_val = -1; + } + + // find the file on the include path + private static File findFile(String fname, List include_path) { + for(String path: include_path) { + final String full = path + "/" + fname; + File f = new File(full); + if(f.exists()) { + return f; + } + } + return null; + } + + public static void main(String args[]) { + String filename = null; + List include_path = new ArrayList(); + + for(String path: default_include_path) { + include_path.add(path); + } + for(int i = 0; i < args.length; i++) { + String arg = args[i]; + if(arg.equals("--include") && i + 1 < args.length) { + include_path.add(args[++i]); + } + if(arg.equals("--file") && i + 1 < args.length) { + filename = args[++i]; + } + } + + InputStream is = System.in; + if(filename != null) { + try { + is = new FileInputStream(findFile(filename, include_path)); + } catch(IOException e) { + } + } + thrift_grammar t = new thrift_grammar(is,include_path,false); + + try { + t.Start(); + } catch (Exception e) { + System.out.println("Parse error."); + System.out.println(e.getMessage()); + e.printStackTrace(); + } + } +} + +PARSER_END(thrift_grammar) + + + +SKIP : +{ + " " +| "\t" +| "\n" +| "\r" +| <"#"(~["\n"])* ("\n"|"\r"|"\r\n")> +| <"//" (~["\n","\r"])* ("\n"|"\r"|"\r\n")> +| <"/*" (~["*"])* "*" (~["/"] (~["*"])* "*")* "/"> +} + + +/** + * HELPER DEFINITIONS, COMMENTS, CONSTANTS, AND WHATNOT + */ + +TOKEN: +{ +| + | +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| +| + | + | + | + | + | + | + | + | + | + | + | + | + +} + +TOKEN: { + + +| +)*"."()+(["e","E"](["+","-"])?()+)?> +| +(||"."|"_")*> +| +<#LETTER: (["a"-"z", "A"-"Z" ]) > +| +<#DIGIT: ["0"-"9"] > +| + +| + +} + + +SimpleNode Start() : {} +{ + HeaderList() (Definition())+ + { + return jjtThis; + } +} + +SimpleNode HeaderList() : {} +{ + (Header())* + { + return jjtThis; + } + +} + +SimpleNode Header() : {} +{ + Include() + { + return jjtThis; + } +| Namespace() + { + return jjtThis; + } +} + +SimpleNode Namespace() : {} +{ + +{ + return jjtThis; +} +| + +{ + return jjtThis; +} +| + +{ + return jjtThis; +} +| + +{ + return jjtThis; +} +| + +{ + return jjtThis; +} +| + +{ + return jjtThis; +} +| + +{ + return jjtThis; +} +| + +{ + return jjtThis; +} +| + +{ + return jjtThis; +} +| + +{ + return jjtThis; +} +| + +{ + return jjtThis; +} +| + +{ + return jjtThis; +} +| + +{ + return jjtThis; +} +} + + +SimpleNode Include() : { + String fname; + boolean found = false; +} +{ + + fname=.image +{ + // bugbug somewhat fragile below substring expression + fname = fname.substring(1,fname.length() - 1); + + // try to find the file on the include path + File f = thrift_grammar.findFile(fname, this.include_path); + if(f != null) { + found = true; + try { + FileInputStream fis = new FileInputStream(f); + thrift_grammar t = new thrift_grammar(fis,this.include_path, false); + t.Start(); + fis.close(); + found = true; + // add in what we found to our type and table tables. 
+ this.tables.putAll(t.tables); + this.types.putAll(t.types); + } catch (Exception e) { + System.out.println("File: " + fname + " - Oops."); + System.out.println(e.getMessage()); + e.printStackTrace(); + } + } + if(!found) { + throw new RuntimeException("include file not found: " + fname); + } + return jjtThis; +} +} + + +SimpleNode Definition() : {} +{ + Const() + { + return jjtThis; + } +| Service() + { + return jjtThis; + } +| TypeDefinition() + { + return jjtThis; + } +} + +SimpleNode TypeDefinition() : {} +{ + Typedef() + { + return jjtThis; + } +| Enum() + { + return jjtThis; + } +| Senum() + { + return jjtThis; + } +| Struct() + { + return jjtThis; + } +| Xception() + { + return jjtThis; + } + +} + +DynamicSerDeTypedef Typedef() : {} +{ + + DefinitionType() + jjtThis.name = .image + { + // store the type for later retrieval + this.types.put(jjtThis.name, jjtThis); + return jjtThis; + } +} + + +// returning void because we ignore this production. +void CommaOrSemicolon() : {} +{ + "," +| + ";" +{ +} +} + +SimpleNode Enum() : {} +{ + "{" EnumDefList() "}" + { + return jjtThis; + } +} + +SimpleNode EnumDefList() : {} +{ + (EnumDef())+ + { + return jjtThis; + } +} + +SimpleNode EnumDef() : {} +{ + ["=" ] [CommaOrSemicolon()] + { + return jjtThis; + } +} + +SimpleNode Senum() : {} +{ + "{" SenumDefList() "}" + { + return jjtThis; + } +} + +SimpleNode SenumDefList() : {} +{ + (SenumDef())+ + { + return jjtThis; + } +} + +SimpleNode SenumDef() : {} +{ + [CommaOrSemicolon()] + { + return jjtThis; + } +} + + +SimpleNode Const() : {} +{ + FieldType() "=" ConstValue() [CommaOrSemicolon()] + { + return jjtThis; + } +} + +SimpleNode ConstValue() : {} +{ + + { + } +| + { + } +| + { + } +| + { + } +| ConstList() + { + } +| ConstMap() + { + return jjtThis; + } +} + +SimpleNode ConstList() : {} +{ + "[" ConstListContents() "]" + { + return jjtThis; + } +} + +SimpleNode ConstListContents() : {} +{ + (ConstValue() [CommaOrSemicolon()])+ + { + return jjtThis; + } +} + +SimpleNode ConstMap() : {} +{ + "{" ConstMapContents() "}" + { + return jjtThis; + } +} + +SimpleNode ConstMapContents() : {} +{ + (ConstValue() ":" ConstValue() [CommaOrSemicolon()])+ + { + } +| + { + return jjtThis; + } +} + +DynamicSerDeStruct Struct() : { + +} +{ + + jjtThis.name = .image + "{" + FieldList() + "}" + { + this.types.put(jjtThis.name,jjtThis); + return jjtThis; + } +} + + +SimpleNode Xception() : {} +{ + "{" FieldList() "}" + { + return jjtThis; + } +} + + +SimpleNode Service() : {} +{ + + + Extends() + "{" + FlagArgs() + (Function())+ + UnflagArgs() + "}" + { + // at some point, these should be inserted as a "db" + return jjtThis; + } +} + +SimpleNode FlagArgs() : {} +{ + { + return jjtThis; + } +} + +SimpleNode UnflagArgs() : {} +{ + { + return jjtThis; + } +} + +SimpleNode Extends() : {} +{ + + { + return jjtThis; + } +| + { + return jjtThis; + } +} + + +DynamicSerDeFunction Function() : {} +{ + // metastore ignores async and type + Async() + FunctionType() + + // the name of the function/table + jjtThis.name = .image + "(" + FieldList() + ")" + Throws() + [CommaOrSemicolon()] + + { + this.tables.put(jjtThis.name, jjtThis); + return jjtThis; + } +} + +void Async() : {} +{ + +| +{} +} + +void Throws() : {} +{ + "(" FieldList() ")" +| +{} +} + + +// nothing special - just use the DynamicSerDeFieldList's children methods to access the fields +DynamicSerDeFieldList FieldList() : { + this.field_val = -1; +} +{ + (Field())* { + return jjtThis; + } +} + + +DynamicSerDeField Field() : { + + String fidnum = ""; + String fid; 
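+  // fidnum holds the optional explicit field id from the IDL; when none is given, the field
+  // is assigned the next negative auto-generated id from this.field_val (see below).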
+} +{ + + // parse the field id which is optional + [fidnum=.image ":"] + + // is this field required or optional? default is optional + FieldRequiredness() + + // field type - obviously not optional + FieldType() + + // the name of the field - not optional + jjtThis.name = .image + + // does it have = some value? + FieldValue() + + // take it or leave it + [CommaOrSemicolon()] + + { + if(fidnum.length() > 0) { + int fidInt = Integer.valueOf(fidnum); + jjtThis.fieldid = fidInt; + } else { + jjtThis.fieldid = this.field_val--; + } + return jjtThis; + } +} + + + +SimpleNode FieldRequiredness() : {} +{ + + { + return jjtThis; + } +| + { + return jjtThis; + } +| + { + return jjtThis; + } +} + +SimpleNode FieldValue() : {} +{ + "=" + ConstValue() + { + return jjtThis; + } +| +{ + return jjtThis; +} +} + +SimpleNode DefinitionType() : {} +{ +// BaseType() xxx + TypeString() + { + return jjtThis; + } +| TypeBool() + { + return jjtThis; + } +| Typei16() + { + return jjtThis; + } +| Typei32() + { + return jjtThis; + } +| Typei64() + { + return jjtThis; + } +| TypeDouble() + { + return jjtThis; + } +| TypeMap() + { + return jjtThis; + } +| TypeSet() + { + return jjtThis; + } +| TypeList() + { + return jjtThis; + } +} + +void FunctionType() : {} +{ + FieldType() +| +{} +} + +DynamicSerDeFieldType FieldType() : { +} + +{ + TypeString() + { + return jjtThis; + } +| TypeBool() + { + return jjtThis; + } +| Typei16() + { + return jjtThis; + } +| Typei32() + { + return jjtThis; + } +| Typei64() + { + return jjtThis; + } +| TypeDouble() + { + return jjtThis; + } +| + TypeMap() + { + return jjtThis; + } +| + TypeSet() + { + return jjtThis; + } +| + TypeList() + { + return jjtThis; + } +| + jjtThis.name = .image + { + return jjtThis; + } +} + +DynamicSerDeTypeString TypeString() : {} +{ + + { + return jjtThis; + } +} + +DynamicSerDeTypeByte TypeByte() : { +} +{ + + { + return jjtThis; + } +} + +DynamicSerDeTypei16 Typei16() : { +} +{ + + { + return jjtThis; + } +} + +DynamicSerDeTypei32 Typei32() : {} +{ + + { + return jjtThis; + } +} + +DynamicSerDeTypei64 Typei64() : {} +{ + + { + return jjtThis; + } +} + +DynamicSerDeTypeDouble TypeDouble() : {} +{ + + { + return jjtThis; + } +} + +DynamicSerDeTypeBool TypeBool() : {} +{ + + { + return jjtThis; + } +} + +DynamicSerDeTypeMap TypeMap() : {} +{ + + "<" + FieldType() + "," + FieldType() + ">" + { + return jjtThis; + } +} + +DynamicSerDeTypeSet TypeSet() : {} +{ + + "<" + + FieldType() + + ">" + { + return jjtThis; + } +} + +DynamicSerDeTypeList TypeList() : {} +{ + + "<" + + FieldType() + + ">" + { + return jjtThis; + } +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ReflectionStructObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ReflectionStructObjectInspector.java index 796cbf88f..5c77144da 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ReflectionStructObjectInspector.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ReflectionStructObjectInspector.java @@ -116,11 +116,16 @@ public Object getStructFieldData(Object data, StructField fieldRef) { if (data == null) { return null; } + if (!(fieldRef instanceof MyField)) { + throw new RuntimeException("fieldRef has to be of MyField"); + } + MyField f = (MyField) fieldRef; try { - MyField f = (MyField) fieldRef; - return f.field.get(data); + Object r = f.field.get(data); + return r; } catch (Exception e) { - throw new RuntimeException(e); + throw new RuntimeException("cannot get field " + f.field + " 
from " + + data.getClass() + " " + data); } } public List getStructFieldsDataAsList(Object data) { diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/thrift/ConfigurableTProtocol.java b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/ConfigurableTProtocol.java new file mode 100644 index 000000000..a501cc5ae --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/ConfigurableTProtocol.java @@ -0,0 +1,40 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.serde2.thrift; + +import org.apache.hadoop.conf.Configuration; +import java.util.Properties; +import com.facebook.thrift.TException; + +/** + * An interface for TProtocols that need to have properties passed in to + * initialize them. e.g., separators for TCTLSeparatedProtocol. + * If there was a regex like deserializer, the regex could be passed in + * in this manner. + */ +public interface ConfigurableTProtocol { + /** + * Initialize the TProtocol + * @param conf System properties + * @param tbl table properties + * @throws TException + */ + public void initialize(Configuration conf, Properties tbl) throws TException; + +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java new file mode 100644 index 000000000..71727ddb3 --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java @@ -0,0 +1,576 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + +package org.apache.hadoop.hive.serde2.thrift; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hive.serde.Constants; +import com.facebook.thrift.TException; +import com.facebook.thrift.transport.*; +import com.facebook.thrift.*; +import com.facebook.thrift.protocol.*; +import java.util.*; +import java.util.regex.Pattern; +import java.io.*; +import org.apache.hadoop.conf.Configuration; +import java.util.Properties; + +/** + * + * An implementation of the Thrift Protocol for ctl separated + * records. + * This is not thrift compliant in that it doesn't write out field ids + * so things cannot actually be versioned. + */ +public class TCTLSeparatedProtocol extends TProtocol implements ConfigurableTProtocol { + + final static Log LOG = LogFactory.getLog(TCTLSeparatedProtocol.class.getName()); + + /** + * Factory for JSON protocol objects + */ + public static class Factory implements TProtocolFactory { + + public TProtocol getProtocol(TTransport trans) { + return new TCTLSeparatedProtocol(trans); + } + + } + + /** + * These are defaults, but for now leaving them like this + */ + final static protected byte defaultPrimarySeparatorByte = 1; + final static protected byte defaultSecondarySeparatorByte = 2; + final static protected byte defaultRowSeparatorByte = (byte)'\n'; + final static protected byte defaultMapSeparatorByte = 3; + + /** + * The separators for this instance + */ + protected byte primarySeparatorByte; + protected byte secondarySeparatorByte; + protected byte rowSeparatorByte; + protected byte mapSeparatorByte; + protected Pattern primaryPattern; + protected Pattern secondaryPattern; + protected Pattern mapPattern; + + /** + * Inspect the separators this instance is configured with. + */ + public byte getPrimarySeparator() { return primarySeparatorByte; } + public byte getSecondarySeparator() { return secondarySeparatorByte; } + public byte getRowSeparator() { return rowSeparatorByte; } + public byte getMapSeparator() { return mapSeparatorByte; } + + + /** + * The transport stream is tokenized on the row separator + */ + protected SimpleTransportTokenizer transportTokenizer; + + /** + * For a single row, the split on the primary separator + */ + protected String columns[]; + + /** + * An index into what column we're on + */ + + protected int index; + + /** + * For a single column, a split on the secondary separator + */ + protected String fields[]; + + /** + * An index into what field within a column we're on + */ + protected int innerIndex; + + + /** + * Is this the first field we're writing + */ + protected boolean firstField; + + /** + * Is this the first list/map/set field we're writing for the current element + */ + protected boolean firstInnerField; + + + /** + * Are we writing a map and need to worry about k/v separator? + */ + protected boolean isMap; + + + /** + * For writes, on what element are we on so we know when to use normal list separator or + * for a map know when to use the k/v separator + */ + protected long elemIndex; + + + /** + * Are we currently on the top-level columns or parsing a column itself + */ + protected boolean inner; + + + /** + * For places where the separators are back to back, should we return a null or an empty string since it is ambiguous. + * This also applies to extra columns that are read but aren't in the current record. + */ + protected boolean returnNulls; + + /** + * The transport being wrapped. 
+ * + */ + final protected TTransport innerTransport; + + + /** + * Strings used to lookup the various configurable paramaters of this protocol. + */ + public final static String ReturnNullsKey = "separators.return_nulls"; + public final static String BufferSizeKey = "separators.buffer_size"; + + /** + * The size of the internal buffer to use. + */ + protected int bufferSize; + + /** + * A convenience class for tokenizing a TTransport + */ + + class SimpleTransportTokenizer { + + TTransport trans; + StringTokenizer tokenizer; + final String separator; + byte buf[]; + + public SimpleTransportTokenizer(TTransport trans, byte separator, int buffer_length) { + this.trans = trans; + byte [] separators = new byte[1]; + separators[0] = separator; + this.separator = new String(separators); + buf = new byte[buffer_length]; + fillTokenizer(); + } + + private boolean fillTokenizer() { + try { + int length = trans.read(buf, 0, buf.length); + if(length <=0 ) { + tokenizer = new StringTokenizer("", separator, true); + return false; + } + String row = new String(buf, 0, length); + tokenizer = new StringTokenizer(row, new String(separator), true); + } catch(TTransportException e) { + e.printStackTrace(); + tokenizer = null; + return false; + } + return true; + } + + public String nextToken() throws EOFException { + StringBuffer ret = null; + boolean done = false; + + while(! done) { + + if(! tokenizer.hasMoreTokens()) { + if(! fillTokenizer()) { + break; + } + } + + try { + final String nextToken = tokenizer.nextToken(); + + if(nextToken.equals(separator)) { + done = true; + } else if(ret == null) { + ret = new StringBuffer(nextToken); + } else { + ret.append(nextToken); + } + } catch(NoSuchElementException e) { + if (ret == null) { + throw new EOFException(e.getMessage()); + } + done = true; + } + } // while ! done + return ret == null ? null : ret.toString(); + } + }; + + + /** + * The simple constructor which assumes ctl-a, ctl-b and '\n' separators and to return empty strings for empty fields. 
+ * + * @param trans - the TTransport to use as input or output + * + */ + + public TCTLSeparatedProtocol(TTransport trans) { + this(trans, defaultPrimarySeparatorByte, defaultSecondarySeparatorByte, defaultMapSeparatorByte, defaultRowSeparatorByte, false, 4096); + } + + public TCTLSeparatedProtocol(TTransport trans, int buffer_size) { + this(trans, defaultPrimarySeparatorByte, defaultSecondarySeparatorByte, defaultMapSeparatorByte, defaultRowSeparatorByte, false, buffer_size); + } + + /** + * @param trans - the TTransport to use as input or output + * @param primarySeparatorByte the separator between columns (aka fields) + * @param secondarySeparatorByte the separator within a field for things like sets and maps and lists + * @param mapSeparatorByte - the key/value separator + * @param rowSeparatorByte - the record separator + * @param returnNulls - whether to return a null or an empty string for fields that seem empty (i.e. two primary separators back to back) + */ + + public TCTLSeparatedProtocol(TTransport trans, byte primarySeparatorByte, byte secondarySeparatorByte, byte mapSeparatorByte, byte rowSeparatorByte, + boolean returnNulls, + int bufferSize) { + super(trans); + + this.returnNulls = returnNulls; + + + this.primarySeparatorByte = primarySeparatorByte; + this.secondarySeparatorByte = secondarySeparatorByte; + this.rowSeparatorByte = rowSeparatorByte; + this.mapSeparatorByte = mapSeparatorByte; + + this.innerTransport = trans; + this.bufferSize = bufferSize; + + internalInitialize(); + } + + + /** + * Sets the internal separator patterns and creates the internal tokenizer. + */ + protected void internalInitialize() { + byte []primarySeparator = new byte[1]; + byte []secondarySeparator = new byte[1]; + primarySeparator[0] = primarySeparatorByte; + secondarySeparator[0] = secondarySeparatorByte; + + primaryPattern = Pattern.compile(new String(primarySeparator)); + secondaryPattern = Pattern.compile(new String(secondarySeparator)); + mapPattern = Pattern.compile("\\0" + secondarySeparatorByte + "|\\0" + mapSeparatorByte); + + transportTokenizer = new SimpleTransportTokenizer(innerTransport, rowSeparatorByte, bufferSize); + } + + /** + * Initialize the TProtocol + * @param conf System properties + * @param tbl table properties + * @throws TException + */ + public void initialize(Configuration conf, Properties tbl) throws TException { + primarySeparatorByte = Byte.valueOf(tbl.getProperty(Constants.FIELD_DELIM, String.valueOf(primarySeparatorByte))).byteValue(); + LOG.debug("collections delim=<" + tbl.getProperty(Constants.COLLECTION_DELIM) + ">" ); + secondarySeparatorByte = Byte.valueOf(tbl.getProperty(Constants.COLLECTION_DELIM, String.valueOf(secondarySeparatorByte))).byteValue(); + rowSeparatorByte = Byte.valueOf(tbl.getProperty(Constants.LINE_DELIM, String.valueOf(rowSeparatorByte))).byteValue(); + mapSeparatorByte = Byte.valueOf(tbl.getProperty(Constants.MAPKEY_DELIM, String.valueOf(mapSeparatorByte))).byteValue(); + returnNulls = Boolean.valueOf(tbl.getProperty(ReturnNullsKey, String.valueOf(returnNulls))).booleanValue(); + bufferSize = Integer.valueOf(tbl.getProperty(BufferSizeKey, String.valueOf(bufferSize))).intValue(); + + internalInitialize(); + + } + + public void writeMessageBegin(TMessage message) throws TException { + } + + public void writeMessageEnd() throws TException { + } + + public void writeStructBegin(TStruct struct) throws TException { + firstField = true; + } + + public void writeStructEnd() throws TException { + // We don't write rowSeparatorByte because that
should be handled by file format. + } + + public void writeFieldBegin(TField field) throws TException { + if(! firstField) { + writeByte(primarySeparatorByte); + } + firstField = false; + } + + public void writeFieldEnd() throws TException { + } + + public void writeFieldStop() { + } + + public void writeMapBegin(TMap map) throws TException { + // nesting not allowed! + if(map.keyType == TType.STRUCT || + map.keyType == TType.MAP || + map.keyType == TType.LIST || + map.keyType == TType.SET) { + throw new TException("Not implemented: nested structures"); + } + // nesting not allowed! + if(map.valueType == TType.STRUCT || + map.valueType == TType.MAP || + map.valueType == TType.LIST || + map.valueType == TType.SET) { + throw new TException("Not implemented: nested structures"); + } + + firstInnerField = true; + isMap = true; + inner = true; + elemIndex = 0; + } + + public void writeMapEnd() throws TException { + isMap = false; + inner = false; + } + + public void writeListBegin(TList list) throws TException { + if(list.elemType == TType.STRUCT || + list.elemType == TType.MAP || + list.elemType == TType.LIST || + list.elemType == TType.SET) { + throw new TException("Not implemented: nested structures"); + } + firstInnerField = true; + inner = true; + } + + public void writeListEnd() throws TException { + inner = false; + } + + public void writeSetBegin(TSet set) throws TException { + if(set.elemType == TType.STRUCT || + set.elemType == TType.MAP || + set.elemType == TType.LIST || + set.elemType == TType.SET) { + throw new TException("Not implemented: nested structures"); + } + firstInnerField = true; + inner = true; + } + + public void writeSetEnd() throws TException { + inner = false; + } + + public void writeBool(boolean b) throws TException { + writeString(String.valueOf(b)); + } + + // for writing out single byte + private byte buf[] = new byte[1]; + public void writeByte(byte b) throws TException { + buf[0] = b; + trans_.write(buf); + } + + public void writeI16(short i16) throws TException { + writeString(String.valueOf(i16)); + } + + public void writeI32(int i32) throws TException { + writeString(String.valueOf(i32)); + } + + public void writeI64(long i64) throws TException { + writeString(String.valueOf(i64)); + } + + public void writeDouble(double dub) throws TException { + writeString(String.valueOf(dub)); + } + + public void writeString(String str) throws TException { + if(inner) { + if(!firstInnerField) { + // super hack city notice the mod plus only happens after firstfield hit, so == 0 is right. 
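+        // For illustration: with the default separators a two-entry map is emitted as
+        // key1^Cval1^Bkey2^Cval2, i.e. an even elemIndex sits between a key and its value
+        // (k/v separator) and an odd one sits between two pairs (collection separator).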
+ if(isMap && elemIndex++ % 2 == 0) { + writeByte(mapSeparatorByte); + } else { + writeByte(secondarySeparatorByte); + } + } else { + firstInnerField = false; + } + } + final byte buf[] = str.getBytes(); + trans_.write(buf, 0, buf.length); + } + + public void writeBinary(byte[] bin) throws TException { + throw new TException("Ctl separated protocol cannot support writing Binary data!"); + } + + public TMessage readMessageBegin() throws TException { + return new TMessage(); + } + + public void readMessageEnd() throws TException { + } + + public TStruct readStructBegin() throws TException { + assert(!inner); + try { + final String tmp = transportTokenizer.nextToken(); + columns = primaryPattern.split(tmp); + index = 0; + return new TStruct(); + } catch(EOFException e) { + return null; + } + } + + public void readStructEnd() throws TException { + columns = null; + } + + public TField readFieldBegin() throws TException { + assert( !inner); + TField f = new TField(); + // slight hack to communicate to DynamicSerDe that the field ids are not being set but things are ordered. + f.type = -1; + return f; + } + + public void readFieldEnd() throws TException { + fields = null; + } + + public TMap readMapBegin() throws TException { + assert( !inner); + TMap map = new TMap(); + fields = mapPattern.split(columns[index++]); + if(fields != null) { + map.size = fields.length/2; + } else { + map.size = 0; + } + innerIndex = 0; + inner = true; + isMap = true; + return map; + } + + public void readMapEnd() throws TException { + inner = false; + isMap = false; + } + + public TList readListBegin() throws TException { + assert( !inner); + TList list = new TList(); + fields = secondaryPattern.split(columns[index++]); + if(fields != null) { + list.size = fields.length ; + } else { + list.size = 0; + } + innerIndex = 0; + inner = true; + return list; + } + + public void readListEnd() throws TException { + inner = false; + } + + public TSet readSetBegin() throws TException { + assert( !inner); + TSet set = new TSet(); + fields = secondaryPattern.split(columns[index++]); + if(fields != null) { + set.size = fields.length ; + } else { + set.size = 0; + } + inner = true; + innerIndex = 0; + return set; + } + + public void readSetEnd() throws TException { + inner = false; + } + public boolean readBool() throws TException { + return Boolean.valueOf(readString()).booleanValue(); + } + + public byte readByte() throws TException { + return Byte.valueOf(readString()).byteValue(); + } + + public short readI16() throws TException { + return Short.valueOf(readString()).shortValue(); + } + + public int readI32() throws TException { + return Integer.valueOf(readString()).intValue(); + } + + public long readI64() throws TException { + return Long.valueOf(readString()).longValue(); + } + + public double readDouble() throws TException { + return Double.valueOf(readString()).doubleValue(); + } + + protected String [] curMapPair; + public String readString() throws TException { + String ret; + if(!inner) { + ret = columns != null && index < columns.length ? columns[index++] : null; + } else { + ret = fields != null && innerIndex < fields.length ? fields[innerIndex++] : null; + } + return ret == null && ! returnNulls ? 
"" : ret; + } + + public byte[] readBinary() throws TException { + throw new TException("Not implemented for control separated data"); + } +} diff --git a/serde/src/test/org/apache/hadoop/hive/serde/TestSerDeUtils.java b/serde/src/test/org/apache/hadoop/hive/serde/TestSerDeUtils.java index 0cd1526d4..ebe99d377 100644 --- a/serde/src/test/org/apache/hadoop/hive/serde/TestSerDeUtils.java +++ b/serde/src/test/org/apache/hadoop/hive/serde/TestSerDeUtils.java @@ -31,7 +31,7 @@ public TestSerDeUtils() throws Exception { public void testLookupSerDe() throws Exception { try { - String name = ThriftSerDe.shortName(); + String name = ThriftSerDe.class.getName(); SerDe s = SerDeUtils.lookupSerDe(name); assertTrue(s.getClass().getName().equals(org.apache.hadoop.hive.serde.thrift.ThriftSerDe.class.getName())); } catch(Exception e) { diff --git a/serde/src/test/org/apache/hadoop/hive/serde/TestTCTLSeparatedProtocol.java b/serde/src/test/org/apache/hadoop/hive/serde/TestTCTLSeparatedProtocol.java deleted file mode 100644 index 348964799..000000000 --- a/serde/src/test/org/apache/hadoop/hive/serde/TestTCTLSeparatedProtocol.java +++ /dev/null @@ -1,83 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.serde.thrift; - -import junit.framework.TestCase; -import java.io.*; -import org.apache.hadoop.hive.serde.*; -import org.apache.hadoop.hive.serde.thrift.*; -import java.util.*; -import com.facebook.thrift.TException; -import com.facebook.thrift.transport.*; -import com.facebook.thrift.*; -import com.facebook.thrift.protocol.*; - -public class TestTCTLSeparatedProtocol extends TestCase { - - public TestTCTLSeparatedProtocol() throws Exception { - } - - public void testLookupSerDe() throws Exception { - try { - TMemoryBuffer trans = new TMemoryBuffer(1024); - String foo = "Hello"; - String bar = "World!"; - - byte separator [] = { TCTLSeparatedProtocol.defaultPrimarySeparatorChar_ }; - - - trans.write(foo.getBytes(), 0, foo.getBytes().length); - trans.write(separator, 0, 1); - trans.write(separator, 0, 1); - trans.write(bar.getBytes(), 0, bar.getBytes().length); - trans.flush(); - - // use 3 as the row buffer size to force lots of re-buffering. 
- TCTLSeparatedProtocol prot = new TCTLSeparatedProtocol(trans, 3); - - prot.readStructBegin(); - - prot.readFieldBegin(); - String hello = prot.readString(); - prot.readFieldEnd(); - - assertTrue(hello.equals(foo)); - - prot.readFieldBegin(); - hello = prot.readString(); - prot.readFieldEnd(); - - assertTrue(hello.equals("")); - - prot.readFieldBegin(); - hello = prot.readString(); - prot.readFieldEnd(); - - assertTrue(hello.equals(bar)); - - prot.readFieldBegin(); - hello = prot.readString(); - prot.readFieldEnd(); - - assertTrue(hello.length() == 0); - } catch(Exception e) { - e.printStackTrace(); - } - } -} diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/TestTCTLSeparatedProtocol.java b/serde/src/test/org/apache/hadoop/hive/serde2/TestTCTLSeparatedProtocol.java new file mode 100644 index 000000000..741287dcd --- /dev/null +++ b/serde/src/test/org/apache/hadoop/hive/serde2/TestTCTLSeparatedProtocol.java @@ -0,0 +1,245 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.serde2; + +import junit.framework.TestCase; +import java.io.*; +import org.apache.hadoop.hive.serde2.*; +import org.apache.hadoop.hive.serde2.thrift.*; +import java.util.*; +import com.facebook.thrift.TException; +import com.facebook.thrift.transport.*; +import com.facebook.thrift.*; +import com.facebook.thrift.protocol.*; + +public class TestTCTLSeparatedProtocol extends TestCase { + + public TestTCTLSeparatedProtocol() throws Exception { + } + + public void testReads() throws Exception { + try { + TMemoryBuffer trans = new TMemoryBuffer(1024); + String foo = "Hello"; + String bar = "World!"; + + String key = "22"; + String value = "TheValue"; + String key2 = "24"; + String value2 = "TheValueAgain"; + + byte columnSeparator [] = { 1 }; + byte elementSeparator [] = { 2 }; + byte kvSeparator [] = { 3 }; + + + trans.write(foo.getBytes(), 0, foo.getBytes().length); + trans.write(columnSeparator, 0, 1); + + trans.write(columnSeparator, 0, 1); + + trans.write(bar.getBytes(), 0, bar.getBytes().length); + trans.write(columnSeparator, 0, 1); + + trans.write(key.getBytes(), 0, key.getBytes().length); + trans.write(kvSeparator, 0, 1); + trans.write(value.getBytes(), 0, value.getBytes().length); + trans.write(elementSeparator, 0, 1); + + trans.write(key2.getBytes(), 0, key2.getBytes().length); + trans.write(kvSeparator, 0, 1); + trans.write(value2.getBytes(), 0, value2.getBytes().length); + + + trans.flush(); + + + // use 3 as the row buffer size to force lots of re-buffering. 
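+      // (a 3 byte internal buffer makes SimpleTransportTokenizer refill on nearly every token,
+      // so nextToken() has to stitch tokens back together across reads)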
+ TCTLSeparatedProtocol prot = new TCTLSeparatedProtocol(trans, 3); + + prot.readStructBegin(); + + prot.readFieldBegin(); + String hello = prot.readString(); + prot.readFieldEnd(); + + assertTrue(hello.equals(foo)); + + prot.readFieldBegin(); + assertTrue(prot.readString().equals("")); + prot.readFieldEnd(); + + prot.readFieldBegin(); + assertTrue(prot.readString().equals(bar)); + prot.readFieldEnd(); + + prot.readFieldBegin(); + TMap mapHeader = prot.readMapBegin(); + assertTrue(mapHeader.size == 2); + + assertTrue(prot.readI32() == 22); + assertTrue(prot.readString().equals(value)); + assertTrue(prot.readI32() == 24); + assertTrue(prot.readString().equals(value2)); + prot.readMapEnd(); + prot.readFieldEnd(); + + prot.readFieldBegin(); + hello = prot.readString(); + prot.readFieldEnd(); + assertTrue(hello.length() == 0); + + prot.readStructEnd(); + + } catch(Exception e) { + e.printStackTrace(); + } + } + + + public void testWrites() throws Exception { + try { + TMemoryBuffer trans = new TMemoryBuffer(1024); + TCTLSeparatedProtocol prot = new TCTLSeparatedProtocol(trans, 3); + + prot.writeStructBegin(new TStruct()); + prot.writeFieldBegin(new TField()); + prot.writeI32(100); + prot.writeFieldEnd(); + + prot.writeFieldBegin(new TField()); + prot.writeListBegin(new TList()); + prot.writeDouble(348.55); + prot.writeDouble(234.22); + prot.writeListEnd(); + prot.writeFieldEnd(); + + prot.writeFieldBegin(new TField()); + prot.writeString("hello world!"); + prot.writeFieldEnd(); + + prot.writeFieldBegin(new TField()); + prot.writeMapBegin(new TMap()); + prot.writeString("key1"); + prot.writeString("val1"); + prot.writeString("key2"); + prot.writeString("val2"); + prot.writeString("key3"); + prot.writeString("val3"); + prot.writeMapEnd(); + prot.writeFieldEnd(); + + prot.writeFieldBegin(new TField()); + prot.writeListBegin(new TList()); + prot.writeString("elem1"); + prot.writeString("elem2"); + prot.writeListEnd(); + prot.writeFieldEnd(); + + + prot.writeFieldBegin(new TField()); + prot.writeString("bye!"); + prot.writeFieldEnd(); + + prot.writeStructEnd(); + trans.flush(); + byte b[] = new byte[3*1024]; + int len = trans.read(b,0,b.length); + String test = new String(b, 0, len); + + String testRef = "100348.55234.22hello world!key1val1key2val2key3val3elem1elem2bye!"; + // System.err.println("test=" + test + ">"); + // System.err.println(" ref=" + testRef + ">"); + assertTrue(test.equals(testRef)); + + trans = new TMemoryBuffer(1023); + trans.write(b, 0, len); + + // + // read back! 
+ // + + prot = new TCTLSeparatedProtocol(trans, 10); + + // 100 is the start + prot.readStructBegin(); + prot.readFieldBegin(); + assertTrue(prot.readI32() == 100); + prot.readFieldEnd(); + + // let's see if doubles work ok + prot.readFieldBegin(); + TList l = prot.readListBegin(); + assertTrue(l.size == 2); + assertTrue(prot.readDouble() == 348.55); + assertTrue(prot.readDouble() == 234.22); + prot.readListEnd(); + prot.readFieldEnd(); + + // nice message + prot.readFieldBegin(); + assertTrue(prot.readString().equals("hello world!")); + prot.readFieldEnd(); + + // 3 element map + prot.readFieldBegin(); + TMap m = prot.readMapBegin(); + assertTrue(m.size == 3); + assertTrue(prot.readString().equals("key1")); + assertTrue(prot.readString().equals("val1")); + assertTrue(prot.readString().equals("key2")); + assertTrue(prot.readString().equals("val2")); + assertTrue(prot.readString().equals("key3")); + assertTrue(prot.readString().equals("val3")); + prot.readMapEnd(); + prot.readFieldEnd(); + + // the 2 element list + prot.readFieldBegin(); + l = prot.readListBegin(); + assertTrue(l.size == 2); + assertTrue(prot.readString().equals("elem1")); + assertTrue(prot.readString().equals("elem2")); + prot.readListEnd(); + prot.readFieldEnd(); + + // final string + prot.readFieldBegin(); + assertTrue(prot.readString().equals("bye!")); + prot.readFieldEnd(); + + // shouldl return nulls at end + prot.readFieldBegin(); + assertTrue(prot.readString().equals("")); + prot.readFieldEnd(); + + // shouldl return nulls at end + prot.readFieldBegin(); + assertTrue(prot.readString().equals("")); + prot.readFieldEnd(); + + prot.readStructEnd(); + + + } catch(Exception e) { + e.printStackTrace(); + } + } + +} diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/dynamic_type/TestDynamicSerDe.java b/serde/src/test/org/apache/hadoop/hive/serde2/dynamic_type/TestDynamicSerDe.java new file mode 100644 index 000000000..8f66e7b2a --- /dev/null +++ b/serde/src/test/org/apache/hadoop/hive/serde2/dynamic_type/TestDynamicSerDe.java @@ -0,0 +1,204 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hive.serde2.dynamic_type; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Properties; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.serde.Constants; +import org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol; +import org.apache.hadoop.hive.serde.Constants; + +import junit.framework.TestCase; +import org.apache.hadoop.io.BytesWritable; + +public class TestDynamicSerDe extends TestCase { + + public void testDynamicSerDe() throws Throwable { + try { + + // Try to construct an object + ArrayList bye = new ArrayList(); + bye.add("firstString"); + bye.add("secondString"); + HashMap another = new HashMap(); + another.put("firstKey", 1); + another.put("secondKey", 2); + ArrayList struct = new ArrayList(); + struct.add(Integer.valueOf(234)); + struct.add(bye); + struct.add(another); + + // All protocols + ArrayList protocols = new ArrayList(); + ArrayList isBinaries = new ArrayList(); + + protocols.add(com.facebook.thrift.protocol.TBinaryProtocol.class.getName()); + isBinaries.add(true); + + protocols.add(com.facebook.thrift.protocol.TJSONProtocol.class.getName()); + isBinaries.add(false); + + // TSimpleJSONProtocol does not support deserialization. + // protocols.add(com.facebook.thrift.protocol.TSimpleJSONProtocol.class.getName()); + // isBinaries.add(false); + + // TCTLSeparatedProtocol is not done yet. + protocols.add(org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol.class.getName()); + isBinaries.add(false); + + System.out.println("input struct = " + struct); + + for(int pp = 0; pp bye, map another}"); + schema.setProperty(Constants.SERIALIZATION_LIB, new DynamicSerDe().getClass().toString()); + + DynamicSerDe serde = new DynamicSerDe(); + serde.initialize(new Configuration(), schema); + + // Try getObjectInspector + ObjectInspector oi = serde.getObjectInspector(); + System.out.println("TypeName = " + oi.getTypeName()); + + + // Try to serialize + BytesWritable bytes = (BytesWritable) serde.serialize(struct, oi); + + StringBuilder sb = new StringBuilder(); + for (int i=0; i another = new HashMap(); + another.put("firstKey", 1); + another.put("secondKey", 2); + ArrayList struct = new ArrayList(); + struct.add(Integer.valueOf(234)); + struct.add(bye); + struct.add(another); + + Properties schema = new Properties(); + schema.setProperty(Constants.SERIALIZATION_FORMAT, org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol.class.getName()); + schema.setProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_NAME, "test"); + schema.setProperty(Constants.SERIALIZATION_DDL, + "struct test { i32 hello, list bye, map another}"); + schema.setProperty(Constants.SERIALIZATION_LIB, new DynamicSerDe().getClass().toString()); + + schema.setProperty(Constants.FIELD_DELIM, "9"); + schema.setProperty(Constants.COLLECTION_DELIM, "1"); + schema.setProperty(Constants.LINE_DELIM, "2"); + schema.setProperty(Constants.MAPKEY_DELIM, "4"); + + DynamicSerDe serde = new DynamicSerDe(); + serde.initialize(new Configuration(), schema); + + TCTLSeparatedProtocol prot = (TCTLSeparatedProtocol)serde.oprot_; + assertTrue(prot.getPrimarySeparator() == 9); + + ObjectInspector oi = serde.getObjectInspector(); + + // Try to serialize + BytesWritable bytes = (BytesWritable) serde.serialize(struct, oi); + + StringBuilder sb = 
new StringBuilder(); + for (int i=0; i"); + System.out.println("compare to =" + compare + ">"); + + assertTrue(compare.equals( new String(bytes.get(), 0, bytes.getSize()))); + + // Try to deserialize + Object o = serde.deserialize(bytes); + System.out.println("o class = " + o.getClass()); + List olist = (List)o; + System.out.println("o size = " + olist.size()); + System.out.println("o[0] class = " + olist.get(0).getClass()); + System.out.println("o[1] class = " + olist.get(1).getClass()); + System.out.println("o[2] class = " + olist.get(2).getClass()); + System.out.println("o = " + o); + + assertEquals(o, struct); + + } catch (Throwable e) { + e.printStackTrace(); + throw e; + } + + } +} diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestObjectInspectorUtils.java b/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestObjectInspectorUtils.java index 90c0dcc52..3d0b7335e 100644 --- a/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestObjectInspectorUtils.java +++ b/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestObjectInspectorUtils.java @@ -22,8 +22,8 @@ import java.util.List; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category; -import org.apache.hadoop.hive.serde2.thrift_test.Complex; -import org.apache.hadoop.hive.serde2.thrift_test.IntString; +import org.apache.hadoop.hive.serde2.thrift.test.Complex; +import org.apache.hadoop.hive.serde2.thrift.test.IntString; import junit.framework.TestCase; @@ -50,14 +50,14 @@ public void testObjectInspectorUtils() throws Throwable { // real object Complex cc = new Complex(); cc.aint = 1; - cc.astring = "test"; + cc.aString = "test"; List c2 = Arrays.asList(new Integer[]{1,2,3}); cc.lint = c2; List c3 = Arrays.asList(new String[]{"one", "two"}); - cc.lstring = c3; + cc.lString = c3; List c4 = new ArrayList(); - cc.lintstring = c4; - cc.mstringstring = null; + cc.lintString = c4; + cc.mStringString = null; // standard object Object c = ObjectInspectorUtils.getStandardObject(cc, oi1); diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestThriftObjectInspectors.java b/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestThriftObjectInspectors.java index 2863d7903..0692d025f 100644 --- a/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestThriftObjectInspectors.java +++ b/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestThriftObjectInspectors.java @@ -22,8 +22,8 @@ import java.util.List; import junit.framework.TestCase; -import org.apache.hadoop.hive.serde2.thrift_test.Complex; -import org.apache.hadoop.hive.serde2.thrift_test.IntString; +import org.apache.hadoop.hive.serde2.thrift.test.Complex; +import org.apache.hadoop.hive.serde2.thrift.test.IntString; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category; @@ -53,14 +53,14 @@ public void testThriftObjectInspectors() throws Throwable { // real object Complex c = new Complex(); c.aint = 1; - c.astring = "test"; + c.aString = "test"; List c2 = Arrays.asList(new Integer[]{1,2,3}); c.lint = c2; List c3 = Arrays.asList(new String[]{"one", "two"}); - c.lstring = c3; + c.lString = c3; List c4 = new ArrayList(); - c.lintstring = c4; - c.mstringstring = null; + c.lintString = c4; + c.mStringString = null; assertEquals(1, soi.getStructFieldData(c, fields.get(0))); assertEquals("test", soi.getStructFieldData(c, fields.get(1))); diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/thrift_test/Complex.java 
b/serde/src/test/org/apache/hadoop/hive/serde2/thrift_test/Complex.java deleted file mode 100644 index 431ae0b29..000000000 --- a/serde/src/test/org/apache/hadoop/hive/serde2/thrift_test/Complex.java +++ /dev/null @@ -1,363 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** - * Autogenerated by Thrift - * - * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING - */ -package org.apache.hadoop.hive.serde2.thrift_test; - -import java.util.List; -import java.util.ArrayList; -import java.util.Map; -import java.util.HashMap; -import java.util.Set; -import java.util.HashSet; -import com.facebook.thrift.*; - -import com.facebook.thrift.protocol.*; -import com.facebook.thrift.transport.*; - -public class Complex implements TBase, java.io.Serializable { - public int aint; - public String astring; - public List lint; - public List lstring; - public List lintstring; - public Map mstringstring; - - public final Isset __isset = new Isset(); - public static final class Isset implements java.io.Serializable { - public boolean aint = false; - public boolean astring = false; - public boolean lint = false; - public boolean lstring = false; - public boolean lintstring = false; - public boolean mstringstring = false; - } - - public Complex() { - } - - public Complex( - int aint, - String astring, - List lint, - List lstring, - List lintstring, - Map mstringstring) - { - this(); - this.aint = aint; - this.__isset.aint = true; - this.astring = astring; - this.__isset.astring = true; - this.lint = lint; - this.__isset.lint = true; - this.lstring = lstring; - this.__isset.lstring = true; - this.lintstring = lintstring; - this.__isset.lintstring = true; - this.mstringstring = mstringstring; - this.__isset.mstringstring = true; - } - - public boolean equals(Object that) { - if (that == null) - return false; - if (that instanceof Complex) - return this.equals((Complex)that); - return false; - } - - public boolean equals(Complex that) { - if (that == null) - return false; - - boolean this_present_aint = true; - boolean that_present_aint = true; - if (this_present_aint || that_present_aint) { - if (!(this_present_aint && that_present_aint)) - return false; - if (this.aint != that.aint) - return false; - } - - boolean this_present_astring = true && (this.astring != null); - boolean that_present_astring = true && (that.astring != null); - if (this_present_astring || that_present_astring) { - if (!(this_present_astring && that_present_astring)) - return false; - if (!this.astring.equals(that.astring)) - return false; - } - - boolean this_present_lint = true && (this.lint != null); - boolean that_present_lint = true && (that.lint != null); - if (this_present_lint || that_present_lint) { - if (!(this_present_lint && that_present_lint)) - return 
false; - if (!this.lint.equals(that.lint)) - return false; - } - - boolean this_present_lstring = true && (this.lstring != null); - boolean that_present_lstring = true && (that.lstring != null); - if (this_present_lstring || that_present_lstring) { - if (!(this_present_lstring && that_present_lstring)) - return false; - if (!this.lstring.equals(that.lstring)) - return false; - } - - boolean this_present_lintstring = true && (this.lintstring != null); - boolean that_present_lintstring = true && (that.lintstring != null); - if (this_present_lintstring || that_present_lintstring) { - if (!(this_present_lintstring && that_present_lintstring)) - return false; - if (!this.lintstring.equals(that.lintstring)) - return false; - } - - boolean this_present_mstringstring = true && (this.mstringstring != null); - boolean that_present_mstringstring = true && (that.mstringstring != null); - if (this_present_mstringstring || that_present_mstringstring) { - if (!(this_present_mstringstring && that_present_mstringstring)) - return false; - if (!this.mstringstring.equals(that.mstringstring)) - return false; - } - - return true; - } - - public int hashCode() { - return 0; - } - - public void read(TProtocol iprot) throws TException { - TField field; - iprot.readStructBegin(); - while (true) - { - field = iprot.readFieldBegin(); - if (field.type == TType.STOP) { - break; - } - switch (field.id) - { - case 1: - if (field.type == TType.I32) { - this.aint = iprot.readI32(); - this.__isset.aint = true; - } else { - TProtocolUtil.skip(iprot, field.type); - } - break; - case 2: - if (field.type == TType.STRING) { - this.astring = iprot.readString(); - this.__isset.astring = true; - } else { - TProtocolUtil.skip(iprot, field.type); - } - break; - case 3: - if (field.type == TType.LIST) { - { - TList _list0 = iprot.readListBegin(); - this.lint = new ArrayList(_list0.size); - for (int _i1 = 0; _i1 < _list0.size; ++_i1) - { - int _elem2 = 0; - _elem2 = iprot.readI32(); - this.lint.add(_elem2); - } - iprot.readListEnd(); - } - this.__isset.lint = true; - } else { - TProtocolUtil.skip(iprot, field.type); - } - break; - case 4: - if (field.type == TType.LIST) { - { - TList _list3 = iprot.readListBegin(); - this.lstring = new ArrayList(_list3.size); - for (int _i4 = 0; _i4 < _list3.size; ++_i4) - { - String _elem5 = null; - _elem5 = iprot.readString(); - this.lstring.add(_elem5); - } - iprot.readListEnd(); - } - this.__isset.lstring = true; - } else { - TProtocolUtil.skip(iprot, field.type); - } - break; - case 5: - if (field.type == TType.LIST) { - { - TList _list6 = iprot.readListBegin(); - this.lintstring = new ArrayList(_list6.size); - for (int _i7 = 0; _i7 < _list6.size; ++_i7) - { - IntString _elem8 = new IntString(); - _elem8 = new IntString(); - _elem8.read(iprot); - this.lintstring.add(_elem8); - } - iprot.readListEnd(); - } - this.__isset.lintstring = true; - } else { - TProtocolUtil.skip(iprot, field.type); - } - break; - case 6: - if (field.type == TType.MAP) { - { - TMap _map9 = iprot.readMapBegin(); - this.mstringstring = new HashMap(2*_map9.size); - for (int _i10 = 0; _i10 < _map9.size; ++_i10) - { - String _key11; - String _val12; - _key11 = iprot.readString(); - _val12 = iprot.readString(); - this.mstringstring.put(_key11, _val12); - } - iprot.readMapEnd(); - } - this.__isset.mstringstring = true; - } else { - TProtocolUtil.skip(iprot, field.type); - } - break; - default: - TProtocolUtil.skip(iprot, field.type); - break; - } - iprot.readFieldEnd(); - } - iprot.readStructEnd(); - } - - public void 
write(TProtocol oprot) throws TException { - TStruct struct = new TStruct("Complex"); - oprot.writeStructBegin(struct); - TField field = new TField(); - field.name = "aint"; - field.type = TType.I32; - field.id = 1; - oprot.writeFieldBegin(field); - oprot.writeI32(this.aint); - oprot.writeFieldEnd(); - if (this.astring != null) { - field.name = "astring"; - field.type = TType.STRING; - field.id = 2; - oprot.writeFieldBegin(field); - oprot.writeString(this.astring); - oprot.writeFieldEnd(); - } - if (this.lint != null) { - field.name = "lint"; - field.type = TType.LIST; - field.id = 3; - oprot.writeFieldBegin(field); - { - oprot.writeListBegin(new TList(TType.I32, this.lint.size())); - for (int _iter13 : this.lint) { - oprot.writeI32(_iter13); - } - oprot.writeListEnd(); - } - oprot.writeFieldEnd(); - } - if (this.lstring != null) { - field.name = "lstring"; - field.type = TType.LIST; - field.id = 4; - oprot.writeFieldBegin(field); - { - oprot.writeListBegin(new TList(TType.STRING, this.lstring.size())); - for (String _iter14 : this.lstring) { - oprot.writeString(_iter14); - } - oprot.writeListEnd(); - } - oprot.writeFieldEnd(); - } - if (this.lintstring != null) { - field.name = "lintstring"; - field.type = TType.LIST; - field.id = 5; - oprot.writeFieldBegin(field); - { - oprot.writeListBegin(new TList(TType.STRUCT, this.lintstring.size())); - for (IntString _iter15 : this.lintstring) { - _iter15.write(oprot); - } - oprot.writeListEnd(); - } - oprot.writeFieldEnd(); - } - if (this.mstringstring != null) { - field.name = "mstringstring"; - field.type = TType.MAP; - field.id = 6; - oprot.writeFieldBegin(field); - { - oprot.writeMapBegin(new TMap(TType.STRING, TType.STRING, this.mstringstring.size())); - for (String _iter16 : this.mstringstring.keySet()) { - oprot.writeString(_iter16); - oprot.writeString(this.mstringstring.get(_iter16)); - } - oprot.writeMapEnd(); - } - oprot.writeFieldEnd(); - } - oprot.writeFieldStop(); - oprot.writeStructEnd(); - } - - public String toString() { - StringBuilder sb = new StringBuilder("Complex("); - sb.append("aint:"); - sb.append(this.aint); - sb.append(",astring:"); - sb.append(this.astring); - sb.append(",lint:"); - sb.append(this.lint); - sb.append(",lstring:"); - sb.append(this.lstring); - sb.append(",lintstring:"); - sb.append(this.lintstring); - sb.append(",mstringstring:"); - sb.append(this.mstringstring); - sb.append(")"); - return sb.toString(); - } - -} - diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/thrift_test/IntString.java b/serde/src/test/org/apache/hadoop/hive/serde2/thrift_test/IntString.java deleted file mode 100644 index 81d342ae3..000000000 --- a/serde/src/test/org/apache/hadoop/hive/serde2/thrift_test/IntString.java +++ /dev/null @@ -1,167 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** - * Autogenerated by Thrift - * - * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING - */ -package org.apache.hadoop.hive.serde2.thrift_test; - -import java.util.List; -import java.util.ArrayList; -import java.util.Map; -import java.util.HashMap; -import java.util.Set; -import java.util.HashSet; -import com.facebook.thrift.*; - -import com.facebook.thrift.protocol.*; -import com.facebook.thrift.transport.*; - -public class IntString implements TBase, java.io.Serializable { - public int myint; - public String mystring; - - public final Isset __isset = new Isset(); - public static final class Isset implements java.io.Serializable { - public boolean myint = false; - public boolean mystring = false; - } - - public IntString() { - } - - public IntString( - int myint, - String mystring) - { - this(); - this.myint = myint; - this.__isset.myint = true; - this.mystring = mystring; - this.__isset.mystring = true; - } - - public boolean equals(Object that) { - if (that == null) - return false; - if (that instanceof IntString) - return this.equals((IntString)that); - return false; - } - - public boolean equals(IntString that) { - if (that == null) - return false; - - boolean this_present_myint = true; - boolean that_present_myint = true; - if (this_present_myint || that_present_myint) { - if (!(this_present_myint && that_present_myint)) - return false; - if (this.myint != that.myint) - return false; - } - - boolean this_present_mystring = true && (this.mystring != null); - boolean that_present_mystring = true && (that.mystring != null); - if (this_present_mystring || that_present_mystring) { - if (!(this_present_mystring && that_present_mystring)) - return false; - if (!this.mystring.equals(that.mystring)) - return false; - } - - return true; - } - - public int hashCode() { - return 0; - } - - public void read(TProtocol iprot) throws TException { - TField field; - iprot.readStructBegin(); - while (true) - { - field = iprot.readFieldBegin(); - if (field.type == TType.STOP) { - break; - } - switch (field.id) - { - case 1: - if (field.type == TType.I32) { - this.myint = iprot.readI32(); - this.__isset.myint = true; - } else { - TProtocolUtil.skip(iprot, field.type); - } - break; - case 2: - if (field.type == TType.STRING) { - this.mystring = iprot.readString(); - this.__isset.mystring = true; - } else { - TProtocolUtil.skip(iprot, field.type); - } - break; - default: - TProtocolUtil.skip(iprot, field.type); - break; - } - iprot.readFieldEnd(); - } - iprot.readStructEnd(); - } - - public void write(TProtocol oprot) throws TException { - TStruct struct = new TStruct("IntString"); - oprot.writeStructBegin(struct); - TField field = new TField(); - field.name = "myint"; - field.type = TType.I32; - field.id = 1; - oprot.writeFieldBegin(field); - oprot.writeI32(this.myint); - oprot.writeFieldEnd(); - if (this.mystring != null) { - field.name = "mystring"; - field.type = TType.STRING; - field.id = 2; - oprot.writeFieldBegin(field); - oprot.writeString(this.mystring); - oprot.writeFieldEnd(); - } - oprot.writeFieldStop(); - oprot.writeStructEnd(); - } - - public String toString() { - StringBuilder sb = new StringBuilder("IntString("); - sb.append("myint:"); - sb.append(this.myint); - sb.append(",mystring:"); - sb.append(this.mystring); - sb.append(")"); - return sb.toString(); - } - -} - diff --git a/testutils/run_tests b/testutils/run_tests index d5b085c1f..9c76880a4 100755 --- 
a/testutils/run_tests +++ b/testutils/run_tests @@ -2,9 +2,11 @@ d=`mktemp -d /tmp/hivetest_XXXX` for i in `find . -name Test\*\.* | grep -v svn | egrep "java$|vm$" | sed 's/.*\/Test/Test/g' | sed 's/\.java//g' | sed 's/\.vm//g' | sort`; do - cmd="ant -lib ../../../lib -Dtestcase=$i clean-test test -logfile $d/$i.log" - echo $cmd; - $cmd; + if [ "$i" != "TestSerDe" ]; then + cmd="ant -lib ../../../lib -Dtestcase=$i clean-test test -logfile $d/$i.log" + echo $cmd; + $cmd; + fi done cat $d/*.log | grep junit | egrep "Running org|Tests run" echo Logs at $d
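The new TestDynamicSerDe added by this patch drives the serde2 DynamicSerDe through a full round trip: it builds a row as a plain Java List, initializes the serde from table properties carrying a Thrift DDL and a protocol class name, serializes the row to a BytesWritable, and deserializes it back, expecting an object equal to the input. The sketch below condenses that flow outside of JUnit. It is illustrative only: the property names, the initialize/serialize/deserialize calls, and the DDL shape are taken from the test, while the wrapper class, the printouts, and the reconstructed generic parameters in the DDL string (list<string>, map<string,i32>) are assumptions added for the example.

    // Minimal sketch of the DynamicSerDe round trip exercised by TestDynamicSerDe.
    // Property names and serde calls mirror the test; the wrapper class and the
    // reconstructed generic parameters in the DDL string are assumptions.
    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Properties;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.serde.Constants;
    import org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol;
    import org.apache.hadoop.io.BytesWritable;

    public class DynamicSerDeRoundTrip {
      public static void main(String[] args) throws Exception {
        // One row: an i32, a list<string> and a map<string,i32>, matching the DDL below.
        List<String> bye = new ArrayList<String>();
        bye.add("firstString");
        bye.add("secondString");
        HashMap<String, Integer> another = new HashMap<String, Integer>();
        another.put("firstKey", 1);
        another.put("secondKey", 2);
        List<Object> row = new ArrayList<Object>();
        row.add(Integer.valueOf(234));
        row.add(bye);
        row.add(another);

        // Table properties: serialization format (a Thrift protocol class), table
        // name, Thrift DDL for the row type, and the serde library class.
        Properties schema = new Properties();
        schema.setProperty(Constants.SERIALIZATION_FORMAT, TCTLSeparatedProtocol.class.getName());
        schema.setProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_NAME, "test");
        schema.setProperty(Constants.SERIALIZATION_DDL,
            "struct test { i32 hello, list<string> bye, map<string,i32> another}");
        schema.setProperty(Constants.SERIALIZATION_LIB, new DynamicSerDe().getClass().toString());

        DynamicSerDe serde = new DynamicSerDe();
        serde.initialize(new Configuration(), schema);

        // Serialize, then read the bytes back; the tests assert that the
        // deserialized object equals the original row.
        ObjectInspector oi = serde.getObjectInspector();
        BytesWritable bytes = (BytesWritable) serde.serialize(row, oi);
        Object back = serde.deserialize(bytes);
        System.out.println("serialized as: " + new String(bytes.get(), 0, bytes.getSize()));
        System.out.println("round trip:    " + back);
      }
    }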
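The second TestDynamicSerDe case pins down the TCTLSeparatedProtocol delimiters through table properties (FIELD_DELIM "9", COLLECTION_DELIM "1", LINE_DELIM "2", MAPKEY_DELIM "4") and asserts that the resulting primary separator is character code 9 (tab), then compares the serialized bytes against a compare string the test builds itself. Assuming the protocol nests the delimiters the way Hive's delimited row format does (field delimiter between top-level columns, collection delimiter between list elements and map entries, map-key delimiter between a key and its value), the serialized test row would look roughly as below. The exact byte layout and the map entry ordering are assumptions for illustration, not taken from the patch.

    // Hypothetical wire layout of the test row under the delimiters configured in
    // the test: FIELD_DELIM = 9 (TAB), COLLECTION_DELIM = 1 (^A), MAPKEY_DELIM = 4 (^D).
    // This is an assumption about TCTLSeparatedProtocol's output, not taken from the
    // patch; map entry order depends on HashMap iteration and is not guaranteed.
    public class TCTLSeparatedLayoutSketch {
      static final String EXPECTED_ROW =
          "234" + "\t"                                        // i32 hello
        + "firstString" + "\u0001" + "secondString" + "\t"    // list<string> bye
        + "firstKey" + "\u0004" + "1" + "\u0001"
        + "secondKey" + "\u0004" + "2";                       // map<string,i32> another
    }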