From a82caa23526761bae23561c0ddc40372eb425b2d Mon Sep 17 00:00:00 2001
From: araika <araikasingh@gmail.com>
Date: Wed, 5 Mar 2025 10:05:16 +0530
Subject: [PATCH 1/2] HIVE-20889: Support timestamp-micros in AvroSerDe

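Avro's timestamp-micros logical type was previously not recognized by
AvroSerDe, so such columns could not be read as Hive timestamps and Hive
timestamps were always written as timestamp-millis. This change:

- adds Timestamp.toEpochMicro()/ofEpochMicro() and
  CalendarUtils.convertTimeToProlepticMicros()/convertTimeToHybridMicros(),
- makes AvroDeserializer inspect the schema's logical type and convert
  timestamp-micros values without truncating microseconds,
- makes AvroSerializer emit epoch microseconds when the Avro schema declares
  timestamp-micros,
- switches TypeInfoToSchema to generate timestamp-micros for Hive timestamps,
  while SchemaToTypeInfo accepts both timestamp-millis and timestamp-micros,
- adds a qtest that round-trips a timestamp with microsecond precision.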
---
 .../hadoop/hive/common/type/Timestamp.java    | 17 ++++++++
 .../clientpositive/avro_timestamp_micros.q    |  3 ++
 .../llap/avro_timestamp_micros.q.out          | 26 ++++++++++++
 .../hive/serde2/avro/AvroDeserializer.java    | 19 +++++++--
 .../hadoop/hive/serde2/avro/AvroSerDe.java    |  3 +-
 .../hive/serde2/avro/AvroSerializer.java      | 15 ++++++-
 .../hive/serde2/avro/SchemaToTypeInfo.java    |  3 +-
 .../hive/serde2/avro/TypeInfoToSchema.java    |  2 +-
 .../TestAvroObjectInspectorGenerator.java     |  2 +-
 .../hive/serde2/avro/TestAvroSerializer.java  |  2 +-
 .../hive/common/type/CalendarUtils.java       | 42 +++++++++++++++++++
 11 files changed, 123 insertions(+), 11 deletions(-)
 create mode 100644 ql/src/test/queries/clientpositive/avro_timestamp_micros.q
 create mode 100644 ql/src/test/results/clientpositive/llap/avro_timestamp_micros.q.out
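
Note for reviewers (not part of the commit): a minimal sketch of the intended
microsecond round trip through the new Timestamp helpers, assuming the patched
hive-common classes are on the classpath and values are interpreted in UTC as
in the rest of Timestamp; the class name below is only for illustration.

    import org.apache.hadoop.hive.common.type.Timestamp;

    public class EpochMicroRoundTrip {
      public static void main(String[] args) {
        // A timestamp with sub-millisecond precision, as used in the qtest.
        Timestamp ts = Timestamp.valueOf("2024-08-09 14:08:26.326107");
        // To microseconds since the epoch (UTC) and back again.
        long micros = ts.toEpochMicro();
        Timestamp back = Timestamp.ofEpochMicro(micros);
        // Unlike toEpochMilli()/ofEpochMilli(), the microsecond digits survive.
        System.out.println(back + " -> " + back.toEpochMicro());
      }
    }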

diff --git a/common/src/java/org/apache/hadoop/hive/common/type/Timestamp.java b/common/src/java/org/apache/hadoop/hive/common/type/Timestamp.java
index c026f8a8e9e8..1ac7436f50d2 100644
--- a/common/src/java/org/apache/hadoop/hive/common/type/Timestamp.java
+++ b/common/src/java/org/apache/hadoop/hive/common/type/Timestamp.java
@@ -162,6 +162,11 @@ public long toEpochMilli() {
     return localDateTime.toInstant(ZoneOffset.UTC).toEpochMilli();
   }
 
+  public long toEpochMicro() {
+    return localDateTime.toEpochSecond(ZoneOffset.UTC) * 1_000_000
+            + localDateTime.getNano() / 1000;
+  }
+
   public long toEpochMilli(ZoneId id) {
     return localDateTime.atZone(id).toInstant().toEpochMilli();
   }
@@ -236,6 +241,18 @@ public static Timestamp ofEpochMilli(long epochMilli, int nanos) {
         .withNano(nanos));
   }
 
+  public static Timestamp ofEpochMicro(long epochMicro) {
+    // Split the microsecond value into whole seconds and a non-negative
+    // sub-second remainder so that pre-epoch (negative) values are handled
+    // correctly.
+    long seconds = Math.floorDiv(epochMicro, 1_000_000L);
+    long micros = Math.floorMod(epochMicro, 1_000_000L);
+
+    Instant instant = Instant.ofEpochSecond(seconds, micros * 1_000L);
+
+    return new Timestamp(LocalDateTime.ofInstant(instant, ZoneOffset.UTC));
+  }
+
   public void setNanos(int nanos) {
     localDateTime = localDateTime.withNano(nanos);
   }
diff --git a/ql/src/test/queries/clientpositive/avro_timestamp_micros.q b/ql/src/test/queries/clientpositive/avro_timestamp_micros.q
new file mode 100644
index 000000000000..a1de1deb5479
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/avro_timestamp_micros.q
@@ -0,0 +1,3 @@
+CREATE EXTERNAL TABLE hive_test(`dt` timestamp) STORED AS AVRO;
+INSERT INTO hive_test VALUES (cast('2024-08-09 14:08:26.326107' as timestamp));
+SELECT * FROM hive_test;
diff --git a/ql/src/test/results/clientpositive/llap/avro_timestamp_micros.q.out b/ql/src/test/results/clientpositive/llap/avro_timestamp_micros.q.out
new file mode 100644
index 000000000000..7e58380fdbc0
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/avro_timestamp_micros.q.out
@@ -0,0 +1,26 @@
+PREHOOK: query: CREATE EXTERNAL TABLE hive_test(`dt` timestamp) STORED AS AVRO
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@hive_test
+POSTHOOK: query: CREATE EXTERNAL TABLE hive_test(`dt` timestamp) STORED AS AVRO
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@hive_test
+PREHOOK: query: INSERT INTO hive_test VALUES (cast('2024-08-09 14:08:26.326107' as timestamp))
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@hive_test
+POSTHOOK: query: INSERT INTO hive_test VALUES (cast('2024-08-09 14:08:26.326107' as timestamp))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@hive_test
+POSTHOOK: Lineage: hive_test.dt SCRIPT []
+PREHOOK: query: SELECT * FROM hive_test
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hive_test
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT * FROM hive_test
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hive_test
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2024-08-09 14:08:26.326107
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
index 11d66277ef92..f866f973e3c4 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
@@ -31,6 +31,7 @@
 import java.util.Map;
 import java.util.TimeZone;
 
+import org.apache.avro.LogicalType;
 import org.apache.avro.Schema;
 import org.apache.avro.Schema.Type;
 import org.apache.avro.generic.GenericData;
@@ -388,11 +389,21 @@ private Object deserializePrimitive(Object datum, Schema fileSchema, Schema reco
           skipProlepticConversion = HiveConf.ConfVars.HIVE_AVRO_PROLEPTIC_GREGORIAN_DEFAULT.defaultBoolVal;
         }
       }
-      Timestamp timestamp = TimestampTZUtil.convertTimestampToZone(
-          Timestamp.ofEpochMilli((Long) datum), ZoneOffset.UTC, convertToTimeZone, legacyConversion);
-      if (!skipProlepticConversion) {
+      LogicalType logicalType = recordSchema.getLogicalType();
+      Timestamp timestamp;
+      if (logicalType != null && logicalType.getName().equals(AvroSerDe.TIMESTAMP_TYPE_NAME_MICROS)) {
+        timestamp = Timestamp.ofEpochMicro((Long) datum);
+      } else {
+        timestamp = Timestamp.ofEpochMilli((Long) datum);
+      }
+      timestamp = TimestampTZUtil.convertTimestampToZone(
+              timestamp, ZoneOffset.UTC, convertToTimeZone, legacyConversion);
+      if (!skipProlepticConversion && logicalType.getName().equals(AvroSerDe.TIMESTAMP_TYPE_NAME_MICROS)) {
+        timestamp = Timestamp.ofEpochMicro(
+            CalendarUtils.convertTimeToProlepticMicros(timestamp.toEpochMicro()));
+      } else if (!skipProlepticConversion) {
         timestamp = Timestamp.ofEpochMilli(
-            CalendarUtils.convertTimeToProleptic(timestamp.toEpochMilli()));
+                CalendarUtils.convertTimeToProleptic(timestamp.toEpochMilli()));
       }
       return timestamp;
     }
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java
index 100475f06d7b..e6c42687c8b4 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java
@@ -57,7 +57,8 @@ public class AvroSerDe extends AbstractSerDe {
   public static final String CHAR_TYPE_NAME = "char";
   public static final String VARCHAR_TYPE_NAME = "varchar";
   public static final String DATE_TYPE_NAME = "date";
-  public static final String TIMESTAMP_TYPE_NAME = "timestamp-millis";
+  public static final String TIMESTAMP_TYPE_NAME_MILLIS = "timestamp-millis";
+  public static final String TIMESTAMP_TYPE_NAME_MICROS = "timestamp-micros";
   public static final String WRITER_TIME_ZONE = "writer.time.zone";
   public static final String WRITER_PROLEPTIC = "writer.proleptic";
   public static final String WRITER_ZONE_CONVERSION_LEGACY = "writer.zone.conversion.legacy";
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java
index 409ad9d23fd3..2eeaf14a7c25 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hive.serde2.avro;
 
+import java.time.Instant;
+import java.time.LocalDateTime;
 import java.time.ZoneOffset;
 import java.util.LinkedHashMap;
 import java.util.List;
@@ -24,6 +26,7 @@
 import java.util.Set;
 import java.util.TimeZone;
 
+import org.apache.avro.LogicalType;
 import org.apache.avro.Schema;
 import org.apache.avro.Schema.Field;
 import org.apache.avro.Schema.Type;
@@ -231,10 +234,18 @@ private Object serializePrimitive(TypeInfo typeInfo, PrimitiveObjectInspector fi
     case TIMESTAMP:
       Timestamp timestamp =
         ((TimestampObjectInspector) fieldOI).getPrimitiveJavaObject(structFieldData);
+      LogicalType logicalType = schema.getLogicalType();
+      if (logicalType != null && logicalType.getName().equals(AvroSerDe.TIMESTAMP_TYPE_NAME_MICROS)) {
+        long micros = defaultProleptic ? timestamp.toEpochMicro() :
+                CalendarUtils.convertTimeToProlepticMicros(timestamp.toEpochMicro());
+        timestamp = TimestampTZUtil.convertTimestampToZone(
+                Timestamp.ofEpochMicro(micros), TimeZone.getDefault().toZoneId(), ZoneOffset.UTC, legacyConversion);
+        return timestamp.toEpochMicro();
+      }
       long millis = defaultProleptic ? timestamp.toEpochMilli() :
-          CalendarUtils.convertTimeToHybrid(timestamp.toEpochMilli());
+              CalendarUtils.convertTimeToHybrid(timestamp.toEpochMilli());
       timestamp = TimestampTZUtil.convertTimestampToZone(
-          Timestamp.ofEpochMilli(millis), TimeZone.getDefault().toZoneId(), ZoneOffset.UTC, legacyConversion);
+              Timestamp.ofEpochMilli(millis), TimeZone.getDefault().toZoneId(), ZoneOffset.UTC, legacyConversion);
       return timestamp.toEpochMilli();
     case UNKNOWN:
       throw new AvroSerdeException("Received UNKNOWN primitive category.");
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/SchemaToTypeInfo.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/SchemaToTypeInfo.java
index aaf9f2f7a4f3..0a33c0a1423b 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/SchemaToTypeInfo.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/SchemaToTypeInfo.java
@@ -180,7 +180,8 @@ public static TypeInfo generateTypeInfo(Schema schema,
     }
 
     if (type == LONG &&
-      AvroSerDe.TIMESTAMP_TYPE_NAME.equals(schema.getProp(AvroSerDe.AVRO_PROP_LOGICAL_TYPE))) {
+            (AvroSerDe.TIMESTAMP_TYPE_NAME_MILLIS.equals(schema.getProp(AvroSerDe.AVRO_PROP_LOGICAL_TYPE)) ||
+                    AvroSerDe.TIMESTAMP_TYPE_NAME_MICROS.equals(schema.getProp(AvroSerDe.AVRO_PROP_LOGICAL_TYPE)))) {
       return TypeInfoFactory.timestampTypeInfo;
     }
 
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/TypeInfoToSchema.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/TypeInfoToSchema.java
index 7b6af3b89307..e8d05aacdd96 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/TypeInfoToSchema.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/TypeInfoToSchema.java
@@ -159,7 +159,7 @@ private Schema createAvroPrimitive(TypeInfo typeInfo) {
       case TIMESTAMP:
         schema = AvroSerdeUtils.getSchemaFor("{" +
           "\"type\":\"" + AvroSerDe.AVRO_LONG_TYPE_NAME + "\"," +
-          "\"logicalType\":\"" + AvroSerDe.TIMESTAMP_TYPE_NAME + "\"}");
+          "\"logicalType\":\"" + AvroSerDe.TIMESTAMP_TYPE_NAME_MICROS + "\"}");
         break;
       case VOID:
         schema = Schema.create(Schema.Type.NULL);
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroObjectInspectorGenerator.java b/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroObjectInspectorGenerator.java
index 048d3d970d6d..6f56ec47b826 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroObjectInspectorGenerator.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroObjectInspectorGenerator.java
@@ -227,7 +227,7 @@ public class TestAvroObjectInspectorGenerator {
       "  \"fields\" : [\n" +
       "    {\"name\":\"timestampField\", " +
       "     \"type\":\"" + AvroSerDe.AVRO_LONG_TYPE_NAME + "\", " +
-      "         \"logicalType\":\"" + AvroSerDe.TIMESTAMP_TYPE_NAME + "\"}" +
+      "         \"logicalType\":\"" + AvroSerDe.TIMESTAMP_TYPE_NAME_MILLIS + "\"}" +
       "  ]\n" +
       "}";
 
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerializer.java b/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerializer.java
index bcd0fd1acf9f..3ede9f169c45 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerializer.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerializer.java
@@ -125,7 +125,7 @@ public void canSerializeDoubles() throws SerDeException, IOException {
   public void canSerializeTimestamps() throws SerDeException, IOException {
     singleFieldTest("timestamp1", Timestamp.valueOf("2011-01-01 00:00:00").toEpochMilli(),
         "\"" + AvroSerDe.AVRO_LONG_TYPE_NAME + "\"," +
-        "\"logicalType\":\"" + AvroSerDe.TIMESTAMP_TYPE_NAME + "\"");
+        "\"logicalType\":\"" + AvroSerDe.TIMESTAMP_TYPE_NAME_MILLIS + "\"");
   }
 
   @Test
diff --git a/storage-api/src/java/org/apache/hadoop/hive/common/type/CalendarUtils.java b/storage-api/src/java/org/apache/hadoop/hive/common/type/CalendarUtils.java
index 67a45b14d070..1412cc280495 100644
--- a/storage-api/src/java/org/apache/hadoop/hive/common/type/CalendarUtils.java
+++ b/storage-api/src/java/org/apache/hadoop/hive/common/type/CalendarUtils.java
@@ -157,6 +157,27 @@ public static long convertTimeToProleptic(long hybrid) {
     return proleptic;
   }
 
+  /**
+   * Convert epoch microseconds from the hybrid Julian/Gregorian calendar to the
+   * proleptic Gregorian.
+   * @param hybridMicros Microseconds of epoch in the hybrid Julian/Gregorian
+   * @return Microseconds of epoch in the proleptic Gregorian
+   */
+  public static long convertTimeToProlepticMicros(long hybridMicros) {
+    long prolepticMicros = hybridMicros;
+    long hybridMillis = Math.floorDiv(hybridMicros, 1_000L); // floor for pre-epoch values
+    if (hybridMillis < SWITCHOVER_MILLIS) {
+      String dateStr = HYBRID_TIME_FORMAT.get().format(new Date(hybridMillis));
+      try {
+        prolepticMicros = PROLEPTIC_TIME_FORMAT.get().parse(dateStr).getTime() * 1_000L
+            + Math.floorMod(hybridMicros, 1_000L); // keep the sub-millisecond part
+      } catch (ParseException e) {
+        throw new IllegalArgumentException("Can't parse " + dateStr, e);
+      }
+    }
+    return prolepticMicros;
+  }
+
   /**
    * Convert epoch millis from the proleptic Gregorian calendar to the hybrid
    * Julian/Gregorian.
@@ -176,6 +197,27 @@ public static long convertTimeToHybrid(long proleptic) {
     return hybrid;
   }
 
+  /**
+   * Convert epoch microseconds from the proleptic Gregorian calendar to the
+   * hybrid Julian/Gregorian.
+   * @param prolepticMicros Microseconds of epoch in the proleptic Gregorian
+   * @return Microseconds of epoch in the hybrid Julian/Gregorian
+   */
+  public static long convertTimeToHybridMicros(long prolepticMicros) {
+    long hybridMicros = prolepticMicros;
+    long prolepticMillis = Math.floorDiv(prolepticMicros, 1_000L); // floor for pre-epoch values
+    if (prolepticMillis < SWITCHOVER_MILLIS) {
+      String dateStr = PROLEPTIC_TIME_FORMAT.get().format(new Date(prolepticMillis));
+      try {
+        hybridMicros = HYBRID_TIME_FORMAT.get().parse(dateStr).getTime() * 1_000L
+            + Math.floorMod(prolepticMicros, 1_000L); // keep the sub-millisecond part
+      } catch (ParseException e) {
+        throw new IllegalArgumentException("Can't parse " + dateStr, e);
+      }
+    }
+    return hybridMicros;
+  }
+
   /**
    *
    * Formats epoch day to date according to proleptic or hybrid calendar

From 5bb3c2c513d0e2d99eb4ed4738b857fe5c2ba56f Mon Sep 17 00:00:00 2001
From: Araika <araikasingh@gmail.com>
Date: Sun, 16 Mar 2025 19:36:33 +0530
Subject: [PATCH 2/2] HIVE-20889: Fix logical type lookup, hybrid conversion, imports and tests

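Follow-up fixes for the first patch:

- look up the logical type on the file schema rather than the record schema
  and guard against a null logical type in AvroDeserializer,
- use convertTimeToHybridMicros() on the serialization path instead of the
  proleptic conversion,
- drop the unused java.time imports from AvroSerializer,
- update TestTypeInfoToSchema for the timestamp-micros schema and mask the
  HDFS path in the qtest output.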
---
 .../results/clientpositive/llap/avro_timestamp_micros.q.out   | 4 ++--
 .../org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java  | 4 ++--
 .../org/apache/hadoop/hive/serde2/avro/AvroSerializer.java    | 4 +---
 .../apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java  | 2 +-
 4 files changed, 6 insertions(+), 8 deletions(-)

diff --git a/ql/src/test/results/clientpositive/llap/avro_timestamp_micros.q.out b/ql/src/test/results/clientpositive/llap/avro_timestamp_micros.q.out
index 7e58380fdbc0..458333130956 100644
--- a/ql/src/test/results/clientpositive/llap/avro_timestamp_micros.q.out
+++ b/ql/src/test/results/clientpositive/llap/avro_timestamp_micros.q.out
@@ -18,9 +18,9 @@ POSTHOOK: Lineage: hive_test.dt SCRIPT []
 PREHOOK: query: SELECT * FROM hive_test
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hive_test
-PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
 POSTHOOK: query: SELECT * FROM hive_test
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hive_test
-POSTHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
 2024-08-09 14:08:26.326107
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
index f866f973e3c4..d7bb62a5cb2f 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
@@ -389,7 +389,7 @@ private Object deserializePrimitive(Object datum, Schema fileSchema, Schema reco
           skipProlepticConversion = HiveConf.ConfVars.HIVE_AVRO_PROLEPTIC_GREGORIAN_DEFAULT.defaultBoolVal;
         }
       }
-      LogicalType logicalType = recordSchema.getLogicalType();
+      LogicalType logicalType = fileSchema.getLogicalType();
       Timestamp timestamp;
       if (logicalType != null && logicalType.getName().equals(AvroSerDe.TIMESTAMP_TYPE_NAME_MICROS)) {
         timestamp = Timestamp.ofEpochMicro((Long) datum);
@@ -398,7 +398,7 @@ private Object deserializePrimitive(Object datum, Schema fileSchema, Schema reco
       }
       timestamp = TimestampTZUtil.convertTimestampToZone(
               timestamp, ZoneOffset.UTC, convertToTimeZone, legacyConversion);
-      if (!skipProlepticConversion && logicalType.getName().equals(AvroSerDe.TIMESTAMP_TYPE_NAME_MICROS)) {
+      if (!skipProlepticConversion && logicalType != null && logicalType.getName().equals(AvroSerDe.TIMESTAMP_TYPE_NAME_MICROS)) {
         timestamp = Timestamp.ofEpochMicro(
             CalendarUtils.convertTimeToProlepticMicros(timestamp.toEpochMicro()));
       } else if (!skipProlepticConversion) {
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java
index 2eeaf14a7c25..43071ee8f22c 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java
@@ -17,8 +17,6 @@
  */
 package org.apache.hadoop.hive.serde2.avro;
 
-import java.time.Instant;
-import java.time.LocalDateTime;
 import java.time.ZoneOffset;
 import java.util.LinkedHashMap;
 import java.util.List;
@@ -237,7 +235,7 @@ private Object serializePrimitive(TypeInfo typeInfo, PrimitiveObjectInspector fi
       LogicalType logicalType = schema.getLogicalType();
       if (logicalType != null && logicalType.getName().equals(AvroSerDe.TIMESTAMP_TYPE_NAME_MICROS)) {
         long micros = defaultProleptic ? timestamp.toEpochMicro() :
-                CalendarUtils.convertTimeToProlepticMicros(timestamp.toEpochMicro());
+                CalendarUtils.convertTimeToHybridMicros(timestamp.toEpochMicro());
         timestamp = TimestampTZUtil.convertTimestampToZone(
                 Timestamp.ofEpochMicro(micros), TimeZone.getDefault().toZoneId(), ZoneOffset.UTC, legacyConversion);
         return timestamp.toEpochMicro();
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java b/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java
index ac0a8ee46dd0..029144d63781 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java
@@ -261,7 +261,7 @@ public void createAvroDateSchema() {
   public void createAvroTimestampSchema() {
     final String specificSchema = "{" +
       "\"type\":\"long\"," +
-      "\"logicalType\":\"timestamp-millis\"}";
+      "\"logicalType\":\"timestamp-micros\"}";
     String expectedSchema = genSchema(specificSchema);
 
     Assert.assertEquals("Test for timestamp in avro schema failed",