Skip to content
This repository was archived by the owner on Jan 14, 2025. It is now read-only.

Commit

Permalink
Merge pull request #11 from lys0716/origin/dev-1
Browse files Browse the repository at this point in the history
Format javadoc of files and fix typo
  • Loading branch information
ovj authored Mar 18, 2019
2 parents 6f9c242 + 7a3714b commit afef8a8
Show file tree
Hide file tree
Showing 23 changed files with 21 additions and 28 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -37,11 +37,11 @@
import static com.uber.marmaray.common.metrics.DataFeedMetricNames.RESULT_FAILURE;
import static com.uber.marmaray.common.metrics.DataFeedMetricNames.RESULT_SUCCESS;

@Slf4j
/**
* {@link JobDagActions} are actions that are run based on success status.
* This class is completely independent and NOT an implementation of the {@link IJobDagAction} interface
*/
@Slf4j
public final class JobDagActions {
public static final String RESULT_METRIC = "result";
public static final String TIME_METRIC = "execution_time";
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,12 +23,12 @@
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;

@Slf4j
@AllArgsConstructor
/**
 * {@link ReporterAction} is an implementation of the {@link IJobDagAction} interface and is used to
* report metrics
*/
@Slf4j
@AllArgsConstructor
public class ReporterAction implements IJobDagAction {

public static int DEFAULT_TIMEOUT_SECONDS = 120;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,12 +28,12 @@
import lombok.NonNull;
import lombok.extern.slf4j.Slf4j;

@Slf4j
/**
 * {@link LockManagerConfiguration} defines configurations for taking locks on jobs via ZooKeeper
*
* All properties start with {@link #LOCK_MANAGER_PREFIX}.
*/
@Slf4j
public class LockManagerConfiguration {

public static final String LOCK_MANAGER_PREFIX = Configuration.MARMARAY_PREFIX + "lock_manager.";
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
* This class converts records from ({@link Schema}, {@link AvroPayload}) to (OS, OD).
* @param <OS> output schema type
* @param <OD> output data type
*/
*/
public abstract class SinkDataConverter<OS, OD> extends AbstractDataConverter<Schema, OS, AvroPayload, OD> {
private static final long serialVersionUID = 1L;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,14 +16,14 @@
*/
package com.uber.marmaray.common.exceptions;

import com.uber.marmaray.common.AvroPayload;
import com.uber.marmaray.common.converters.data.HoodieSinkDataConverter;
import lombok.NonNull;
import org.apache.avro.generic.GenericRecord;
import org.hibernate.validator.constraints.NotEmpty;

/**
* It is a checked exception and should be thrown when there is either missing or invalid user defined field in
* data. Check {@link HoodieSinkDataConverter#getKey(GenericRecord)}
* data. Check {@link HoodieSinkDataConverter#getRecordKey(AvroPayload)}
* for an example.
*/
public class InvalidDataException extends Exception {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,6 @@
/**
* This is the parent runtime exception thrown whenever job encounters unrecoverable exception.
*/

public class JobRuntimeException extends RuntimeException {
public JobRuntimeException(@NotEmpty final String message) {
super(message);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@
/**
* Thrown when any required property for a module is not specified.
*/

public class MissingPropertyException extends JobRuntimeException {
public MissingPropertyException(final String propertyName) {
super("property:" + propertyName);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,13 +31,13 @@
import java.util.Queue;
import java.util.concurrent.TimeUnit;

@Slf4j
/**
* {@link ExecutionTimeJobExecutionStrategy} is a {@link IJobExecutionStrategy} that sorts the {@link JobDag}s by
* execution time and latest completion, so DAGs that have not finished will run first, followed by jobs sorted by
* execution time descending.
* This order should help prevent individual long-running dags from keeping the execution from completing on time.
*/
@Slf4j
public class ExecutionTimeJobExecutionStrategy implements IJobExecutionStrategy {

public static final int DEFAULT_LAST_EXECUTION_TIME_THRESHOLD_HOURS = 6;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -53,12 +53,12 @@
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

@Slf4j
/**
* {@link HDFSMetadataManager} implements the {@link IMetadataManager} interface, adding the capability
* to put and retrieve generic metadata from HDFS. All metadata will be stored under a single filename
* with the name = System.currentTimeMillis() when {@link IMetadataManager#saveChanges()} is invoked
*/
@Slf4j
public class HDFSMetadataManager implements IMetadataManager<StringValue> {
public static final int DEFAULT_NUM_METADATA_FILES_TO_RETAIN = 5;
private static final int SERIALIZATION_VERSION = 1;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,6 @@
* Uses {@link HDFSMetadataManager} internally to interact with the backend if sourceType is set to HDFS
*
*/

@Slf4j
public class JobManagerMetadataTracker {

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,12 +19,12 @@
import lombok.AllArgsConstructor;
import lombok.EqualsAndHashCode;

@AllArgsConstructor
@EqualsAndHashCode(callSuper = false)
/**
* {@link StringValue} extends {@link AbstractValue} and wraps a String that represents the job metadata
* that will be stored in HDFS
*/
@AllArgsConstructor
@EqualsAndHashCode(callSuper = false)
public class StringValue extends AbstractValue<String> {

private final String value;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@
/**
 * {@link IFunctionThrowsException} is the interface for a function that throws exceptions.
*/

@FunctionalInterface
public interface IFunctionThrowsException<T, R> {
R apply(@NonNull final T t) throws Exception;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,6 @@
* {@link IRetryStrategy} determines if a function should be retried or not. retryMessage returns
* the description of the current attempt.
*/

public interface IRetryStrategy {
boolean shouldRetry() throws RetryException;
String retryMessage();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,6 @@
* It gets work units from {@link KafkaWorkUnitCalculatorResult} as a list of {@link OffsetRange}, reads messages from
* kafka and returns {@link JavaRDD<AvroPayload>}.
*/

@Slf4j
@AllArgsConstructor
public class KafkaSource implements ISource<KafkaWorkUnitCalculatorResult, KafkaRunState>, Serializable {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,10 +24,10 @@

import java.util.List;

@Slf4j
/**
* {@link ConfigUtil} provides utility methods for job configurations
*/
@Slf4j
public final class ConfigUtil {

private ConfigUtil() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -51,10 +51,10 @@

import static com.uber.marmaray.utilities.DateUtil.DATE_PARTITION_FORMAT;

@Slf4j
/**
* {@link ErrorTableUtil} defines utility methods to interact with the error tables
*/
@Slf4j
public final class ErrorTableUtil {

public static final String HADOOP_ROW_KEY = "Hadoop_Row_Key";
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,10 +33,10 @@
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

@Slf4j
/**
* {@link FSUtils} defines utility methods with interacting with a filesystem
*/
@Slf4j
public class FSUtils {

// Metadata file names in HDFS = nanoseconds since epoch so we can sort by name
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,10 +32,10 @@
import java.util.List;
import java.util.Map;

@Slf4j
/**
* {@link GenericRecordUtil} defines utility methods for working with Generic Records
*/
@Slf4j
public final class GenericRecordUtil {

private GenericRecordUtil() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,10 +42,10 @@
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

@Slf4j
/**
* {@link KafkaUtil} provides utility methods for interacting with Kafka
*/
@Slf4j
public final class KafkaUtil {

public static final int FETCH_OFFSET_TIMEOUT_SEC = 60;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,6 @@
* name. {@link #batchLock(List, String)} acquire locks a list of lock name strings. It only
* succeeds and keeps all the locks if all the acquiring were successful.
*/

@Slf4j
public class LockManager implements AutoCloseable {
@NonNull
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,10 +29,10 @@
import lombok.extern.slf4j.Slf4j;
import org.hibernate.validator.constraints.NotEmpty;

@Slf4j
/**
* {@link MapUtil} defines utility methods for working with maps
*/
@Slf4j
public class MapUtil {

public static final String KEY_VALUE_SEPARATOR = StringTypes.COLON;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,10 +35,10 @@

import java.io.IOException;

@Slf4j
/**
* {@link SchemaUtil} defines utility methods for working with schemas
*/
@Slf4j
public final class SchemaUtil {

public static final String DISPERSAL_TIMESTAMP = "dispersal_timestamp";
Expand All @@ -51,7 +51,7 @@ private SchemaUtil() {
* This utility method will iterate through a directory containing parquet files, find the first file,
* and only read in the Parquet metadata and convert the parquet schema to the equivalent Spark StructType.
*
* Thsis method is useful because it does not require reading in all the data into memory to determine the schema
* This method is useful because it does not require reading in all the data into memory to determine the schema
* and only reads in the required metadata located in the footer
* @param parquetDir
* @return StructType equivalent of the parquet schema
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,11 +23,11 @@

import java.io.Serializable;

@ToString
@AllArgsConstructor
/**
* {@link TimestampInfo} contains timestamp information, either as a String or a long
*/
@ToString
@AllArgsConstructor
public class TimestampInfo implements Serializable {
@Getter
final Optional<String> timestamp;
Expand Down

0 comments on commit afef8a8

Please sign in to comment.