public abstract class SparkStreamingApp extends Object implements Serializable
Modifier and Type | Field and Description |
---|---|
private String |
appName
|
private org.apache.spark.streaming.api.java.JavaStreamingContext
|
jssc
Common Spark Streaming context variable.
|
private Map<String,Object> |
kafkaParams
Kafka parameters for creating input streams pulling messages from Kafka
Brokers.
|
protected Singleton<Logger> |
loggerSingleton
|
private Singleton<MonitorThread> |
monitorSingleton
|
private SystemPropertyCenter |
propCenter
|
private static long |
serialVersionUID
|
private List<Stream> |
streams
|
private Singleton<TaskController> |
taskController
|
Constructor and Description |
---|
SparkStreamingApp(SystemPropertyCenter propCenter,
String appName)
|
Modifier and Type | Method and Description |
---|---|
abstract void |
addToContext()
Add streaming actions directly to the global streaming context.
|
void |
awaitTermination()
Await termination of the application.
|
protected org.apache.spark.streaming.api.java.JavaPairDStream<DataType,scala.Tuple2<String,byte[]>> |
buildDirectStream(Collection<DataType> acceptingTypes)
Utility function for all applications to receive messages with byte
array values from Kafka with direct stream.
|
protected org.apache.spark.streaming.api.java.JavaPairDStream<DataType,scala.Tuple2<String,byte[]>> |
buildDirectStream(Collection<DataType> acceptingTypes,
int repartition)
Utility function for all applications to receive messages with byte
array values from Kafka with direct stream.
|
protected void |
finalize()
|
void |
initialize()
Initialize the application.
|
protected void |
registerStreams(Collection<Stream> streams)
|
void |
start()
Start the application.
|
void |
stop()
Stop the application.
|
private static final long serialVersionUID
@Nonnull private SystemPropertyCenter propCenter
@Nonnull private final Map<String,Object> kafkaParams
private transient org.apache.spark.streaming.api.java.JavaStreamingContext jssc
@Nonnull private final Singleton<MonitorThread> monitorSingleton
@Nullable private Singleton<TaskController> taskController
protected void registerStreams(Collection<Stream> streams)
@Nonnull protected org.apache.spark.streaming.api.java.JavaPairDStream<DataType,scala.Tuple2<String,byte[]>> buildDirectStream(@Nonnull Collection<DataType> acceptingTypes, int repartition) throws org.apache.spark.SparkException
acceptingTypes - Data types the stream accepts.
repartition - Number of partitions when repartitioning the RDDs.
-1 means do not repartition. 0 means use the default parallelism of Spark.
org.apache.spark.SparkException
@Nonnull protected org.apache.spark.streaming.api.java.JavaPairDStream<DataType,scala.Tuple2<String,byte[]>> buildDirectStream(@Nonnull Collection<DataType> acceptingTypes) throws org.apache.spark.SparkException
acceptingTypes - Data types the stream accepts.
Throws: org.apache.spark.SparkException
public abstract void addToContext() throws Exception
Streaming actions added in this method should not require deserializing TaskData.
Actions that take TaskData as input should instead be implemented in
Stream.addToGlobalStream(Map), in order to save the time of deserialization.
Note that the existence of Kafka topics used in this method is not automatically checked.
Exception
public void initialize()
public void start()
public void stop()
public void awaitTermination()