public class SparkMicroBatchStream
extends java.lang.Object
implements org.apache.spark.sql.connector.read.streaming.MicroBatchStream, org.apache.spark.sql.connector.read.streaming.SupportsAdmissionControl
| Modifier and Type | Method and Description |
|---|---|
| `void` | `commit(org.apache.spark.sql.connector.read.streaming.Offset end)` |
| `org.apache.spark.sql.connector.read.PartitionReaderFactory` | `createReaderFactory()` |
| `org.apache.spark.sql.connector.read.streaming.Offset` | `deserializeOffset(java.lang.String json)` |
| `org.apache.spark.sql.connector.read.streaming.ReadLimit` | `getDefaultReadLimit()` |
| `org.apache.spark.sql.connector.read.streaming.Offset` | `initialOffset()` |
| `org.apache.spark.sql.connector.read.streaming.Offset` | `latestOffset()` |
| `org.apache.spark.sql.connector.read.streaming.Offset` | `latestOffset(org.apache.spark.sql.connector.read.streaming.Offset startOffset, org.apache.spark.sql.connector.read.streaming.ReadLimit limit)` |
| `org.apache.spark.sql.connector.read.InputPartition[]` | `planInputPartitions(org.apache.spark.sql.connector.read.streaming.Offset start, org.apache.spark.sql.connector.read.streaming.Offset end)` |
| `void` | `stop()` |
public org.apache.spark.sql.connector.read.streaming.Offset latestOffset()
Specified by: `latestOffset` in interface `org.apache.spark.sql.connector.read.streaming.MicroBatchStream`
public org.apache.spark.sql.connector.read.InputPartition[] planInputPartitions(org.apache.spark.sql.connector.read.streaming.Offset start, org.apache.spark.sql.connector.read.streaming.Offset end)
Specified by: `planInputPartitions` in interface `org.apache.spark.sql.connector.read.streaming.MicroBatchStream`
public org.apache.spark.sql.connector.read.PartitionReaderFactory createReaderFactory()
Specified by: `createReaderFactory` in interface `org.apache.spark.sql.connector.read.streaming.MicroBatchStream`
public org.apache.spark.sql.connector.read.streaming.Offset initialOffset()
Specified by: `initialOffset` in interface `org.apache.spark.sql.connector.read.streaming.SparkDataStream`
public org.apache.spark.sql.connector.read.streaming.Offset deserializeOffset(java.lang.String json)
Specified by: `deserializeOffset` in interface `org.apache.spark.sql.connector.read.streaming.SparkDataStream`
public void commit(org.apache.spark.sql.connector.read.streaming.Offset end)
Specified by: `commit` in interface `org.apache.spark.sql.connector.read.streaming.SparkDataStream`
public void stop()
Specified by: `stop` in interface `org.apache.spark.sql.connector.read.streaming.SparkDataStream`
public org.apache.spark.sql.connector.read.streaming.Offset latestOffset(org.apache.spark.sql.connector.read.streaming.Offset startOffset, org.apache.spark.sql.connector.read.streaming.ReadLimit limit)
Specified by: `latestOffset` in interface `org.apache.spark.sql.connector.read.streaming.SupportsAdmissionControl`
public org.apache.spark.sql.connector.read.streaming.ReadLimit getDefaultReadLimit()
Specified by: `getDefaultReadLimit` in interface `org.apache.spark.sql.connector.read.streaming.SupportsAdmissionControl`