I do not think this is available in Spark by default, but it is easy to implement by extending `ReceiverInputDStream` with a custom `Receiver`.
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream.ReceiverInputDStream
import org.apache.spark.streaming.receiver.Receiver

/** An input DStream that feeds the elements of a (possibly infinite, lazily
  * evaluated) Scala `Stream` into Spark Streaming through a custom receiver.
  *
  * @param ssc_         the streaming context (transient: DStreams are
  *                     checkpointed without their context)
  * @param stream       the source of elements; may be infinite
  * @param storageLevel storage level used for the received blocks
  */
class InfiniteStreamInputDStream[T](
    @transient ssc_ : StreamingContext,
    stream: Stream[T],
    storageLevel: StorageLevel
) extends ReceiverInputDStream[T](ssc_) {

  /** Builds a fresh receiver that pumps `stream` into Spark. */
  override def getReceiver(): Receiver[T] =
    new InfiniteStreamReceiver(stream, storageLevel)
}

/** Receiver that pulls elements from `stream` on a background thread and
  * hands each one to Spark via `store`.
  *
  * NOTE(review): the `Stream`'s lazily-computed elements are captured by the
  * receiver, which is serialized to the worker — confirm the stream's
  * generator closure is serializable.
  */
class InfiniteStreamReceiver[T](stream: Stream[T], storageLevel: StorageLevel)
    extends Receiver[T](storageLevel) {

  // Stateful iterator over the (lazily evaluated) stream; avoids retaining
  // already-consumed elements the way holding the Stream head would.
  private val streamIterator = stream.iterator

  /** Loop that drains the iterator into Spark until the receiver stops. */
  private class ReadAndStore extends Runnable {
    def run(): Unit = {
      // Check isStopped() each iteration: without it, an infinite stream
      // would keep this thread alive forever after the receiver is stopped.
      while (!isStopped() && streamIterator.hasNext) {
        store(streamIterator.next())
      }
    }
  }

  override def onStart(): Unit = {
    // BUG FIX: the original called .run(), which executed the pump loop
    // synchronously on the calling thread and blocked onStart() indefinitely
    // for an infinite stream. The Receiver contract requires onStart() to be
    // non-blocking, so the loop must be launched with Thread.start().
    val pump = new Thread(new ReadAndStore, "infinite-stream-receiver")
    pump.setDaemon(true) // do not keep the JVM alive on executor shutdown
    pump.start()
  }

  override def onStop(): Unit = {
    // Nothing to clean up: the pump thread observes isStopped() and exits.
  }
}
source share