Instance Constructors
- new SparkDataCollection(applicationEventListener: ApplicationEventListener, jobProgressListener: JobProgressListener, storageStatusListener: StorageStatusListener, environmentListener: EnvironmentListener, executorsListener: ExecutorsListener, storageListener: StorageListener, storageStatusTrackingListener: StorageStatusTrackingListener)
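A wiring sketch for the seven constructor arguments (not taken from this page): the listener constructors shown are the Spark 1.x-era ones and changed in later Spark versions, and StorageStatusTrackingListener is assumed to be a no-arg class defined alongside SparkDataCollection. The file must itself sit in a package under org.apache.spark, because ApplicationEventListener is private[spark] (see the note at the bottom of this page).

// Sketch only: listener constructors follow Spark 1.x; the package is an
// assumption, chosen so the private[spark] types below are visible.
package org.apache.spark.deploy.history

import org.apache.spark.SparkConf
import org.apache.spark.scheduler.ApplicationEventListener
import org.apache.spark.storage.StorageStatusListener
import org.apache.spark.ui.env.EnvironmentListener
import org.apache.spark.ui.exec.ExecutorsListener
import org.apache.spark.ui.jobs.JobProgressListener
import org.apache.spark.ui.storage.StorageListener

object SparkDataCollectionExample {
  def build(conf: SparkConf): SparkDataCollection = {
    val storageStatusListener = new StorageStatusListener()
    new SparkDataCollection(
      applicationEventListener = new ApplicationEventListener(),
      jobProgressListener = new JobProgressListener(conf),
      storageStatusListener = storageStatusListener,
      environmentListener = new EnvironmentListener(),
      // ExecutorsListener and StorageListener both read from the shared
      // StorageStatusListener instance.
      executorsListener = new ExecutorsListener(storageStatusListener),
      storageListener = new StorageListener(storageStatusListener),
      // Assumed to be defined in the same project as SparkDataCollection.
      storageStatusTrackingListener = new StorageStatusTrackingListener())
  }
}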
Value Members
- final def !=(arg0: AnyRef): Boolean
- final def !=(arg0: Any): Boolean
- final def ##(): Int
- final def ==(arg0: AnyRef): Boolean
- final def ==(arg0: Any): Boolean
- final def asInstanceOf[T0]: T0
- def clone(): AnyRef
- final def eq(arg0: AnyRef): Boolean
- def equals(arg0: Any): Boolean
- def finalize(): Unit
- def getAppId(): String
- final def getClass(): Class[_]
- def getConf(): Properties
- def hashCode(): Int
- def isEmpty(): Boolean
- final def isInstanceOf[T0]: Boolean
- def isThrottled(): Boolean
- final def ne(arg0: AnyRef): Boolean
- final def notify(): Unit
- final def notifyAll(): Unit
- final def synchronized[T0](arg0: ⇒ T0): T0
- def throttle(): Unit
- def toString(): String
- final def wait(): Unit
- final def wait(arg0: Long, arg1: Int): Unit
- final def wait(arg0: Long): Unit
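Of the members above, only getAppId, getConf, isEmpty, isThrottled, and throttle belong to this class; the rest are inherited from AnyRef or Any. A short usage sketch, assuming a collection built as in the constructor example that has already replayed a finished application's events (the SparkDataCollectionUsage and summarize names are illustrative):

package org.apache.spark.deploy.history

object SparkDataCollectionUsage {
  def summarize(dataCollection: SparkDataCollection): Unit = {
    // isEmpty() reports whether any application data has been collected yet.
    if (!dataCollection.isEmpty()) {
      val appId: String = dataCollection.getAppId()
      val sparkProps: java.util.Properties = dataCollection.getConf()
      println(s"application $appId: ${sparkProps.size()} Spark config entries collected")
    }

    // throttle() sets the flag that isThrottled() reports; when to throttle a
    // collection is left to the caller (assumption).
    dataCollection.throttle()
    require(dataCollection.isThrottled())
  }
}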
Inherited from AnyRef
Inherited from Any
This class wraps the logic of collecting data from Spark event listeners into HadoopApplicationData instances.
Note: this class has to live under Spark's package scope because ApplicationEventListener is private[spark], and it is problematic to compile if written in Java; see the sketch below.
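A minimal sketch of the trick that note describes: Scala's private[spark] modifier grants access to any code compiled in a package under org.apache.spark, something Java's access modifiers cannot express, which is why the class is written in Scala. The ListenerHolder class below is hypothetical, purely to illustrate the visibility rule.

// This file lives in an external project, but declaring it under Spark's own
// package hierarchy makes private[spark] types visible to it.
package org.apache.spark

// Resolves only because this file is inside the org.apache.spark package.
import org.apache.spark.scheduler.ApplicationEventListener

// Hypothetical wrapper: compiles here, would not compile from an ordinary
// package, and has no Java equivalent because Java lacks package-qualified
// access modifiers like private[spark].
class ListenerHolder(val appListener: ApplicationEventListener)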