case class Write[K1, K2, T](
  keyFieldPrimary: String,
  keyFieldSecondary: Option[String],
  compression: CompressionCodecName = ParquetTypeFileOperations.DefaultCompression,
  configuration: Configuration = new Configuration(),
  numBuckets: Integer = null,
  numShards: Int = SortedBucketIO.DEFAULT_NUM_SHARDS,
  filenamePrefix: String = SortedBucketIO.DEFAULT_FILENAME_PREFIX,
  hashType: HashType = SortedBucketIO.DEFAULT_HASH_TYPE,
  outputDirectory: ResourceId = null,
  tempDirectory: ResourceId = null,
  filenameSuffix: String = DefaultSuffix,
  sorterMemoryMb: Int = SortedBucketIO.DEFAULT_SORTER_MEMORY_MB,
  keyCacheSize: Int = 0
)(implicit
  evidence$23: ClassTag[K1],
  evidence$24: ClassTag[K2],
  evidence$25: ClassTag[T],
  evidence$26: Coder[T],
  evidence$27: ParquetType[T]
) extends SortedBucketIO.Write[K1, K2, T] with Product with Serializable
Linear Supertypes
Ordering
- Alphabetic
- By Inheritance
Inherited
- Write
- Product
- Equals
- SortedBucketIO.Write
- PTransform
- HasDisplayData
- Serializable
- AnyRef
- Any
Instance Constructors
- new Write(keyFieldPrimary: String, keyFieldSecondary: Option[String], compression: CompressionCodecName = ParquetTypeFileOperations.DefaultCompression, configuration: Configuration = new Configuration(), numBuckets: Integer = null, numShards: Int = SortedBucketIO.DEFAULT_NUM_SHARDS, filenamePrefix: String = SortedBucketIO.DEFAULT_FILENAME_PREFIX, hashType: HashType = SortedBucketIO.DEFAULT_HASH_TYPE, outputDirectory: ResourceId = null, tempDirectory: ResourceId = null, filenameSuffix: String = DefaultSuffix, sorterMemoryMb: Int = SortedBucketIO.DEFAULT_SORTER_MEMORY_MB, keyCacheSize: Int = 0)(implicit arg0: ClassTag[K1], arg1: ClassTag[K2], arg2: ClassTag[T], arg3: Coder[T], arg4: ParquetType[T])
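All constructor parameters other than the key fields have defaults, so a Write can be instantiated directly and then refined with the setters listed under Value Members. The sketch below is a minimal, hedged example: it assumes this class is the ParquetTypeSortedBucketIO.Write provided by scio-smb and is already in scope, that magnolify-parquet's ParquetType derivation supplies the ParquetType[T] evidence (the Coder[T] and ClassTag evidence are derived implicitly by Scio and the compiler), and that Void stands in for the secondary key type when keyFieldSecondary is None; the User record and its fields are hypothetical.

```scala
import magnolify.parquet.ParquetType

// Hypothetical record type; the field names exist only for this example.
case class User(userId: String, score: Double)

// ParquetType[User] evidence, derived at compile time by magnolify-parquet.
implicit val userParquetType: ParquetType[User] = ParquetType[User]

// Bucket and sort records by the "userId" field. No secondary key is used,
// so keyFieldSecondary is None and Void stands in for K2 (an assumption).
val write: Write[String, Void, User] =
  Write[String, Void, User](keyFieldPrimary = "userId", keyFieldSecondary = None)
```

Each with* setter returns a modified copy, so the same value can be refined further before the transform is applied (see the sketch after the value members list).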
Value Members
- final def !=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- final def ##: Int
- Definition Classes
- AnyRef → Any
- final def ==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- def addAnnotation(annotationType: String, annotation: Array[Byte]): PTransform[PCollection[T], WriteResult]
- Definition Classes
- PTransform
- final def asInstanceOf[T0]: T0
- Definition Classes
- Any
- def clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.CloneNotSupportedException]) @native()
- val compression: CompressionCodecName
- val configuration: Configuration
- final def eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- def expand(input: PCollection[T]): WriteResult
- Definition Classes
- Write → PTransform
- Annotations
- @SuppressWarnings() @Override()
- val filenamePrefix: String
- val filenameSuffix: String
- def finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.Throwable])
- def getAdditionalInputs(): Map[TupleTag[_ <: AnyRef], PValue]
- Definition Classes
- PTransform
- def getAnnotations(): Map[String, Array[Byte]]
- Definition Classes
- PTransform
- def getBucketMetadata(): BucketMetadata[K1, K2, T]
- Definition Classes
- Write → Write
- final def getClass(): Class[_ <: AnyRef]
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
- def getFileOperations(): FileOperations[T]
- Definition Classes
- Write → Write
- def getFilenamePrefix(): String
- Definition Classes
- Write → Write
- def getFilenameSuffix(): String
- Definition Classes
- Write → Write
- def getHashType(): HashType
- Definition Classes
- Write → Write
- def getKeyCacheSize(): Int
- Definition Classes
- Write → Write
- def getKeyClassPrimary(): Class[K1]
- Definition Classes
- Write → Write
- def getKeyClassSecondary(): Class[K2]
- Definition Classes
- Write → Write
- def getKindString(): String
- Attributes
- protected[transforms]
- Definition Classes
- PTransform
- def getName(): String
- Definition Classes
- PTransform
- def getNumBuckets(): Integer
- Definition Classes
- Write → Write
- def getNumShards(): Int
- Definition Classes
- Write → Write
- def getOutputDirectory(): ResourceId
- Definition Classes
- Write → Write
- def getResourceHints(): ResourceHints
- Definition Classes
- PTransform
- def getSorterMemoryMb(): Int
- Definition Classes
- Write → Write
- def getTempDirectory(): ResourceId
- Definition Classes
- Write → Write
- val hashType: HashType
- final def isInstanceOf[T0]: Boolean
- Definition Classes
- Any
- val keyCacheSize: Int
- val keyFieldPrimary: String
- val keyFieldSecondary: Option[String]
- final def ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- final def notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
- final def notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
- val numBuckets: Integer
- val numShards: Int
- def onKeyedCollection(valueCoder: Coder[T], verifyKeyExtraction: Boolean): PreKeyedWrite[K1, T]
- Definition Classes
- Write
- val outputDirectory: ResourceId
- def populateDisplayData(builder: Builder): Unit
- Definition Classes
- PTransform → HasDisplayData
- def productElementNames: Iterator[String]
- Definition Classes
- Product
- def setDisplayData(displayData: List[ItemSpec[_ <: AnyRef]]): PTransform[PCollection[T], WriteResult]
- Definition Classes
- PTransform
- def setResourceHints(resourceHints: ResourceHints): PTransform[PCollection[T], WriteResult]
- Definition Classes
- PTransform
- val sorterMemoryMb: Int
- final def synchronized[T0](arg0: => T0): T0
- Definition Classes
- AnyRef
- val tempDirectory: ResourceId
- def to(outputDirectory: String): Write[K1, K2, T]
- def toString(): String
- Definition Classes
- PTransform → AnyRef → Any
- Annotations
- @SideEffectFree()
- def validate(options: PipelineOptions, inputs: Map[TupleTag[_ <: AnyRef], PCollection[_ <: AnyRef]], outputs: Map[TupleTag[_ <: AnyRef], PCollection[_ <: AnyRef]]): Unit
- Definition Classes
- PTransform
- def validate(options: PipelineOptions): Unit
- Definition Classes
- PTransform
- final def wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException])
- final def wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException])
- final def wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException]) @native()
- def withCompression(compression: CompressionCodecName): Write[K1, K2, T]
- def withConfiguration(configuration: Configuration): Write[K1, K2, T]
- def withFilenamePrefix(filenamePrefix: String): Write[K1, K2, T]
- def withHashType(hashType: HashType): Write[K1, K2, T]
- def withKeyCacheOfSize(keyCacheSize: Int): Write[K1, K2, T]
- def withNumBuckets(numBuckets: Int): Write[K1, K2, T]
- def withNumShards(numShards: Int): Write[K1, K2, T]
- def withSorterMemoryMb(sorterMemoryMb: Int): Write[K1, K2, T]
- def withSuffix(filenameSuffix: String): Write[K1, K2, T]
- def withTempDirectory(tempDirectory: String): Write[K1, K2, T]
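The to, withTempDirectory, and with* members above each return a copy of the Write with the corresponding field replaced, and expand runs when the transform is applied to a PCollection[T]. The following sketch continues the construction example above; it assumes Beam's standard PCollection#apply, uses hypothetical GCS paths, and sets numBuckets to a power of two.

```scala
import org.apache.beam.sdk.values.PCollection
import org.apache.parquet.hadoop.metadata.CompressionCodecName

// Fluent configuration: each call returns a new Write with one field replaced.
// The output and temp paths are hypothetical placeholders.
val configured: Write[String, Void, User] =
  write
    .to("gs://example-bucket/users-smb")
    .withTempDirectory("gs://example-bucket/tmp")
    .withNumBuckets(128)
    .withNumShards(4)
    .withSorterMemoryMb(256)
    .withCompression(CompressionCodecName.SNAPPY)

// Applying the transform invokes expand and yields the sink's WriteResult.
val users: PCollection[User] = ??? // produced earlier in the pipeline
val result = users.apply(configured)
```

Within Scio pipelines the same configured Write is typically passed to SCollection#saveAsSortedBucket (from com.spotify.scio.smb); the plain Beam apply shown above is only for illustration.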
Deprecated Value Members
- def getDefaultOutputCoder[T <: AnyRef](input: PCollection[T], output: PCollection[T]): Coder[T]
- Definition Classes
- PTransform
- Annotations
- @throws(classOf[org.apache.beam.sdk.coders.CannotProvideCoderException]) @Deprecated
- Deprecated
- def getDefaultOutputCoder(input: PCollection[T]): Coder[_ <: AnyRef]
- Attributes
- protected[transforms]
- Definition Classes
- PTransform
- Annotations
- @throws(classOf[org.apache.beam.sdk.coders.CannotProvideCoderException]) @Deprecated
- Deprecated
- def getDefaultOutputCoder(): Coder[_ <: AnyRef]
- Attributes
- protected[transforms]
- Definition Classes
- PTransform
- Annotations
- @throws(classOf[org.apache.beam.sdk.coders.CannotProvideCoderException]) @Deprecated
- Deprecated