class SnappyCompressionCodec extends CompressionCodec

Instance Constructors
- new SnappyCompressionCodec(conf: SparkConf)
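A minimal construction sketch. Since the class is public (marked :: DeveloperApi ::, see the description at the end of this page), it can be instantiated directly, though in normal use Spark creates the codec internally based on configuration:

  import org.apache.spark.SparkConf
  import org.apache.spark.io.SnappyCompressionCodec

  // Direct construction with a default SparkConf; Spark itself normally
  // selects and instantiates the codec from its configuration.
  val codec = new SnappyCompressionCodec(new SparkConf())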
Value Members
- final def !=(arg0: Any): Boolean
- final def ##(): Int
- final def ==(arg0: Any): Boolean
- final def asInstanceOf[T0]: T0
- def clone(): AnyRef
- def compress(input: Array[Byte], inputLen: Int): Array[Byte]
- def compressedInputStream(s: InputStream): InputStream
- def compressedOutputStream(s: OutputStream): OutputStream
- def decompress(input: Array[Byte], inputOffset: Int, inputLen: Int, outputLen: Int): Array[Byte]
- final def eq(arg0: AnyRef): Boolean
- def equals(arg0: Any): Boolean
- def finalize(): Unit
- final def getClass(): Class[_]
- def hashCode(): Int
- final def isInstanceOf[T0]: Boolean
- final def ne(arg0: AnyRef): Boolean
- final def notify(): Unit
- final def notifyAll(): Unit
- final def synchronized[T0](arg0: ⇒ T0): T0
- def toString(): String
- val version: String
- final def wait(): Unit
- final def wait(arg0: Long, arg1: Int): Unit
- final def wait(arg0: Long): Unit
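A minimal sketch of the stream members above, round-tripping a byte array through compressedOutputStream and compressedInputStream. It assumes the snappy-java library is on the classpath (it ships with Spark); the object name SnappyRoundTrip is illustrative only:

  import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
  import org.apache.spark.SparkConf
  import org.apache.spark.io.SnappyCompressionCodec

  object SnappyRoundTrip {
    def main(args: Array[String]): Unit = {
      val codec = new SnappyCompressionCodec(new SparkConf())
      val payload = "some bytes worth compressing".getBytes("UTF-8")

      // compressedOutputStream wraps any sink; closing it flushes the
      // final Snappy block, so close before reading the bytes back.
      val sink = new ByteArrayOutputStream()
      val out = codec.compressedOutputStream(sink)
      out.write(payload)
      out.close()

      // compressedInputStream wraps the compressed bytes for reading.
      val in = codec.compressedInputStream(new ByteArrayInputStream(sink.toByteArray))
      val restored = in.readAllBytes() // Java 9+; loop over read(...) on older JVMs
      in.close()

      assert(java.util.Arrays.equals(payload, restored))
    }
  }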
:: DeveloperApi ::
Snappy implementation of org.apache.spark.io.CompressionCodec. Block size can be configured by spark.io.compression.snappy.blockSize. The wire protocol for this codec is not guaranteed to be compatible across versions of Spark. This is intended for use as an internal compression utility within a single Spark application.
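For example, the block size mentioned above can be set through SparkConf before the codec is created. Both keys below are real Spark settings; the 64k value is arbitrary, chosen only for illustration (Spark's documented default is 32k):

  import org.apache.spark.SparkConf

  val conf = new SparkConf()
    .set("spark.io.compression.codec", "snappy")          // select this codec for internal data
    .set("spark.io.compression.snappy.blockSize", "64k")  // the block size named in the description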