object SparkSQLExecuteImpl
Value Members
- final def !=(arg0: Any): Boolean
- final def ##(): Int
- final def ==(arg0: Any): Boolean
- lazy val STRING_AS_CLOB: Boolean (see the property sketch after this list)
- final def asInstanceOf[T0]: T0
- def clone(): AnyRef
- final def eq(arg0: AnyRef): Boolean
- def equals(arg0: Any): Boolean
- def finalize(): Unit
- final def getClass(): Class[_]
- def getClobProperties(session: SnappySession): (Boolean, Set[String]) (see the property sketch after this list)
- def getContextOrCurrentClassLoader: ClassLoader (see the class-loader sketch after this list)
- def getJsonProperties(session: SnappySession): Boolean (see the property sketch after this list)
- def getRowIterator(dvds: Array[DataValueDescriptor], types: Array[Int], precisions: Array[Int], scales: Array[Int], dataTypes: Array[AnyRef], input: ByteArrayDataInput): Iterator[ValueRow] (see the deserialization sketch after this list)
- def getSQLType(dataType: DataType, complexTypeAsJson: Boolean, metaData: Metadata = Metadata.empty, metaName: String = "", allAsClob: Boolean = false, columnsAsClob: Set[String] = Set.empty): (Int, Int, Int) (see the type-mapping sketch after this list)
- def getTableNamesAndNullability(session: SnappySession, output: Seq[Attribute]): (Seq[String], Seq[Boolean]) (see the metadata sketch after this list)
- def handleLocalExecution(srh: SnappyResultHolder, hdos: GfxdHeapDataOutputStream): Unit (see the serialization sketch after this list)
- def hashCode(): Int
- final def isInstanceOf[T0]: Boolean
- final def ne(arg0: AnyRef): Boolean
- final def notify(): Unit
- final def notifyAll(): Unit
- def serializeRows(out: DataOutput, hasMetadata: Boolean, hdos: GfxdHeapDataOutputStream): Unit (see the serialization sketch after this list)
- final def synchronized[T0](arg0: ⇒ T0): T0
- def toString(): String
- final def wait(): Unit
- final def wait(arg0: Long, arg1: Int): Unit
- final def wait(arg0: Long): Unit
- def writeMetaData(srh: SnappyResultHolder, hdos: GfxdHeapDataOutputStream, tableNames: Seq[String], nullability: Seq[Boolean], columnNames: Array[String], colTypes: Array[(Int, Int, Int)], dataTypes: Array[DataType], warnings: SQLWarning): Unit (see the serialization sketch after this list)
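
Usage sketches

The sketches below are illustrative rather than authoritative: SparkSQLExecuteImpl is internal to SnappyData, the io.snappydata.gemxd package used in the imports is an assumption based on SnappyData 1.x layouts, and the store-side types (SnappyResultHolder, GfxdHeapDataOutputStream, DataValueDescriptor, ByteArrayDataInput) come from the GemFireXD layer.

Property sketch. STRING_AS_CLOB, getClobProperties and getJsonProperties read session-level settings that drive how result metadata is produced; the (Boolean, Set[String]) pair plausibly corresponds to the allAsClob/columnsAsClob parameters of getSQLType. A minimal sketch, assuming a SnappyData build on the classpath:

    import io.snappydata.gemxd.SparkSQLExecuteImpl // assumed package; adjust per version
    import org.apache.spark.sql.{SnappySession, SparkSession}

    object PropertyExample {
      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder().master("local[*]")
          .appName("property-sketch").getOrCreate()
        val session = new SnappySession(spark.sparkContext)

        // Global default for treating string columns as CLOB.
        val stringAsClob = SparkSQLExecuteImpl.STRING_AS_CLOB
        // (allAsClob, columnsAsClob): report every string column as CLOB,
        // or only the named ones.
        val (allAsClob, clobColumns) = SparkSQLExecuteImpl.getClobProperties(session)
        // Whether complex types (arrays, maps, structs) are sent as JSON.
        val complexTypeAsJson = SparkSQLExecuteImpl.getJsonProperties(session)

        println(s"stringAsClob=$stringAsClob allAsClob=$allAsClob " +
          s"columns=$clobColumns json=$complexTypeAsJson")
      }
    }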
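
Type-mapping sketch. getSQLType maps a Catalyst DataType to a (typeId, precision, scale) triple; the allAsClob/columnsAsClob parameters mirror the property pair above. A hedged sketch, with the same assumed import as the property sketch; the concrete values in the triple are version-dependent:

    import org.apache.spark.sql.types._

    val varcharInfo = SparkSQLExecuteImpl.getSQLType(StringType,
      complexTypeAsJson = true)
    val decimalInfo = SparkSQLExecuteImpl.getSQLType(DecimalType(20, 4),
      complexTypeAsJson = true)
    val clobInfo = SparkSQLExecuteImpl.getSQLType(StringType,
      complexTypeAsJson = true, allAsClob = true)
    // For DecimalType(20, 4) one would expect precision 20 and scale 4 in
    // the returned triple; treat the numeric type id itself as opaque.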
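
Metadata sketch. getTableNamesAndNullability walks a plan's output attributes and returns, per column, the owning table name and nullability, both aligned with the input sequence. A sketch reusing `session` from the property sketch; app.customers is a hypothetical table:

    import org.apache.spark.sql.catalyst.expressions.Attribute

    val df = session.sql("SELECT id, name FROM app.customers")
    val output: Seq[Attribute] = df.queryExecution.analyzed.output
    val (tables, nullability) =
      SparkSQLExecuteImpl.getTableNamesAndNullability(session, output)
    tables.zip(nullability).foreach { case (t, n) =>
      println(s"table=$t nullable=$n")
    }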
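
Serialization sketch. writeMetaData and serializeRows form the producer side of the wire format: column metadata (table names, nullability, column names, the (typeId, precision, scale) triples from getSQLType, the Catalyst data types and any SQLWarning) goes into the heap stream, and serializeRows then copies the payload, flagged by hasMetadata, to the destination DataOutput. handleLocalExecution covers the same-JVM case where no DataOutput hop is needed. Here srh, hdos and the metadata arrays are assumed to be supplied by the surrounding execution layer:

    import java.io.{ByteArrayOutputStream, DataOutputStream}

    // srh (a SnappyResultHolder) and hdos (a GfxdHeapDataOutputStream) are
    // created by the GemFireXD execution layer and are not shown here.
    SparkSQLExecuteImpl.writeMetaData(srh, hdos, tables, nullability,
      columnNames, colTypes, dataTypes, warnings = null)

    val bytes = new ByteArrayOutputStream()
    val out = new DataOutputStream(bytes)
    SparkSQLExecuteImpl.serializeRows(out, hasMetadata = true, hdos)

    // Same-JVM path: hand the result holder over without a wire hop.
    SparkSQLExecuteImpl.handleLocalExecution(srh, hdos)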
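
Deserialization sketch. getRowIterator is the consumer-side counterpart: given a row template (the DataValueDescriptor array plus the parallel type/precision/scale/dataType arrays) and a ByteArrayDataInput positioned at the serialized rows, it yields ValueRow instances. The inputs below are hypothetical, assumed to be rebuilt from the metadata block on the receiving side:

    // dvds, types, precisions, scales, dataTypes and input are placeholder
    // values prepared by the receiving GemFireXD code.
    val rows = SparkSQLExecuteImpl.getRowIterator(dvds, types, precisions,
      scales, dataTypes, input)
    rows.foreach(println)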
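
Class-loader sketch. getContextOrCurrentClassLoader follows the usual fallback idiom: prefer the thread's context class loader, else fall back to the loader of the current class. A sketch of that idiom, not necessarily the object's actual body:

    def contextOrCurrentClassLoader: ClassLoader =
      Option(Thread.currentThread().getContextClassLoader)
        .getOrElse(getClass.getClassLoader)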