object SnowflakeConnectorUtils
Connector utilities, including what needs to be invoked to enable pushdowns.
- Alphabetic
- By Inheritance
- SnowflakeConnectorUtils
- AnyRef
- Any
- Hide All
- Show All
- Public
- Protected
Value Members
- final def !=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- final def ##: Int
- Definition Classes
- AnyRef → Any
- final def ==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- val SUPPORT_SPARK_VERSION: String
The supported Spark version. The connector checks the running Spark version against it: if the versions match, pushdown is enabled; otherwise it is disabled.
- final def asInstanceOf[T0]: T0
- Definition Classes
- Any
- def checkVersionAndEnablePushdown(session: SparkSession): Boolean
- def clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.CloneNotSupportedException]) @native() @HotSpotIntrinsicCandidate()
- def disablePushdownSession(session: SparkSession): Unit
Disable more advanced query pushdowns to Snowflake.
- session
The SparkSession for which pushdowns are to be disabled.
- def disableSharingJDBCConnection(): Unit
Disable the JDBC connection sharing optimization. If JDBC connection sharing is enabled, the Spark connector will attempt to re-use the JDBC connection when the Spark connector options are the same.
- def enablePushdownSession(session: SparkSession): Unit
Enable more advanced query pushdowns to Snowflake.
- session
The SparkSession for which pushdowns are to be enabled.
- def enableSharingJDBCConnection(): Unit
Enable the JDBC connection sharing optimization. If JDBC connection sharing is enabled, the Spark connector will attempt to re-use the JDBC connection when the Spark connector options are the same.
- final def eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- def equals(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef → Any
- final def getClass(): Class[_ <: AnyRef]
- Definition Classes
- AnyRef → Any
- Annotations
- @native() @HotSpotIntrinsicCandidate()
- def handleS3Exception(ex: Exception): Unit
- Annotations
- @throws(scala.this.throws.<init>$default$1[net.snowflake.spark.snowflake.SnowflakeConnectorException])
- def hashCode(): Int
- Definition Classes
- AnyRef → Any
- Annotations
- @native() @HotSpotIntrinsicCandidate()
- final def isInstanceOf[T0]: Boolean
- Definition Classes
- Any
- lazy val log: Logger
- Annotations
- @transient()
- final def ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- final def notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native() @HotSpotIntrinsicCandidate()
- final def notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native() @HotSpotIntrinsicCandidate()
- def setPushdownSession(session: SparkSession, enabled: Boolean): Unit
- final def synchronized[T0](arg0: => T0): T0
- Definition Classes
- AnyRef
- def toString(): String
- Definition Classes
- AnyRef → Any
- final def wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException])
- final def wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException]) @native()
- final def wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException])
Deprecated Value Members
- def finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.Throwable]) @Deprecated
- Deprecated