Packages

class SQLConf extends Serializable with Logging

A class that enables the setting and getting of mutable config parameters/hints.

In the presence of a SQLContext, these can be set and queried by passing SET commands into Spark SQL's query functions (i.e. sql()). Otherwise, users of this class can modify the hints by programmatically calling the setters and getters of this class.

SQLConf is thread-safe (internally synchronized, so safe to be used in multiple threads).

Linear Supertypes
Logging, Serializable, Serializable, AnyRef, Any
Known Subclasses
Ordering
  1. Alphabetic
  2. By Inheritance
Inherited
  1. SQLConf
  2. Logging
  3. Serializable
  4. Serializable
  5. AnyRef
  6. Any
  1. Hide All
  2. Show All
Visibility
  1. Public
  2. All

Instance Constructors

  1. new SQLConf()

Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##(): Int
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  4. def adaptiveExecutionEnabled: Boolean
  5. def adaptiveExecutionLogLevel: String
  6. def addSingleFileInAddFile: Boolean
  7. def advancedPartitionPredicatePushdownEnabled: Boolean
  8. def allowAutoGeneratedAliasForView: Boolean
  9. def allowNegativeScaleOfDecimalEnabled: Boolean
  10. def allowNonEmptyLocationInCTAS: Boolean
  11. def allowStarWithSingleTableIdentifierInCount: Boolean
  12. def analyzerMaxIterations: Int

    Spark SQL Params/Hints

  13. def ansiEnabled: Boolean
  14. def arrowMaxRecordsPerBatch: Int
  15. def arrowPySparkEnabled: Boolean
  16. def arrowPySparkFallbackEnabled: Boolean
  17. def arrowPySparkSelfDestructEnabled: Boolean
  18. def arrowSafeTypeConversion: Boolean
  19. def arrowSparkREnabled: Boolean
  20. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  21. def autoBroadcastJoinThreshold: Long
  22. def autoBucketedScanEnabled: Boolean
  23. def autoSizeUpdateEnabled: Boolean
  24. def avroCompressionCodec: String
  25. def avroDeflateLevel: Int
  26. def avroFilterPushDown: Boolean
  27. def broadcastHashJoinOutputPartitioningExpandLimit: Int
  28. def broadcastTimeout: Long
  29. def bucketingEnabled: Boolean
  30. def bucketingMaxBuckets: Int
  31. def cacheVectorizedReaderEnabled: Boolean
  32. def cartesianProductExecBufferInMemoryThreshold: Int
  33. def cartesianProductExecBufferSpillThreshold: Int
  34. def caseSensitiveAnalysis: Boolean
  35. def caseSensitiveInferenceMode: SQLConf.HiveCaseSensitiveInferenceMode.Value
  36. def castDatetimeToString: Boolean
  37. def cboEnabled: Boolean
  38. def charVarcharAsString: Boolean
  39. def checkpointLocation: Option[String]
  40. def clear(): Unit
  41. def cliPrintHeader: Boolean
  42. def clone(): SQLConf
    Definition Classes
    SQLConf → AnyRef
  43. def coalesceBucketsInJoinEnabled: Boolean
  44. def coalesceBucketsInJoinMaxBucketRatio: Int
  45. def coalesceShufflePartitionsEnabled: Boolean
  46. def codegenCacheMaxEntries: Int
  47. def codegenComments: Boolean
  48. def codegenFallback: Boolean
  49. def codegenSplitAggregateFunc: Boolean
  50. def columnBatchSize: Int
  51. def columnNameOfCorruptRecord: String
  52. def concatBinaryAsString: Boolean
  53. def constraintPropagationEnabled: Boolean
  54. def contains(key: String): Boolean

    Return whether a given key is set in this SQLConf.

  55. def continuousStreamingEpochBacklogQueueSize: Int
  56. def continuousStreamingExecutorPollIntervalMs: Long
  57. def continuousStreamingExecutorQueueSize: Int
  58. def convertCTAS: Boolean
  59. def copy(entries: (ConfigEntry[_], Any)*): SQLConf
  60. def crossJoinEnabled: Boolean
  61. def csvColumnPruning: Boolean
  62. def csvExpressionOptimization: Boolean
  63. def csvFilterPushDown: Boolean
  64. def dataFramePivotMaxValues: Int
  65. def dataFrameRetainGroupColumns: Boolean
  66. def dataFrameSelfJoinAutoResolveAmbiguity: Boolean
  67. def datetimeJava8ApiEnabled: Boolean
  68. def decimalOperationsAllowPrecisionLoss: Boolean
  69. def decorrelateInnerQueryEnabled: Boolean
  70. def defaultDataSourceName: String
  71. def defaultNumShufflePartitions: Int
  72. def defaultSizeInBytes: Long
  73. def disabledJdbcConnectionProviders: String
  74. def disabledV2StreamingMicroBatchReaders: String
  75. def disabledV2StreamingWriters: String
  76. def dynamicPartitionPruningEnabled: Boolean
  77. def dynamicPartitionPruningFallbackFilterRatio: Double
  78. def dynamicPartitionPruningReuseBroadcastOnly: Boolean
  79. def dynamicPartitionPruningUseStats: Boolean
  80. def eltOutputAsString: Boolean
  81. def enableRadixSort: Boolean
  82. def enableTwoLevelAggMap: Boolean
  83. def enableVectorizedHashMap: Boolean
  84. def enforceReservedKeywords: Boolean
  85. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  86. def equals(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  87. def escapedStringLiterals: Boolean
  88. def exchangeReuseEnabled: Boolean
  89. def exponentLiteralAsDecimalEnabled: Boolean
  90. def fallBackToHdfsForStatsEnabled: Boolean
  91. def fastFailFileFormatOutput: Boolean
  92. def fastHashAggregateRowMaxCapacityBit: Int
  93. def fetchShuffleBlocksInBatch: Boolean
  94. def fileCommitProtocolClass: String
  95. def fileCompressionFactor: Double
  96. def fileSinkLogCleanupDelay: Long
  97. def fileSinkLogCompactInterval: Int
  98. def fileSinkLogDeletion: Boolean
  99. def fileSourceLogCleanupDelay: Long
  100. def fileSourceLogCompactInterval: Int
  101. def fileSourceLogDeletion: Boolean
  102. def fileStreamSinkMetadataIgnored: Boolean
  103. def filesMaxPartitionBytes: Long
  104. def filesMinPartitionNum: Option[Int]
  105. def filesOpenCostInBytes: Long
  106. def filesourcePartitionFileCacheSize: Long
  107. def finalize(): Unit
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  108. def gatherFastStats: Boolean
  109. def getAllConfs: Map[String, String]

    Return all the configuration properties that have been set (i.e. not the default).

    Return all the configuration properties that have been set (i.e. not the default). This creates a new copy of the config properties in the form of a Map.

  110. def getAllDefinedConfs: Seq[(String, String, String, String)]

    Return all the configuration definitions that have been defined in SQLConf.

    Return all the configuration definitions that have been defined in SQLConf. Each definition contains key, defaultValue and doc.

  111. final def getClass(): Class[_]
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  112. def getConf[T](entry: OptionalConfigEntry[T]): Option[T]

    Return the value of an optional Spark SQL configuration property for the given key.

    Return the value of an optional Spark SQL configuration property for the given key. If the key is not set yet, returns None.

  113. def getConf[T](entry: ConfigEntry[T]): T

    Return the value of Spark SQL configuration property for the given key.

    Return the value of Spark SQL configuration property for the given key. If the key is not set yet, return defaultValue in ConfigEntry.

  114. def getConf[T](entry: ConfigEntry[T], defaultValue: T): T

    Return the value of Spark SQL configuration property for the given key.

    Return the value of Spark SQL configuration property for the given key. If the key is not set yet, return defaultValue. This is useful when defaultValue in ConfigEntry is not the desired one.

  115. def getConfString(key: String, defaultValue: String): String

    Return the string value of Spark SQL configuration property for the given key.

    Return the string value of Spark SQL configuration property for the given key. If the key is not set yet, return defaultValue.

  116. def getConfString(key: String): String

    Return the value of Spark SQL configuration property for the given key.

    Return the value of Spark SQL configuration property for the given key.

    Annotations
    @throws( "if key is not set" )
  117. def groupByAliases: Boolean
  118. def groupByOrdinal: Boolean
  119. def groupingIdWithAppendedUserGroupByEnabled: Boolean
  120. def hashCode(): Int
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  121. def hintErrorHandler: HintErrorHandler

    Returns the error handler for handling hint errors.

  122. def histogramEnabled: Boolean
  123. def histogramNumBins: Int
  124. def histogramNumericPropagateInputType: Boolean
  125. def hiveThriftServerSingleSession: Boolean
  126. def hugeMethodLimit: Int
  127. def ignoreCorruptFiles: Boolean
  128. def ignoreDataLocality: Boolean
  129. def ignoreMissingFiles: Boolean
  130. def ignoreMissingParquetFieldId: Boolean
  131. def inMemoryPartitionPruning: Boolean
  132. def inMemoryTableScanStatisticsEnabled: Boolean
  133. def inferDictAsStruct: Boolean
  134. def initializeLogIfNecessary(isInterpreter: Boolean, silent: Boolean): Boolean
    Attributes
    protected
    Definition Classes
    Logging
  135. def initializeLogIfNecessary(isInterpreter: Boolean): Unit
    Attributes
    protected
    Definition Classes
    Logging
  136. def integerGroupingIdEnabled: Boolean
  137. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  138. def isModifiable(key: String): Boolean
  139. def isOrcSchemaMergingEnabled: Boolean
  140. def isParquetBinaryAsString: Boolean
  141. def isParquetINT96AsTimestamp: Boolean
  142. def isParquetINT96TimestampConversion: Boolean
  143. def isParquetSchemaMergingEnabled: Boolean
  144. def isParquetSchemaRespectSummaries: Boolean
  145. def isReplEagerEvalEnabled: Boolean
  146. def isStateSchemaCheckEnabled: Boolean
  147. def isTraceEnabled(): Boolean
    Attributes
    protected
    Definition Classes
    Logging
  148. def isUnsupportedOperationCheckEnabled: Boolean
  149. def joinReorderCardWeight: Double
  150. def joinReorderDPStarFilter: Boolean
  151. def joinReorderDPThreshold: Int
  152. def joinReorderEnabled: Boolean
  153. def jsonExpressionOptimization: Boolean
  154. def jsonFilterPushDown: Boolean
  155. def jsonGeneratorIgnoreNullFields: Boolean
  156. def legacyIntervalEnabled: Boolean
  157. def legacyMsSqlServerNumericMappingEnabled: Boolean
  158. def legacyParquetNanosAsLong: Boolean
  159. def legacyPathOptionBehavior: Boolean
  160. def legacySizeOfNull: Boolean
  161. def legacyStatisticalAggregate: Boolean
  162. def legacyTimeParserPolicy: SQLConf.LegacyBehaviorPolicy.Value
  163. def limitScaleUpFactor: Int
  164. def literalPickMinimumPrecision: Boolean
  165. def log: Logger
    Attributes
    protected
    Definition Classes
    Logging
  166. def logDebug(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  167. def logDebug(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  168. def logError(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  169. def logError(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  170. def logInfo(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  171. def logInfo(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  172. def logName: String
    Attributes
    protected
    Definition Classes
    Logging
  173. def logTrace(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  174. def logTrace(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  175. def logWarning(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  176. def logWarning(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  177. def loggingMaxLinesForCodegen: Int
  178. def manageFilesourcePartitions: Boolean
  179. def maxBatchesToRetainInMemory: Int
  180. def maxConcurrentOutputFileWriters: Int
  181. def maxMetadataStringLength: Int
  182. def maxNestedViewDepth: Int
  183. def maxPlanStringLength: Int
  184. def maxRecordsPerFile: Long
  185. def maxToStringFields: Int
  186. def metadataCacheTTL: Long
  187. def metastorePartitionPruning: Boolean
  188. def metastorePartitionPruningFallbackOnException: Boolean
  189. def metastorePartitionPruningFastFallback: Boolean
  190. def metastorePartitionPruningInSetThreshold: Int
  191. def methodSplitThreshold: Int
  192. def minBatchesToRetain: Int
  193. def nameNonStructGroupingKeyAsValue: Boolean
  194. def ndvMaxError: Double
  195. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  196. def nestedPruningOnExpressions: Boolean
  197. def nestedSchemaPruningEnabled: Boolean
  198. def nonEmptyPartitionRatioForBroadcastJoin: Double
  199. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  200. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  201. def numShufflePartitions: Int
  202. def objectAggSortBasedFallbackThreshold: Int
  203. def offHeapColumnVectorEnabled: Boolean
  204. def optimizeNullAwareAntiJoin: Boolean
  205. def optimizerExcludedRules: Option[String]
  206. def optimizerInSetConversionThreshold: Int
  207. def optimizerInSetSwitchThreshold: Int
  208. def optimizerMaxIterations: Int
  209. def optimizerMetadataOnly: Boolean
  210. def orcAggregatePushDown: Boolean
  211. def orcCompressionCodec: String
  212. def orcFilterPushDown: Boolean
  213. def orcVectorizedReaderBatchSize: Int
  214. def orcVectorizedReaderEnabled: Boolean
  215. def orcVectorizedReaderNestedColumnEnabled: Boolean
  216. def orderByOrdinal: Boolean
  217. def pandasGroupedMapAssignColumnsByName: Boolean
  218. def pandasUDFBufferSize: Int
  219. def parallelFileListingInStatsComputation: Boolean
  220. def parallelPartitionDiscoveryParallelism: Int
  221. def parallelPartitionDiscoveryThreshold: Int
  222. def parquetAggregatePushDown: Boolean
  223. def parquetCompressionCodec: String
  224. def parquetFieldIdReadEnabled: Boolean
  225. def parquetFieldIdWriteEnabled: Boolean
  226. def parquetFilterPushDown: Boolean
  227. def parquetFilterPushDownDate: Boolean
  228. def parquetFilterPushDownDecimal: Boolean
  229. def parquetFilterPushDownInFilterThreshold: Int
  230. def parquetFilterPushDownStringStartWith: Boolean
  231. def parquetFilterPushDownTimestamp: Boolean
  232. def parquetOutputCommitterClass: String
  233. def parquetOutputTimestampType: SQLConf.ParquetOutputTimestampType.Value
  234. def parquetRecordFilterEnabled: Boolean
  235. def parquetVectorizedReaderBatchSize: Int
  236. def parquetVectorizedReaderEnabled: Boolean
  237. def parquetVectorizedReaderNestedColumnEnabled: Boolean
  238. def partitionColumnTypeInferenceEnabled: Boolean
  239. def partitionOverwriteMode: SQLConf.PartitionOverwriteMode.Value
  240. def percentileAccuracy: Int
  241. def planChangeBatches: Option[String]
  242. def planChangeLogLevel: String
  243. def planChangeRules: Option[String]
  244. def planStatsEnabled: Boolean
  245. def preferSortMergeJoin: Boolean
  246. def pysparkJVMStacktraceEnabled: Boolean
  247. def pysparkSimplifiedTraceback: Boolean
  248. def rangeExchangeSampleSizePerPartition: Int
  249. val reader: ConfigReader
    Attributes
    protected
  250. def redactOptions[K, V](options: Seq[(K, V)]): Seq[(K, V)]

    Redacts the given option map according to the description of SQL_OPTIONS_REDACTION_PATTERN.

  251. def redactOptions[K, V](options: Map[K, V]): Map[K, V]

    Redacts the given option map according to the description of SQL_OPTIONS_REDACTION_PATTERN.

  252. def replEagerEvalMaxNumRows: Int
  253. def replEagerEvalTruncate: Int
  254. def replaceDatabricksSparkAvroEnabled: Boolean
  255. def replaceExceptWithFilter: Boolean
  256. def resolver: Resolver

    Returns the Resolver for the current configuration, which can be used to determine if two identifiers are equal.

  257. def runSQLonFile: Boolean
  258. def runtimeFilterBloomFilterEnabled: Boolean
  259. def runtimeFilterCreationSideThreshold: Long
  260. def runtimeFilterSemiJoinReductionEnabled: Boolean
  261. def serializerNestedSchemaPruningEnabled: Boolean
  262. def sessionLocalTimeZone: String
  263. def sessionWindowBufferInMemoryThreshold: Int
  264. def sessionWindowBufferSpillThreshold: Int
  265. def setCommandRejectsSparkCoreConfs: Boolean
  266. def setConf[T](entry: ConfigEntry[T], value: T): Unit

    Set the given Spark SQL configuration property.

  267. def setConf(props: Properties): Unit

    Set Spark SQL configuration properties.

  268. def setConfString(key: String, value: String): Unit

    Set the given Spark SQL configuration property using a string value.

  269. def setConfWithCheck(key: String, value: String): Unit
    Attributes
    protected
  270. def setOpsPrecedenceEnforced: Boolean
  271. val settings: Map[String, String]

    Only low degree of contention is expected for conf, thus NOT using ConcurrentHashMap.

    Only low degree of contention is expected for conf, thus NOT using ConcurrentHashMap.

    Attributes
    protected[spark]
  272. def sortBeforeRepartition: Boolean
  273. def sortMergeJoinExecBufferInMemoryThreshold: Int
  274. def sortMergeJoinExecBufferSpillThreshold: Int
  275. def starSchemaDetection: Boolean
  276. def starSchemaFTRatio: Double
  277. def stateStoreCompressionCodec: String
  278. def stateStoreFormatValidationEnabled: Boolean
  279. def stateStoreMinDeltasForSnapshot: Int
  280. def stateStoreProviderClass: String
  281. def stateStoreSkipNullsForStreamStreamJoins: Boolean
  282. def statefulOperatorCorrectnessCheckEnabled: Boolean
  283. def storeAnalyzedPlanForView: Boolean
  284. def storeAssignmentPolicy: SQLConf.StoreAssignmentPolicy.Value
  285. def streamingFileCommitProtocolClass: String
  286. def streamingMaintenanceInterval: Long
  287. def streamingMetricsEnabled: Boolean
  288. def streamingNoDataMicroBatchesEnabled: Boolean
  289. def streamingNoDataProgressEventInterval: Long
  290. def streamingPollingDelay: Long
  291. def streamingProgressRetention: Int
  292. def streamingSchemaInference: Boolean
  293. def streamingSessionWindowMergeSessionInLocalPartition: Boolean
  294. def strictIndexOperator: Boolean
  295. def stringRedactionPattern: Option[Regex]
  296. def subexpressionEliminationCacheMaxEntries: Int
  297. def subexpressionEliminationEnabled: Boolean
  298. def subqueryReuseEnabled: Boolean
  299. def supportQuotedRegexColumnName: Boolean
  300. final def synchronized[T0](arg0: ⇒ T0): T0
    Definition Classes
    AnyRef
  301. def tableRelationCacheSize: Int
  302. def timestampType: AtomicType
  303. def toString(): String
    Definition Classes
    AnyRef → Any
  304. def topKSortFallbackThreshold: Int
  305. def truncateTableIgnorePermissionAcl: Boolean
  306. def uiExplainMode: String
  307. def unsetConf(entry: ConfigEntry[_]): Unit
  308. def unsetConf(key: String): Unit
  309. def useCompression: Boolean
  310. def useCurrentSQLConfigsForView: Boolean
  311. def useDeprecatedKafkaOffsetFetching: Boolean
  312. def useObjectHashAggregation: Boolean
  313. def useV1Command: Boolean
  314. def v2BucketingEnabled: Boolean
  315. def validatePartitionColumns: Boolean
  316. def variableSubstituteEnabled: Boolean
  317. def verifyPartitionPath: Boolean
  318. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  319. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  320. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()
  321. def warehousePath: String
  322. def wholeStageEnabled: Boolean
  323. def wholeStageMaxNumFields: Int
  324. def wholeStageSplitConsumeFuncByOperator: Boolean
  325. def wholeStageUseIdInClassName: Boolean
  326. def windowExecBufferInMemoryThreshold: Int
  327. def windowExecBufferSpillThreshold: Int
  328. def writeLegacyParquetFormat: Boolean

Inherited from Logging

Inherited from Serializable

Inherited from Serializable

Inherited from AnyRef

Inherited from Any

Ungrouped