final case class AppSettings(executionDateTime: EnrichedDT, referenceDateTime: EnrichedDT, allowNotifications: Boolean, allowSqlQueries: Boolean, aggregatedKafkaOutput: Boolean, enableCaseSensitivity: Boolean, errorDumpSize: Int, outputRepartition: Int, metricEngineAPI: MetricEngineAPI, checkFailureTolerance: CheckFailureTolerance, storageConfig: Option[StorageConfig], emailConfig: Option[EmailConfig], mattermostConfig: Option[MattermostConfig], streamConfig: StreamConfig, encryption: Option[Encryption], sparkConf: SparkConf, isLocal: Boolean, isShared: Boolean, doMigration: Boolean, applicationName: Option[String], prependVars: String, loggingLevel: Level, versionInfo: VersionInfo) extends Product with Serializable
Application settings
- executionDateTime
Job execution date-time (actual time when job is started)
- referenceDateTime
Reference date-time (for which the job is performed)
- allowNotifications
Enables notifications to be sent from DQ application
- allowSqlQueries
Enables SQL arbitrary queries in virtual sources
- aggregatedKafkaOutput
Enables sending aggregated messages for Kafka Targets (one per each target type, except checkAlerts where one message per checkAlert will be sent)
- enableCaseSensitivity
Enables columns case sensitivity
- errorDumpSize
Maximum number of errors to be collected per single metric.
- outputRepartition
Sets the number of partitions when writing outputs. By default writes single file.
- metricEngineAPI
Metric processor API used to process metrics: either Spark RDD or Spark DF.
- checkFailureTolerance
Returns the failure status if any of the checks fail.
- storageConfig
Configuration of connection to Data Quality Storage
- emailConfig
Configuration of connection to SMTP server
- mattermostConfig
Configuration of connection to Mattermost API
- streamConfig
Streaming settings (used in streaming applications only)
- encryption
Encryption settings
- sparkConf
Spark configuration parameters
- isLocal
Boolean flag indicating whether spark application must be run locally.
- isShared
Boolean flag indicating whether spark application is running within shared spark context.
- doMigration
Boolean flag indicating whether DQ storage database migration needs to be run prior to saving results.
- applicationName
Name of Checkita Data Quality spark application
- prependVars
Multiline HOCON string with variables to be prepended to configuration files during their parsing.
- loggingLevel
Application logging level
- versionInfo
Information about application and configuration API versions.
- Alphabetic
- By Inheritance
- AppSettings
- Serializable
- Serializable
- Product
- Equals
- AnyRef
- Any
- Hide All
- Show All
- Public
- All
Instance Constructors
-
new
AppSettings(executionDateTime: EnrichedDT, referenceDateTime: EnrichedDT, allowNotifications: Boolean, allowSqlQueries: Boolean, aggregatedKafkaOutput: Boolean, enableCaseSensitivity: Boolean, errorDumpSize: Int, outputRepartition: Int, metricEngineAPI: MetricEngineAPI, checkFailureTolerance: CheckFailureTolerance, storageConfig: Option[StorageConfig], emailConfig: Option[EmailConfig], mattermostConfig: Option[MattermostConfig], streamConfig: StreamConfig, encryption: Option[Encryption], sparkConf: SparkConf, isLocal: Boolean, isShared: Boolean, doMigration: Boolean, applicationName: Option[String], prependVars: String, loggingLevel: Level, versionInfo: VersionInfo)
- executionDateTime
Job execution date-time (actual time when job is started)
- referenceDateTime
Reference date-time (for which the job is performed)
- allowNotifications
Enables notifications to be sent from DQ application
- allowSqlQueries
Enables SQL arbitrary queries in virtual sources
- aggregatedKafkaOutput
Enables sending aggregated messages for Kafka Targets (one per each target type, except checkAlerts where one message per checkAlert will be sent)
- enableCaseSensitivity
Enables columns case sensitivity
- errorDumpSize
Maximum number of errors to be collected per single metric.
- outputRepartition
Sets the number of partitions when writing outputs. By default writes single file.
- metricEngineAPI
Metric processor API used to process metrics: either Spark RDD or Spark DF.
- checkFailureTolerance
Returns the failure status if any of the checks fail.
- storageConfig
Configuration of connection to Data Quality Storage
- emailConfig
Configuration of connection to SMTP server
- mattermostConfig
Configuration of connection to Mattermost API
- streamConfig
Streaming settings (used in streaming applications only)
- encryption
Encryption settings
- sparkConf
Spark configuration parameters
- isLocal
Boolean flag indicating whether spark application must be run locally.
- isShared
Boolean flag indicating whether spark application is running within shared spark context.
- doMigration
Boolean flag indicating whether DQ storage database migration needs to be run prior to saving results.
- applicationName
Name of Checkita Data Quality spark application
- prependVars
Multiline HOCON string with variables to be prepended to configuration files during their parsing.
- loggingLevel
Application logging level
- versionInfo
Information about application and configuration API versions.
Value Members
-
final
def
!=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
##(): Int
- Definition Classes
- AnyRef → Any
-
final
def
==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- val aggregatedKafkaOutput: Boolean
- val allowNotifications: Boolean
- val allowSqlQueries: Boolean
- val applicationName: Option[String]
-
final
def
asInstanceOf[T0]: T0
- Definition Classes
- Any
- val checkFailureTolerance: CheckFailureTolerance
-
def
clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()
- val doMigration: Boolean
- val emailConfig: Option[EmailConfig]
- val enableCaseSensitivity: Boolean
- val encryption: Option[Encryption]
-
final
def
eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- val errorDumpSize: Int
- val executionDateTime: EnrichedDT
-
def
finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( classOf[java.lang.Throwable] )
-
final
def
getClass(): Class[_]
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
-
final
def
isInstanceOf[T0]: Boolean
- Definition Classes
- Any
- val isLocal: Boolean
- val isShared: Boolean
- val loggingLevel: Level
- val mattermostConfig: Option[MattermostConfig]
- val metricEngineAPI: MetricEngineAPI
-
final
def
ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
final
def
notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
-
final
def
notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
- val outputRepartition: Int
- val prependVars: String
- val referenceDateTime: EnrichedDT
- val sparkConf: SparkConf
- val storageConfig: Option[StorageConfig]
- val streamConfig: StreamConfig
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
- val versionInfo: VersionInfo
-
final
def
wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()