lib/google/apis/dataplex_v1/classes.rb in google-apis-dataplex_v1-0.35.0 vs lib/google/apis/dataplex_v1/classes.rb in google-apis-dataplex_v1-0.36.0
- old
+ new
@@ -1163,10 +1163,15 @@
# DataProfileResult defines the output of DataProfileScan. Each field of the
# table will have field type specific profile result.
class GoogleCloudDataplexV1DataProfileResult
include Google::Apis::Core::Hashable
+ # The result of post scan actions of DataProfileScan job.
+ # Corresponds to the JSON property `postScanActionsResult`
+ # @return [Google::Apis::DataplexV1::GoogleCloudDataplexV1DataProfileResultPostScanActionsResult]
+ attr_accessor :post_scan_actions_result
+
# Contains name, type, mode and field type specific profile information.
# Corresponds to the JSON property `profile`
# @return [Google::Apis::DataplexV1::GoogleCloudDataplexV1DataProfileResultProfile]
attr_accessor :profile
@@ -1184,16 +1189,61 @@
update!(**args)
end
# Update properties of this object
def update!(**args)
+ @post_scan_actions_result = args[:post_scan_actions_result] if args.key?(:post_scan_actions_result)
@profile = args[:profile] if args.key?(:profile)
@row_count = args[:row_count] if args.key?(:row_count)
@scanned_data = args[:scanned_data] if args.key?(:scanned_data)
end
end
+ # The result of post scan actions of DataProfileScan job.
+ class GoogleCloudDataplexV1DataProfileResultPostScanActionsResult
+ include Google::Apis::Core::Hashable
+
+ # The result of BigQuery export post scan action.
+ # Corresponds to the JSON property `bigqueryExportResult`
+ # @return [Google::Apis::DataplexV1::GoogleCloudDataplexV1DataProfileResultPostScanActionsResultBigQueryExportResult]
+ attr_accessor :bigquery_export_result
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @bigquery_export_result = args[:bigquery_export_result] if args.key?(:bigquery_export_result)
+ end
+ end
+
+ # The result of BigQuery export post scan action.
+ class GoogleCloudDataplexV1DataProfileResultPostScanActionsResultBigQueryExportResult
+ include Google::Apis::Core::Hashable
+
+ # Output only. Additional information about the BigQuery exporting.
+ # Corresponds to the JSON property `message`
+ # @return [String]
+ attr_accessor :message
+
+ # Output only. Execution state for the BigQuery exporting.
+ # Corresponds to the JSON property `state`
+ # @return [String]
+ attr_accessor :state
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @message = args[:message] if args.key?(:message)
+ @state = args[:state] if args.key?(:state)
+ end
+ end
+
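The two result classes added above only carry the export outcome back to the caller. A minimal sketch of how they nest inside a profile result, using hypothetical values; in practice this object is returned by the DataScans API rather than built by hand:

    require "google/apis/dataplex_v1"

    # Hypothetical values; a real result comes back from a finished DataProfileScan job.
    export_result = Google::Apis::DataplexV1::GoogleCloudDataplexV1DataProfileResultPostScanActionsResultBigQueryExportResult.new(
      state: "SUCCEEDED",
      message: "Results exported to BigQuery"
    )
    result = Google::Apis::DataplexV1::GoogleCloudDataplexV1DataProfileResult.new(
      post_scan_actions_result: Google::Apis::DataplexV1::GoogleCloudDataplexV1DataProfileResultPostScanActionsResult.new(
        bigquery_export_result: export_result
      )
    )
    puts result.post_scan_actions_result.bigquery_export_result.state  # => "SUCCEEDED"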
# Contains name, type, mode and field type specific profile information.
class GoogleCloudDataplexV1DataProfileResultProfile
include Google::Apis::Core::Hashable
# List of fields with structural and profile information for each field.
@@ -1488,10 +1538,15 @@
# The specification for fields to include or exclude in data profile scan.
# Corresponds to the JSON property `includeFields`
# @return [Google::Apis::DataplexV1::GoogleCloudDataplexV1DataProfileSpecSelectedFields]
attr_accessor :include_fields
+ # The configuration of post scan actions of DataProfileScan job.
+ # Corresponds to the JSON property `postScanActions`
+ # @return [Google::Apis::DataplexV1::GoogleCloudDataplexV1DataProfileSpecPostScanActions]
+ attr_accessor :post_scan_actions
+
# Optional. A filter applied to all rows in a single DataScan job. The filter
# needs to be a valid SQL expression for a WHERE clause in BigQuery standard SQL
# syntax. Example: col1 >= 0 AND col2 < 10
# Corresponds to the JSON property `rowFilter`
# @return [String]
@@ -1511,15 +1566,55 @@
# Update properties of this object
def update!(**args)
@exclude_fields = args[:exclude_fields] if args.key?(:exclude_fields)
@include_fields = args[:include_fields] if args.key?(:include_fields)
+ @post_scan_actions = args[:post_scan_actions] if args.key?(:post_scan_actions)
@row_filter = args[:row_filter] if args.key?(:row_filter)
@sampling_percent = args[:sampling_percent] if args.key?(:sampling_percent)
end
end
+ # The configuration of post scan actions of DataProfileScan job.
+ class GoogleCloudDataplexV1DataProfileSpecPostScanActions
+ include Google::Apis::Core::Hashable
+
+ # The configuration of BigQuery export post scan action.
+ # Corresponds to the JSON property `bigqueryExport`
+ # @return [Google::Apis::DataplexV1::GoogleCloudDataplexV1DataProfileSpecPostScanActionsBigQueryExport]
+ attr_accessor :bigquery_export
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @bigquery_export = args[:bigquery_export] if args.key?(:bigquery_export)
+ end
+ end
+
+ # The configuration of BigQuery export post scan action.
+ class GoogleCloudDataplexV1DataProfileSpecPostScanActionsBigQueryExport
+ include Google::Apis::Core::Hashable
+
+ # Optional. The BigQuery table to export DataProfileScan results to. Format:
+ # projects/`project`/datasets/`dataset`/tables/`table`
+ # Corresponds to the JSON property `resultsTable`
+ # @return [String]
+ attr_accessor :results_table
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @results_table = args[:results_table] if args.key?(:results_table)
+ end
+ end
+
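A sketch of wiring the new configuration classes into a DataProfileSpec; the project, dataset, and table names below are placeholders:

    require "google/apis/dataplex_v1"

    # Placeholder resource name, following the documented
    # projects/{project}/datasets/{dataset}/tables/{table} format.
    export = Google::Apis::DataplexV1::GoogleCloudDataplexV1DataProfileSpecPostScanActionsBigQueryExport.new(
      results_table: "projects/my-project/datasets/my_dataset/tables/profile_results"
    )
    profile_spec = Google::Apis::DataplexV1::GoogleCloudDataplexV1DataProfileSpec.new(
      post_scan_actions: Google::Apis::DataplexV1::GoogleCloudDataplexV1DataProfileSpecPostScanActions.new(
        bigquery_export: export
      ),
      sampling_percent: 10.0
    )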
# The specification for fields to include or exclude in data profile scan.
class GoogleCloudDataplexV1DataProfileSpecSelectedFields
include Google::Apis::Core::Hashable
# Optional. Expected input is a list of fully qualified names of fields as in
@@ -1574,10 +1669,15 @@
# Corresponds to the JSON property `passed`
# @return [Boolean]
attr_accessor :passed
alias_method :passed?, :passed
+ # The result of post scan actions of DataQualityScan job.
+ # Corresponds to the JSON property `postScanActionsResult`
+ # @return [Google::Apis::DataplexV1::GoogleCloudDataplexV1DataQualityResultPostScanActionsResult]
+ attr_accessor :post_scan_actions_result
+
# The count of rows processed.
# Corresponds to the JSON property `rowCount`
# @return [Fixnum]
attr_accessor :row_count
@@ -1597,16 +1697,61 @@
# Update properties of this object
def update!(**args)
@dimensions = args[:dimensions] if args.key?(:dimensions)
@passed = args[:passed] if args.key?(:passed)
+ @post_scan_actions_result = args[:post_scan_actions_result] if args.key?(:post_scan_actions_result)
@row_count = args[:row_count] if args.key?(:row_count)
@rules = args[:rules] if args.key?(:rules)
@scanned_data = args[:scanned_data] if args.key?(:scanned_data)
end
end
+ # The result of post scan actions of DataQualityScan job.
+ class GoogleCloudDataplexV1DataQualityResultPostScanActionsResult
+ include Google::Apis::Core::Hashable
+
+ # The result of BigQuery export post scan action.
+ # Corresponds to the JSON property `bigqueryExportResult`
+ # @return [Google::Apis::DataplexV1::GoogleCloudDataplexV1DataQualityResultPostScanActionsResultBigQueryExportResult]
+ attr_accessor :bigquery_export_result
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @bigquery_export_result = args[:bigquery_export_result] if args.key?(:bigquery_export_result)
+ end
+ end
+
+ # The result of BigQuery export post scan action.
+ class GoogleCloudDataplexV1DataQualityResultPostScanActionsResultBigQueryExportResult
+ include Google::Apis::Core::Hashable
+
+ # Output only. Additional information about the BigQuery exporting.
+ # Corresponds to the JSON property `message`
+ # @return [String]
+ attr_accessor :message
+
+ # Output only. Execution state for the BigQuery exporting.
+ # Corresponds to the JSON property `state`
+ # @return [String]
+ attr_accessor :state
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @message = args[:message] if args.key?(:message)
+ @state = args[:state] if args.key?(:state)
+ end
+ end
+
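A sketch of checking the export outcome after a DataQualityScan job completes; the client method name, job resource name, and OAuth scope are assumptions based on the generated service surface, not taken from this diff:

    require "googleauth"
    require "google/apis/dataplex_v1"

    dataplex = Google::Apis::DataplexV1::CloudDataplexService.new
    dataplex.authorization = Google::Auth.get_application_default(
      ["https://www.googleapis.com/auth/cloud-platform"]
    )

    # Hypothetical job name; view: "FULL" requests the full result payload.
    job = dataplex.get_project_location_data_scan_job(
      "projects/my-project/locations/us-central1/dataScans/my-scan/jobs/my-job-id",
      view: "FULL"
    )
    export = job.data_quality_result&.post_scan_actions_result&.bigquery_export_result
    puts "BigQuery export #{export.state}: #{export.message}" if export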
# A rule captures data quality intent about a data source.
class GoogleCloudDataplexV1DataQualityRule
include Google::Apis::Core::Hashable
# Optional. The unnested column which this rule is evaluated against.
@@ -1776,11 +1921,11 @@
# Evaluates whether each column value matches a specified regex.
class GoogleCloudDataplexV1DataQualityRuleRegexExpectation
include Google::Apis::Core::Hashable
- # A regular expression the column value is expected to match.
+ # Optional. A regular expression the column value is expected to match.
# Corresponds to the JSON property `regex`
# @return [String]
attr_accessor :regex
def initialize(**args)
@@ -1859,11 +2004,11 @@
# needs to use BigQuery standard SQL syntax and should produce a boolean value
# per row as the result.Example: col1 >= 0 AND col2 < 10
class GoogleCloudDataplexV1DataQualityRuleRowConditionExpectation
include Google::Apis::Core::Hashable
- # The SQL expression.
+ # Optional. The SQL expression.
# Corresponds to the JSON property `sqlExpression`
# @return [String]
attr_accessor :sql_expression
def initialize(**args)
@@ -1878,11 +2023,11 @@
# Evaluates whether each column value is contained by a specified set.
class GoogleCloudDataplexV1DataQualityRuleSetExpectation
include Google::Apis::Core::Hashable
- # Expected values for the column value.
+ # Optional. Expected values for the column value.
# Corresponds to the JSON property `values`
# @return [Array<String>]
attr_accessor :values
def initialize(**args)
@@ -1898,38 +2043,38 @@
# Evaluates whether the column aggregate statistic lies between a specified
# range.
class GoogleCloudDataplexV1DataQualityRuleStatisticRangeExpectation
include Google::Apis::Core::Hashable
- # The maximum column statistic value allowed for a row to pass this validation.
- # At least one of min_value and max_value need to be provided.
+ # Optional. The maximum column statistic value allowed for a row to pass this
+ # validation.At least one of min_value and max_value need to be provided.
# Corresponds to the JSON property `maxValue`
# @return [String]
attr_accessor :max_value
- # The minimum column statistic value allowed for a row to pass this validation.
- # At least one of min_value and max_value need to be provided.
+ # Optional. The minimum column statistic value allowed for a row to pass this
+ # validation.At least one of min_value and max_value need to be provided.
# Corresponds to the JSON property `minValue`
# @return [String]
attr_accessor :min_value
- # The aggregate metric to evaluate.
+ # Optional. The aggregate metric to evaluate.
# Corresponds to the JSON property `statistic`
# @return [String]
attr_accessor :statistic
- # Whether column statistic needs to be strictly lesser than ('<') the maximum,
- # or if equality is allowed.Only relevant if a max_value has been defined.
- # Default = false.
+ # Optional. Whether column statistic needs to be strictly lesser than ('<') the
+ # maximum, or if equality is allowed.Only relevant if a max_value has been
+ # defined. Default = false.
# Corresponds to the JSON property `strictMaxEnabled`
# @return [Boolean]
attr_accessor :strict_max_enabled
alias_method :strict_max_enabled?, :strict_max_enabled
- # Whether column statistic needs to be strictly greater than ('>') the minimum,
- # or if equality is allowed.Only relevant if a min_value has been defined.
- # Default = false.
+ # Optional. Whether column statistic needs to be strictly greater than ('>') the
+ # minimum, or if equality is allowed.Only relevant if a min_value has been
+ # defined. Default = false.
# Corresponds to the JSON property `strictMinEnabled`
# @return [Boolean]
attr_accessor :strict_min_enabled
alias_method :strict_min_enabled?, :strict_min_enabled
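A sketch of one rule using the fields documented above; the wrapping attributes on GoogleCloudDataplexV1DataQualityRule (column, dimension, statistic_range_expectation) are part of the existing generated surface, and the column name is a placeholder:

    require "google/apis/dataplex_v1"

    # Mean of `price` must lie in (0, 100]; strict_min_enabled excludes 0 itself.
    range_rule = Google::Apis::DataplexV1::GoogleCloudDataplexV1DataQualityRule.new(
      column: "price",
      dimension: "VALIDITY",
      statistic_range_expectation:
        Google::Apis::DataplexV1::GoogleCloudDataplexV1DataQualityRuleStatisticRangeExpectation.new(
          statistic: "MEAN",
          min_value: "0",
          max_value: "100",
          strict_min_enabled: true,
          strict_max_enabled: false
        )
    )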
@@ -1951,11 +2096,11 @@
# use BigQuery standard SQL syntax and should produce a scalar boolean result.
# Example: MIN(col1) >= 0
class GoogleCloudDataplexV1DataQualityRuleTableConditionExpectation
include Google::Apis::Core::Hashable
- # The SQL expression.
+ # Optional. The SQL expression.
# Corresponds to the JSON property `sqlExpression`
# @return [String]
attr_accessor :sql_expression
def initialize(**args)
@@ -2071,19 +2216,24 @@
# DataQualityScan related setting.
class GoogleCloudDataplexV1DataQualitySpec
include Google::Apis::Core::Hashable
+ # The configuration of post scan actions of DataQualityScan.
+ # Corresponds to the JSON property `postScanActions`
+ # @return [Google::Apis::DataplexV1::GoogleCloudDataplexV1DataQualitySpecPostScanActions]
+ attr_accessor :post_scan_actions
+
# Optional. A filter applied to all rows in a single DataScan job. The filter
# needs to be a valid SQL expression for a WHERE clause in BigQuery standard SQL
# syntax. Example: col1 >= 0 AND col2 < 10
# Corresponds to the JSON property `rowFilter`
# @return [String]
attr_accessor :row_filter
- # The list of rules to evaluate against a data source. At least one rule is
- # required.
+ # Required. The list of rules to evaluate against a data source. At least one
+ # rule is required.
# Corresponds to the JSON property `rules`
# @return [Array<Google::Apis::DataplexV1::GoogleCloudDataplexV1DataQualityRule>]
attr_accessor :rules
# Optional. The percentage of the records to be selected from the dataset for
@@ -2098,16 +2248,56 @@
update!(**args)
end
# Update properties of this object
def update!(**args)
+ @post_scan_actions = args[:post_scan_actions] if args.key?(:post_scan_actions)
@row_filter = args[:row_filter] if args.key?(:row_filter)
@rules = args[:rules] if args.key?(:rules)
@sampling_percent = args[:sampling_percent] if args.key?(:sampling_percent)
end
end
+ # The configuration of post scan actions of DataQualityScan.
+ class GoogleCloudDataplexV1DataQualitySpecPostScanActions
+ include Google::Apis::Core::Hashable
+
+ # The configuration of BigQuery export post scan action.
+ # Corresponds to the JSON property `bigqueryExport`
+ # @return [Google::Apis::DataplexV1::GoogleCloudDataplexV1DataQualitySpecPostScanActionsBigQueryExport]
+ attr_accessor :bigquery_export
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @bigquery_export = args[:bigquery_export] if args.key?(:bigquery_export)
+ end
+ end
+
+ # The configuration of BigQuery export post scan action.
+ class GoogleCloudDataplexV1DataQualitySpecPostScanActionsBigQueryExport
+ include Google::Apis::Core::Hashable
+
+ # Optional. The BigQuery table to export DataQualityScan results to. Format:
+ # projects/`project`/datasets/`dataset`/tables/`table`
+ # Corresponds to the JSON property `resultsTable`
+ # @return [String]
+ attr_accessor :results_table
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @results_table = args[:results_table] if args.key?(:results_table)
+ end
+ end
+
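A sketch combining the now-required rules list with the new post-scan BigQuery export; rule values and resource names are placeholders:

    require "google/apis/dataplex_v1"

    set_rule = Google::Apis::DataplexV1::GoogleCloudDataplexV1DataQualityRule.new(
      column: "country_code",
      dimension: "VALIDITY",
      set_expectation: Google::Apis::DataplexV1::GoogleCloudDataplexV1DataQualityRuleSetExpectation.new(
        values: ["US", "CA", "MX"]
      )
    )
    quality_spec = Google::Apis::DataplexV1::GoogleCloudDataplexV1DataQualitySpec.new(
      rules: [set_rule],
      row_filter: "col1 >= 0 AND col2 < 10",
      post_scan_actions: Google::Apis::DataplexV1::GoogleCloudDataplexV1DataQualitySpecPostScanActions.new(
        bigquery_export: Google::Apis::DataplexV1::GoogleCloudDataplexV1DataQualitySpecPostScanActionsBigQueryExport.new(
          results_table: "projects/my-project/datasets/my_dataset/tables/dq_results"
        )
      )
    )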
# Represents a user-visible job which provides the insights for the related data
# source.For example: Data Quality: generates queries based on the rules and
# runs against the data to get data quality check results. Data Profile:
# analyzes the data in table(s) and generates insights about the structure,
# content and relationships (such as null percent, cardinality, min/max/mean,
@@ -2668,10 +2858,15 @@
# Output only. The number of attributes in the DataTaxonomy.
# Corresponds to the JSON property `attributeCount`
# @return [Fixnum]
attr_accessor :attribute_count
+ # Output only. The number of classes in the DataTaxonomy.
+ # Corresponds to the JSON property `classCount`
+ # @return [Fixnum]
+ attr_accessor :class_count
+
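A small sketch of reading the new counter, assuming `taxonomy` is a GoogleCloudDataplexV1DataTaxonomy already fetched from the API:

    # Both counters are output only; they cannot be set on create or update.
    puts "#{taxonomy.display_name}: #{taxonomy.attribute_count} attributes, #{taxonomy.class_count} classes"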
# Output only. The time when the DataTaxonomy was created.
# Corresponds to the JSON property `createTime`
# @return [String]
attr_accessor :create_time
@@ -2721,9 +2916,10 @@
end
# Update properties of this object
def update!(**args)
@attribute_count = args[:attribute_count] if args.key?(:attribute_count)
+ @class_count = args[:class_count] if args.key?(:class_count)
@create_time = args[:create_time] if args.key?(:create_time)
@description = args[:description] if args.key?(:description)
@display_name = args[:display_name] if args.key?(:display_name)
@etag = args[:etag] if args.key?(:etag)
@labels = args[:labels] if args.key?(:labels)