diff --git a/src/schema-validation.jsonc b/src/schema-validation.jsonc index e551ca119ce..e6c0bf97f1f 100644 --- a/src/schema-validation.jsonc +++ b/src/schema-validation.jsonc @@ -647,6 +647,9 @@ "dart-build.json": { "unknownKeywords": ["deprecationMessage", "sources"] }, + "databricks-asset-bundles.json": { + "unknownKeywords": ["markdownDescription"] + }, "dein.json": { "unknownKeywords": ["x-taplo", "x-taplo-info"] }, diff --git a/src/schemas/json/databricks-asset-bundles.json b/src/schemas/json/databricks-asset-bundles.json index 178864e1c64..7153bf5504f 100644 --- a/src/schemas/json/databricks-asset-bundles.json +++ b/src/schemas/json/databricks-asset-bundles.json @@ -3,7 +3,7 @@ "$id": "https://json.schemastore.org/databricks-asset-bundles.json", "$defs": { "bool": { - "anyOf": [ + "oneOf": [ { "type": "boolean" }, @@ -30,7 +30,7 @@ ] }, "float64": { - "anyOf": [ + "oneOf": [ { "type": "number" }, @@ -61,8 +61,80 @@ "cli": { "bundle": { "config": { + "resources.App": { + "oneOf": [ + { + "type": "object", + "properties": { + "active_deployment": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppDeployment" + }, + "app_status": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.ApplicationStatus" + }, + "compute_status": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.ComputeStatus" + }, + "config": { + "$ref": "#/$defs/map/interface" + }, + "create_time": { + "$ref": "#/$defs/string" + }, + "creator": { + "$ref": "#/$defs/string" + }, + "default_source_code_path": { + "$ref": "#/$defs/string" + }, + "description": { + "$ref": "#/$defs/string" + }, + "name": { + "$ref": "#/$defs/string" + }, + "pending_deployment": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppDeployment" + }, + "permissions": { + "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission" + }, + "resources": { + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/apps.AppResource" + }, + "service_principal_client_id": { + "$ref": "#/$defs/string" + }, + "service_principal_id": { + "$ref": "#/$defs/int64" + }, + "service_principal_name": { + "$ref": "#/$defs/string" + }, + "source_code_path": { + "$ref": "#/$defs/string" + }, + "update_time": { + "$ref": "#/$defs/string" + }, + "updater": { + "$ref": "#/$defs/string" + }, + "url": { + "$ref": "#/$defs/string" + } + }, + "additionalProperties": false, + "required": ["source_code_path", "name"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "resources.Cluster": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -132,6 +204,13 @@ "$ref": "#/$defs/string", "description": "The optional ID of the instance pool to which the cluster belongs." }, + "is_single_node": { + "$ref": "#/$defs/bool", + "description": "This field can only be used with `kind`.\n\nWhen set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers`\n" + }, + "kind": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.Kind" + }, "node_type_id": { "$ref": "#/$defs/string", "description": "This field encodes, through a single value, the resources available to each of\nthe Spark nodes in this cluster. For example, the Spark nodes can be provisioned\nand optimized for memory or compute intensive workloads. 
A list of available node\ntypes can be retrieved by using the :method:clusters/listNodeTypes API call.\n" @@ -170,6 +249,10 @@ "$ref": "#/$defs/slice/string", "description": "SSH public key contents that will be added to each Spark node in this cluster. The\ncorresponding private keys can be used to login with the user name `ubuntu` on port `2200`.\nUp to 10 keys can be specified." }, + "use_ml_runtime": { + "$ref": "#/$defs/bool", + "description": "This field can only be used with `kind`.\n\n`effective_spark_version` is determined by `spark_version` (DBR release), this field `use_ml_runtime`, and whether `node_type_id` is gpu node or not.\n" + }, "workload_type": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.WorkloadType" } @@ -182,16 +265,81 @@ } ] }, + "resources.Dashboard": { + "oneOf": [ + { + "type": "object", + "properties": { + "create_time": { + "$ref": "#/$defs/string", + "description": "The timestamp of when the dashboard was created." + }, + "dashboard_id": { + "$ref": "#/$defs/string", + "description": "UUID identifying the dashboard." + }, + "display_name": { + "$ref": "#/$defs/string", + "description": "The display name of the dashboard." + }, + "embed_credentials": { + "$ref": "#/$defs/bool" + }, + "etag": { + "$ref": "#/$defs/string", + "description": "The etag for the dashboard. Can be optionally provided on updates to ensure that the dashboard\nhas not been modified since the last read.\nThis field is excluded in List Dashboards responses." + }, + "file_path": { + "$ref": "#/$defs/string" + }, + "lifecycle_state": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/dashboards.LifecycleState", + "description": "The state of the dashboard resource. Used for tracking trashed status." + }, + "parent_path": { + "$ref": "#/$defs/string", + "description": "The workspace path of the folder containing the dashboard. Includes leading slash and no\ntrailing slash.\nThis field is excluded in List Dashboards responses." + }, + "path": { + "$ref": "#/$defs/string", + "description": "The workspace path of the dashboard asset, including the file name.\nExported dashboards always have the file extension `.lvdash.json`.\nThis field is excluded in List Dashboards responses." + }, + "permissions": { + "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission" + }, + "serialized_dashboard": { + "$ref": "#/$defs/interface", + "description": "The contents of the dashboard in serialized string form.\nThis field is excluded in List Dashboards responses.\nUse the [get dashboard API](https://docs.databricks.com/api/workspace/lakeview/get)\nto retrieve an example response, which includes the `serialized_dashboard` field.\nThis field provides the structure of the JSON string that represents the dashboard's\nlayout and components." + }, + "update_time": { + "$ref": "#/$defs/string", + "description": "The timestamp of when the dashboard was last updated by the user.\nThis field is excluded in List Dashboards responses." + }, + "warehouse_id": { + "$ref": "#/$defs/string", + "description": "The warehouse ID used to run the dashboard." 
+ } + }, + "additionalProperties": false + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "resources.Grant": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { "principal": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The name of the principal that will be granted privileges" }, "privileges": { - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "description": "The privileges to grant to the specified entity" } }, "additionalProperties": false, @@ -204,10 +352,14 @@ ] }, "resources.Job": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { + "budget_policy_id": { + "$ref": "#/$defs/string", + "description": "The id of the user specified budget policy to use for this job.\nIf not specified, a default budget policy may be applied when creating or modifying the job.\nSee `effective_budget_policy_id` for the budget policy used by this workload." + }, "continuous": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Continuous", "description": "An optional continuous property for this job. The continuous property will ensure that there is always one run executing. Only one of `schedule` and `continuous` can be used." @@ -233,7 +385,7 @@ }, "job_clusters": { "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.JobCluster", - "description": "A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in task settings." + "description": "A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in task settings.\nIf more than 100 job clusters are available, you can paginate through them using :method:jobs/get." }, "max_concurrent_runs": { "$ref": "#/$defs/int", @@ -271,7 +423,7 @@ }, "tasks": { "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Task", - "description": "A list of task specifications to be executed by this job." + "description": "A list of task specifications to be executed by this job.\nIf more than 100 tasks are available, you can paginate through them using :method:jobs/get. Use the `next_page_token` field at the object root to determine if more results are available." }, "timeout_seconds": { "$ref": "#/$defs/int", @@ -295,7 +447,7 @@ ] }, "resources.MlflowExperiment": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -340,7 +492,7 @@ ] }, "resources.MlflowModel": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -385,7 +537,7 @@ ] }, "resources.ModelServingEndpoint": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -427,21 +579,25 @@ ] }, "resources.Permission": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { "group_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The name of the group that has the permission set in level." }, "level": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The allowed permission for user, group, service principal defined for this permission." }, "service_principal_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The name of the service principal that has the permission set in level." 
}, "user_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The name of the user that has the permission set in level." } }, "additionalProperties": false, @@ -454,7 +610,7 @@ ] }, "resources.Pipeline": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -500,7 +656,7 @@ }, "gateway_definition": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionGatewayPipelineDefinition", - "description": "The definition of a gateway pipeline to support CDC." + "description": "The definition of a gateway pipeline to support change data capture." }, "id": { "$ref": "#/$defs/string", @@ -529,6 +685,10 @@ "$ref": "#/$defs/bool", "description": "Whether Photon is enabled for this pipeline." }, + "restart_window": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.RestartWindow", + "description": "Restart window of this pipeline." + }, "schema": { "$ref": "#/$defs/string", "description": "The default schema (database) where tables are read from or published to. The presence of this field implies that the pipeline is in direct publishing mode." @@ -559,7 +719,7 @@ ] }, "resources.QualityMonitor": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -607,6 +767,9 @@ "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorSnapshot", "description": "Configuration for monitoring snapshot tables." }, + "table_name": { + "$ref": "#/$defs/string" + }, "time_series": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorTimeSeries", "description": "Configuration for monitoring time series tables." @@ -617,7 +780,11 @@ } }, "additionalProperties": false, - "required": ["assets_dir", "output_schema_name"] + "required": [ + "table_name", + "assets_dir", + "output_schema_name" + ] }, { "type": "string", @@ -626,7 +793,7 @@ ] }, "resources.RegisteredModel": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -664,7 +831,7 @@ ] }, "resources.Schema": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -700,8 +867,49 @@ } ] }, + "resources.Volume": { + "oneOf": [ + { + "type": "object", + "properties": { + "catalog_name": { + "$ref": "#/$defs/string", + "description": "The name of the catalog where the schema and the volume are" + }, + "comment": { + "$ref": "#/$defs/string", + "description": "The comment attached to the volume" + }, + "grants": { + "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Grant" + }, + "name": { + "$ref": "#/$defs/string", + "description": "The name of the volume" + }, + "schema_name": { + "$ref": "#/$defs/string", + "description": "The name of the schema where the volume is" + }, + "storage_location": { + "$ref": "#/$defs/string", + "description": "The storage location on the cloud" + }, + "volume_type": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.VolumeType" + } + }, + "additionalProperties": false, + "required": ["catalog_name", "name", "schema_name"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "variable.Lookup": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -726,6 +934,9 @@ "metastore": { "$ref": "#/$defs/string" }, + "notification_destination": { + "$ref": "#/$defs/string" + }, "pipeline": { "$ref": "#/$defs/string" }, @@ -756,13 +967,16 @@ "$ref": "#/$defs/interface" }, "description": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", 
+ "description": "The description of the variable." }, "lookup": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.Lookup" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.Lookup", + "description": "The name of the alert, cluster_policy, cluster, dashboard, instance_pool, job, metastore, pipeline, query, service_principal, or warehouse object for which to retrieve an ID." }, "type": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.VariableType" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.VariableType", + "description": "The type of the variable." } }, "additionalProperties": false @@ -777,13 +991,17 @@ "$ref": "#/$defs/interface" }, "description": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The description of the variable" }, "lookup": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.Lookup" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.Lookup", + "description": "The name of the alert, cluster_policy, cluster, dashboard, instance_pool, job, metastore, pipeline, query, service_principal, or warehouse object for which to retrieve an ID.", + "markdownDescription": "The name of the `alert`, `cluster_policy`, `cluster`, `dashboard`, `instance_pool`, `job`, `metastore`, `pipeline`, `query`, `service_principal`, or `warehouse` object for which to retrieve an ID.\"" }, "type": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.VariableType" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.VariableType", + "description": "The type of the variable." } }, "additionalProperties": false @@ -793,24 +1011,31 @@ } }, "config.Artifact": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { "build": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "An optional set of non-default build commands that you want to run locally before deployment.\n\nFor Python wheel builds, the Databricks CLI assumes that it can find a local install of the Python wheel package to run builds, and it runs the command python setup.py bdist_wheel by default during each bundle deployment.\n\nTo specify multiple build commands, separate each command with double-ampersand (\u0026\u0026) characters." }, "executable": { - "$ref": "#/$defs/github.com/databricks/cli/libs/exec.ExecutableType" + "$ref": "#/$defs/github.com/databricks/cli/libs/exec.ExecutableType", + "description": "The executable type." }, "files": { - "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config.ArtifactFile" + "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config.ArtifactFile", + "description": "The source files for the artifact.", + "markdownDescription": "The source files for the artifact, defined as an [artifact_file](https://docs.databricks.com/dev-tools/bundles/reference.html#artifact_file)." }, "path": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The location where the built artifact will be saved." }, "type": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config.ArtifactType" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config.ArtifactType", + "description": "The type of the artifact.", + "markdownDescription": "The type of the artifact. 
Valid values are `wheel` or `jar`." } }, "additionalProperties": false, @@ -823,12 +1048,13 @@ ] }, "config.ArtifactFile": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { "source": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The path of the files used to build the artifact." } }, "additionalProperties": false, @@ -844,27 +1070,40 @@ "type": "string" }, "config.Bundle": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { "cluster_id": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The ID of a cluster to use to run the bundle.", + "markdownDescription": "The ID of a cluster to use to run the bundle. See [cluster_id](https://docs.databricks.com/dev-tools/bundles/settings.html#cluster_id)." }, "compute_id": { "$ref": "#/$defs/string" }, "databricks_cli_version": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The Databricks CLI version to use for the bundle.", + "markdownDescription": "The Databricks CLI version to use for the bundle. See [databricks_cli_version](https://docs.databricks.com/dev-tools/bundles/settings.html#databricks_cli_version)." }, "deployment": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Deployment" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Deployment", + "description": "The definition of the bundle deployment.", + "markdownDescription": "The definition of the bundle deployment. For supported attributes, see [deployment](https://docs.databricks.com/dev-tools/bundles/reference.html#deployment) and [deployment modes](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html)." }, "git": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Git" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Git", + "description": "The Git version control details that are associated with your bundle.", + "markdownDescription": "The Git version control details that are associated with your bundle. For supported attributes, see [git](https://docs.databricks.com/dev-tools/bundles/reference.html#git) and [git](https://docs.databricks.com/dev-tools/bundles/settings.html#git)." }, "name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The name of the bundle." + }, + "uuid": { + "$ref": "#/$defs/string", + "description": "Reserved. A Universally Unique Identifier (UUID) for the bundle that uniquely identifies the bundle in internal Databricks systems. This is generated when a bundle project is initialized using a Databricks template (using the `databricks bundle init` command)." } }, "additionalProperties": false, @@ -880,15 +1119,18 @@ "type": "string" }, "config.Deployment": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { "fail_on_active_runs": { - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "description": "Whether to fail on active runs. If this is set to true, a deployment that is running can be interrupted." }, "lock": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Lock" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Lock", + "description": "The deployment lock attributes.", + "markdownDescription": "The deployment lock attributes. See [lock](https://docs.databricks.com/dev-tools/bundles/reference.html#lock)." 
} }, "additionalProperties": false @@ -900,21 +1142,29 @@ ] }, "config.Experimental": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { "pydabs": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config.PyDABs" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config.PyDABs", + "description": "The PyDABs configuration." + }, + "python": { + "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Python", + "description": "Configures loading of Python code defined with 'databricks-bundles' package." }, "python_wheel_wrapper": { - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "description": "Whether to use a Python wheel wrapper" }, "scripts": { - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Command" + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Command", + "description": "The commands to run" }, "use_legacy_run_as": { - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "description": "Whether to use the legacy run_as behavior" } }, "additionalProperties": false @@ -926,15 +1176,19 @@ ] }, "config.Git": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { "branch": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The Git branch name.", + "markdownDescription": "The Git branch name. See [git](https://docs.databricks.com/dev-tools/bundles/settings.html#git)." }, "origin_url": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The origin URL of the repository.", + "markdownDescription": "The origin URL of the repository. See [git](https://docs.databricks.com/dev-tools/bundles/settings.html#git)." } }, "additionalProperties": false @@ -946,15 +1200,17 @@ ] }, "config.Lock": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { "enabled": { - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "description": "Whether this lock is enabled." }, "force": { - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "description": "Whether to force this lock if it is enabled." } }, "additionalProperties": false @@ -969,24 +1225,33 @@ "type": "string" }, "config.Presets": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { "jobs_max_concurrent_runs": { - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "description": "The maximum concurrent runs for a job." }, "name_prefix": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The prefix for job runs of the bundle." }, "pipelines_development": { - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "description": "Whether pipeline deployments should be locked in development mode." + }, + "source_linked_deployment": { + "$ref": "#/$defs/bool", + "description": "Whether to link the deployment to the bundle source." }, "tags": { - "$ref": "#/$defs/map/string" + "$ref": "#/$defs/map/string", + "description": "The tags for the bundle deployment." }, "trigger_pause_status": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "A pause status to apply to all job triggers and schedules. Valid values are PAUSED or UNPAUSED." 
} }, "additionalProperties": false @@ -998,18 +1263,21 @@ ] }, "config.PyDABs": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { "enabled": { - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "description": "Whether or not PyDABs (Private Preview) is enabled" }, "import": { - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "description": "The PyDABs project to import to discover resources, resource generator and mutators" }, "venv_path": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The Python virtual environment path" } }, "additionalProperties": false @@ -1020,37 +1288,93 @@ } ] }, + "config.Python": { + "oneOf": [ + { + "type": "object", + "properties": { + "mutators": { + "$ref": "#/$defs/slice/string", + "description": "Mutators contains a list of fully qualified function paths to mutator functions.\n\nExample: [\"my_project.mutators:add_default_cluster\"]" + }, + "resources": { + "$ref": "#/$defs/slice/string", + "description": "Resources contains a list of fully qualified function paths to load resources\ndefined in Python code.\n\nExample: [\"my_project.resources:load_resources\"]" + }, + "venv_path": { + "$ref": "#/$defs/string", + "description": "VEnvPath is path to the virtual environment.\n\nIf enabled, Python code will execute within this environment. If disabled,\nit defaults to using the Python interpreter available in the current shell." + } + }, + "additionalProperties": false, + "required": ["resources", "mutators"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "config.Resources": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { + "apps": { + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.App" + }, "clusters": { - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Cluster" + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Cluster", + "description": "The cluster definitions for the bundle.", + "markdownDescription": "The cluster definitions for the bundle. See [cluster](https://docs.databricks.com/dev-tools/bundles/resources.html#cluster)" + }, + "dashboards": { + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Dashboard", + "description": "The dashboard definitions for the bundle.", + "markdownDescription": "The dashboard definitions for the bundle. See [dashboard](https://docs.databricks.com/dev-tools/bundles/resources.html#dashboard)" }, "experiments": { - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.MlflowExperiment" + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.MlflowExperiment", + "description": "The experiment definitions for the bundle.", + "markdownDescription": "The experiment definitions for the bundle. See [experiment](https://docs.databricks.com/dev-tools/bundles/resources.html#experiment)" }, "jobs": { - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Job" + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Job", + "description": "The job definitions for the bundle.", + "markdownDescription": "The job definitions for the bundle. 
See [job](https://docs.databricks.com/dev-tools/bundles/resources.html#job)" }, "model_serving_endpoints": { - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint" + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint", + "description": "The model serving endpoint definitions for the bundle.", + "markdownDescription": "The model serving endpoint definitions for the bundle. See [model_serving_endpoint](https://docs.databricks.com/dev-tools/bundles/resources.html#model_serving_endpoint)" }, "models": { - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.MlflowModel" + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.MlflowModel", + "description": "The model definitions for the bundle.", + "markdownDescription": "The model definitions for the bundle. See [model](https://docs.databricks.com/dev-tools/bundles/resources.html#model)" }, "pipelines": { - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Pipeline" + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Pipeline", + "description": "The pipeline definitions for the bundle.", + "markdownDescription": "The pipeline definitions for the bundle. See [pipeline](https://docs.databricks.com/dev-tools/bundles/resources.html#pipeline)" }, "quality_monitors": { - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.QualityMonitor" + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.QualityMonitor", + "description": "The quality monitor definitions for the bundle.", + "markdownDescription": "The quality monitor definitions for the bundle. See [quality_monitor](https://docs.databricks.com/dev-tools/bundles/resources.html#quality_monitor)" }, "registered_models": { - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.RegisteredModel" + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.RegisteredModel", + "description": "The registered model definitions for the bundle.", + "markdownDescription": "The registered model definitions for the bundle. See [registered_model](https://docs.databricks.com/dev-tools/bundles/resources.html#registered_model)" }, "schemas": { - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Schema" + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Schema", + "description": "The schema definitions for the bundle.", + "markdownDescription": "The schema definitions for the bundle. See [schema](https://docs.databricks.com/dev-tools/bundles/resources.html#schema)" + }, + "volumes": { + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Volume" } }, "additionalProperties": false @@ -1062,18 +1386,21 @@ ] }, "config.Sync": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { "exclude": { - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "description": "A list of files or folders to exclude from the bundle." }, "include": { - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "description": "A list of files or folders to include in the bundle." }, "paths": { - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "description": "The local folder paths, which can be outside the bundle root, to synchronize to the workspace when the bundle is deployed." 
} }, "additionalProperties": false @@ -1085,51 +1412,75 @@ ] }, "config.Target": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { "artifacts": { - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Artifact" + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Artifact", + "description": "The artifacts to include in the target deployment.", + "markdownDescription": "The artifacts to include in the target deployment. See [artifact](https://docs.databricks.com/dev-tools/bundles/reference.html#artifact)" }, "bundle": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Bundle" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Bundle", + "description": "The name of the bundle when deploying to this target." }, "cluster_id": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The ID of the cluster to use for this target." }, "compute_id": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "Deprecated. The ID of the compute to use for this target." }, "default": { - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "description": "Whether this target is the default target." }, "git": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Git" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Git", + "description": "The Git version control settings for the target.", + "markdownDescription": "The Git version control settings for the target. See [git](https://docs.databricks.com/dev-tools/bundles/reference.html#git)." }, "mode": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Mode" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Mode", + "description": "The deployment mode for the target.", + "markdownDescription": "The deployment mode for the target. Valid values are `development` or `production`. See [link](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html)." }, "permissions": { - "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission" + "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission", + "description": "The permissions for deploying and running the bundle in the target.", + "markdownDescription": "The permissions for deploying and running the bundle in the target. See [permission](https://docs.databricks.com/dev-tools/bundles/reference.html#permission)." }, "presets": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Presets" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Presets", + "description": "The deployment presets for the target.", + "markdownDescription": "The deployment presets for the target. See [preset](https://docs.databricks.com/dev-tools/bundles/reference.html#preset)." }, "resources": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Resources" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Resources", + "description": "The resource definitions for the target.", + "markdownDescription": "The resource definitions for the target. See [resources](https://docs.databricks.com/dev-tools/bundles/reference.html#resources)." }, "run_as": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs", + "description": "The identity to use to run the bundle.", + "markdownDescription": "The identity to use to run the bundle. 
See [job_run_as](https://docs.databricks.com/dev-tools/bundles/reference.html#job_run_as) and [link](https://docs.databricks.com/dev-tools/bundles/run_as.html)." }, "sync": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Sync" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Sync", + "description": "The local paths to sync to the target workspace when a bundle is run or deployed.", + "markdownDescription": "The local paths to sync to the target workspace when a bundle is run or deployed. See [sync](https://docs.databricks.com/dev-tools/bundles/reference.html#sync)." }, "variables": { - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/variable.TargetVariable" + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/variable.TargetVariable", + "description": "The custom variable definitions for the target.", + "markdownDescription": "The custom variable definitions for the target. See [variables](https://docs.databricks.com/dev-tools/bundles/settings.html#variables) and [link](https://docs.databricks.com/dev-tools/bundles/variables.html)." }, "workspace": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Workspace" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Workspace", + "description": "The Databricks workspace for the target.", + "markdownDescription": "The Databricks workspace for the target. [workspace](https://docs.databricks.com/dev-tools/bundles/reference.html#workspace)" } }, "additionalProperties": false @@ -1141,57 +1492,73 @@ ] }, "config.Workspace": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { "artifact_path": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The artifact path to use within the workspace for both deployments and workflow runs" }, "auth_type": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The authentication type." 
}, "azure_client_id": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The Azure client ID" }, "azure_environment": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The Azure environment" }, "azure_login_app_id": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The Azure login app ID" }, "azure_tenant_id": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The Azure tenant ID" }, "azure_use_msi": { - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "description": "Whether to use MSI for Azure" }, "azure_workspace_resource_id": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The Azure workspace resource ID" }, "client_id": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The client ID for the workspace" }, "file_path": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The file path to use within the workspace for both deployments and workflow runs" }, "google_service_account": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The Google service account name" }, "host": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The Databricks workspace host URL" }, "profile": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The Databricks workspace profile name" }, "resource_path": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The workspace resource path" }, "root_path": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The Databricks workspace root path" }, "state_path": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The workspace state path" } }, "additionalProperties": false @@ -1211,15 +1578,359 @@ }, "databricks-sdk-go": { "service": { + "apps.AppDeployment": { + "oneOf": [ + { + "type": "object", + "properties": { + "create_time": { + "$ref": "#/$defs/string" + }, + "creator": { + "$ref": "#/$defs/string" + }, + "deployment_artifacts": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentArtifacts" + }, + "deployment_id": { + "$ref": "#/$defs/string" + }, + "mode": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentMode" + }, + "source_code_path": { + "$ref": "#/$defs/string" + }, + "status": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentStatus" + }, + "update_time": { + "$ref": "#/$defs/string" + } + }, + "additionalProperties": false + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "apps.AppDeploymentArtifacts": { + "oneOf": [ + { + "type": "object", + "properties": { + "source_code_path": { + "$ref": "#/$defs/string" + } + }, + "additionalProperties": false + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "apps.AppDeploymentMode": { + "oneOf": [ + { + "type": "string", + "enum": ["SNAPSHOT", "AUTO_SYNC"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "apps.AppDeploymentState": { + "oneOf": [ + { + "type": "string", + "enum": ["SUCCEEDED", "FAILED", "IN_PROGRESS", "CANCELLED"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "apps.AppDeploymentStatus": { 
+ "oneOf": [ + { + "type": "object", + "properties": { + "message": { + "$ref": "#/$defs/string" + }, + "state": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentState" + } + }, + "additionalProperties": false + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "apps.AppResource": { + "oneOf": [ + { + "type": "object", + "properties": { + "description": { + "$ref": "#/$defs/string" + }, + "job": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceJob" + }, + "name": { + "$ref": "#/$defs/string" + }, + "secret": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceSecret" + }, + "serving_endpoint": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceServingEndpoint" + }, + "sql_warehouse": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceSqlWarehouse" + } + }, + "additionalProperties": false, + "required": ["name"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "apps.AppResourceJob": { + "oneOf": [ + { + "type": "object", + "properties": { + "id": { + "$ref": "#/$defs/string" + }, + "permission": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceJobJobPermission" + } + }, + "additionalProperties": false, + "required": ["id", "permission"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "apps.AppResourceJobJobPermission": { + "oneOf": [ + { + "type": "string", + "enum": [ + "CAN_MANAGE", + "IS_OWNER", + "CAN_MANAGE_RUN", + "CAN_VIEW" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "apps.AppResourceSecret": { + "oneOf": [ + { + "type": "object", + "properties": { + "key": { + "$ref": "#/$defs/string" + }, + "permission": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceSecretSecretPermission" + }, + "scope": { + "$ref": "#/$defs/string" + } + }, + "additionalProperties": false, + "required": ["key", "permission", "scope"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "apps.AppResourceSecretSecretPermission": { + "oneOf": [ + { + "type": "string", + "description": "Permission to grant on the secret scope. 
Supported permissions are: \"READ\", \"WRITE\", \"MANAGE\".", + "enum": ["READ", "WRITE", "MANAGE"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "apps.AppResourceServingEndpoint": { + "oneOf": [ + { + "type": "object", + "properties": { + "name": { + "$ref": "#/$defs/string" + }, + "permission": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceServingEndpointServingEndpointPermission" + } + }, + "additionalProperties": false, + "required": ["name", "permission"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "apps.AppResourceServingEndpointServingEndpointPermission": { + "oneOf": [ + { + "type": "string", + "enum": ["CAN_MANAGE", "CAN_QUERY", "CAN_VIEW"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "apps.AppResourceSqlWarehouse": { + "oneOf": [ + { + "type": "object", + "properties": { + "id": { + "$ref": "#/$defs/string" + }, + "permission": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceSqlWarehouseSqlWarehousePermission" + } + }, + "additionalProperties": false, + "required": ["id", "permission"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "apps.AppResourceSqlWarehouseSqlWarehousePermission": { + "oneOf": [ + { + "type": "string", + "enum": ["CAN_MANAGE", "CAN_USE", "IS_OWNER"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "apps.ApplicationState": { + "oneOf": [ + { + "type": "string", + "enum": ["DEPLOYING", "RUNNING", "CRASHED", "UNAVAILABLE"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "apps.ApplicationStatus": { + "oneOf": [ + { + "type": "object", + "properties": { + "message": { + "$ref": "#/$defs/string" + }, + "state": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.ApplicationState" + } + }, + "additionalProperties": false + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "apps.ComputeState": { + "oneOf": [ + { + "type": "string", + "enum": [ + "ERROR", + "DELETING", + "STARTING", + "STOPPING", + "UPDATING", + "STOPPED", + "ACTIVE" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "apps.ComputeStatus": { + "oneOf": [ + { + "type": "object", + "properties": { + "message": { + "$ref": "#/$defs/string" + }, + "state": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.ComputeState" + } + }, + "additionalProperties": false + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "catalog.MonitorCronSchedule": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { "pause_status": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorCronSchedulePauseStatus", - "description": "Read only field that indicates whether a schedule is paused or not.", - "enum": ["UNPAUSED", "PAUSED"] + "description": "Read only field that indicates whether a schedule is paused or not." 
}, "quartz_cron_expression": { "$ref": "#/$defs/string", @@ -1240,10 +1951,20 @@ ] }, "catalog.MonitorCronSchedulePauseStatus": { - "type": "string" + "oneOf": [ + { + "type": "string", + "description": "Read only field that indicates whether a schedule is paused or not.", + "enum": ["UNPAUSED", "PAUSED"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] }, "catalog.MonitorDataClassificationConfig": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -1261,7 +1982,7 @@ ] }, "catalog.MonitorDestination": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -1279,7 +2000,7 @@ ] }, "catalog.MonitorInferenceLog": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -1305,11 +2026,7 @@ }, "problem_type": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorInferenceLogProblemType", - "description": "Problem type the model aims to solve. Determines the type of model-quality metrics that will be computed.", - "enum": [ - "PROBLEM_TYPE_CLASSIFICATION", - "PROBLEM_TYPE_REGRESSION" - ] + "description": "Problem type the model aims to solve. Determines the type of model-quality metrics that will be computed." }, "timestamp_col": { "$ref": "#/$defs/string", @@ -1332,10 +2049,23 @@ ] }, "catalog.MonitorInferenceLogProblemType": { - "type": "string" + "oneOf": [ + { + "type": "string", + "description": "Problem type the model aims to solve. Determines the type of model-quality metrics that will be computed.", + "enum": [ + "PROBLEM_TYPE_CLASSIFICATION", + "PROBLEM_TYPE_REGRESSION" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] }, "catalog.MonitorMetric": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -1357,12 +2087,7 @@ }, "type": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorMetricType", - "description": "Can only be one of ``\"CUSTOM_METRIC_TYPE_AGGREGATE\"``, ``\"CUSTOM_METRIC_TYPE_DERIVED\"``, or ``\"CUSTOM_METRIC_TYPE_DRIFT\"``.\nThe ``\"CUSTOM_METRIC_TYPE_AGGREGATE\"`` and ``\"CUSTOM_METRIC_TYPE_DERIVED\"`` metrics\nare computed on a single table, whereas the ``\"CUSTOM_METRIC_TYPE_DRIFT\"`` compare metrics across\nbaseline and input table, or across the two consecutive time windows.\n- CUSTOM_METRIC_TYPE_AGGREGATE: only depend on the existing columns in your table\n- CUSTOM_METRIC_TYPE_DERIVED: depend on previously computed aggregate metrics\n- CUSTOM_METRIC_TYPE_DRIFT: depend on previously computed aggregate or derived metrics\n", - "enum": [ - "CUSTOM_METRIC_TYPE_AGGREGATE", - "CUSTOM_METRIC_TYPE_DERIVED", - "CUSTOM_METRIC_TYPE_DRIFT" - ] + "description": "Can only be one of ``\"CUSTOM_METRIC_TYPE_AGGREGATE\"``, ``\"CUSTOM_METRIC_TYPE_DERIVED\"``, or ``\"CUSTOM_METRIC_TYPE_DRIFT\"``.\nThe ``\"CUSTOM_METRIC_TYPE_AGGREGATE\"`` and ``\"CUSTOM_METRIC_TYPE_DERIVED\"`` metrics\nare computed on a single table, whereas the ``\"CUSTOM_METRIC_TYPE_DRIFT\"`` compare metrics across\nbaseline and input table, or across the two consecutive time windows.\n- CUSTOM_METRIC_TYPE_AGGREGATE: only depend on the existing columns in your table\n- CUSTOM_METRIC_TYPE_DERIVED: depend on previously computed aggregate metrics\n- CUSTOM_METRIC_TYPE_DRIFT: depend on previously computed aggregate or derived metrics\n" } }, "additionalProperties": false, @@ -1381,10 +2106,24 @@ ] }, "catalog.MonitorMetricType": { - "type": "string" + "oneOf": [ + { + "type": 
"string", + "description": "Can only be one of ``\"CUSTOM_METRIC_TYPE_AGGREGATE\"``, ``\"CUSTOM_METRIC_TYPE_DERIVED\"``, or ``\"CUSTOM_METRIC_TYPE_DRIFT\"``.\nThe ``\"CUSTOM_METRIC_TYPE_AGGREGATE\"`` and ``\"CUSTOM_METRIC_TYPE_DERIVED\"`` metrics\nare computed on a single table, whereas the ``\"CUSTOM_METRIC_TYPE_DRIFT\"`` compare metrics across\nbaseline and input table, or across the two consecutive time windows.\n- CUSTOM_METRIC_TYPE_AGGREGATE: only depend on the existing columns in your table\n- CUSTOM_METRIC_TYPE_DERIVED: depend on previously computed aggregate metrics\n- CUSTOM_METRIC_TYPE_DRIFT: depend on previously computed aggregate or derived metrics\n", + "enum": [ + "CUSTOM_METRIC_TYPE_AGGREGATE", + "CUSTOM_METRIC_TYPE_DERIVED", + "CUSTOM_METRIC_TYPE_DRIFT" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] }, "catalog.MonitorNotifications": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -1406,7 +2145,7 @@ ] }, "catalog.MonitorSnapshot": { - "anyOf": [ + "oneOf": [ { "type": "object", "additionalProperties": false @@ -1418,7 +2157,7 @@ ] }, "catalog.MonitorTimeSeries": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -1440,8 +2179,20 @@ } ] }, + "catalog.VolumeType": { + "oneOf": [ + { + "type": "string", + "enum": ["EXTERNAL", "MANAGED"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "compute.Adlsgen2Info": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -1460,7 +2211,7 @@ ] }, "compute.AutoScale": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -1482,7 +2233,7 @@ ] }, "compute.AwsAttributes": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -1534,12 +2285,20 @@ ] }, "compute.AwsAvailability": { - "type": "string", - "description": "Availability type used for all subsequent nodes past the `first_on_demand` ones.\n\nNote: If `first_on_demand` is zero, this availability type will be used for the entire cluster.\n", - "enum": ["SPOT", "ON_DEMAND", "SPOT_WITH_FALLBACK"] + "oneOf": [ + { + "type": "string", + "description": "Availability type used for all subsequent nodes past the `first_on_demand` ones.\n\nNote: If `first_on_demand` is zero, this availability type will be used for the entire cluster.\n", + "enum": ["SPOT", "ON_DEMAND", "SPOT_WITH_FALLBACK"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] }, "compute.AzureAttributes": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -1568,16 +2327,24 @@ ] }, "compute.AzureAvailability": { - "type": "string", - "description": "Availability type used for all subsequent nodes past the `first_on_demand` ones.\nNote: If `first_on_demand` is zero (which only happens on pool clusters), this availability\ntype will be used for the entire cluster.", - "enum": [ - "SPOT_AZURE", - "ON_DEMAND_AZURE", - "SPOT_WITH_FALLBACK_AZURE" + "oneOf": [ + { + "type": "string", + "description": "Availability type used for all subsequent nodes past the `first_on_demand` ones.\nNote: If `first_on_demand` is zero (which only happens on pool clusters), this availability\ntype will be used for the entire cluster.", + "enum": [ + "SPOT_AZURE", + "ON_DEMAND_AZURE", + "SPOT_WITH_FALLBACK_AZURE" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } ] }, "compute.ClientsTypes": { - 
"anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -1599,7 +2366,7 @@ ] }, "compute.ClusterLogConf": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -1621,7 +2388,7 @@ ] }, "compute.ClusterSpec": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -1691,6 +2458,13 @@ "$ref": "#/$defs/string", "description": "The optional ID of the instance pool to which the cluster belongs." }, + "is_single_node": { + "$ref": "#/$defs/bool", + "description": "This field can only be used with `kind`.\n\nWhen set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers`\n" + }, + "kind": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.Kind" + }, "node_type_id": { "$ref": "#/$defs/string", "description": "This field encodes, through a single value, the resources available to each of\nthe Spark nodes in this cluster. For example, the Spark nodes can be provisioned\nand optimized for memory or compute intensive workloads. A list of available node\ntypes can be retrieved by using the :method:clusters/listNodeTypes API call.\n" @@ -1726,6 +2500,10 @@ "$ref": "#/$defs/slice/string", "description": "SSH public key contents that will be added to each Spark node in this cluster. The\ncorresponding private keys can be used to login with the user name `ubuntu` on port `2200`.\nUp to 10 keys can be specified." }, + "use_ml_runtime": { + "$ref": "#/$defs/bool", + "description": "This field can only be used with `kind`.\n\n`effective_spark_version` is determined by `spark_version` (DBR release), this field `use_ml_runtime`, and whether `node_type_id` is gpu node or not.\n" + }, "workload_type": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.WorkloadType" } @@ -1739,20 +2517,31 @@ ] }, "compute.DataSecurityMode": { - "type": "string", - "description": "Data security mode decides what data governance model to use when accessing data\nfrom a cluster.\n\n* `NONE`: No security isolation for multiple users sharing the cluster. Data governance features are not available in this mode.\n* `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in `single_user_name`. Most programming languages, cluster features and data governance features are available in this mode.\n* `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are fully isolated so that they cannot see each other's data and credentials. Most data governance features are supported in this mode. 
But programming languages and cluster features might be limited.\n\nThe following modes are deprecated starting with Databricks Runtime 15.0 and\nwill be removed for future Databricks Runtime versions:\n\n* `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters.\n* `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high concurrency clusters.\n* `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on standard clusters.\n* `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that doesn’t have UC nor passthrough enabled.\n", - "enum": [ - "NONE", - "SINGLE_USER", - "USER_ISOLATION", - "LEGACY_TABLE_ACL", - "LEGACY_PASSTHROUGH", - "LEGACY_SINGLE_USER", - "LEGACY_SINGLE_USER_STANDARD" + "oneOf": [ + { + "type": "string", + "description": "Data security mode decides what data governance model to use when accessing data\nfrom a cluster.\n\nThe following modes can only be used with `kind`.\n* `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on your compute configuration.\n* `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`.\n* `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`.\n\nThe following modes can be used regardless of `kind`.\n* `NONE`: No security isolation for multiple users sharing the cluster. Data governance features are not available in this mode.\n* `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in `single_user_name`. Most programming languages, cluster features and data governance features are available in this mode.\n* `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are fully isolated so that they cannot see each other's data and credentials. Most data governance features are supported in this mode. 
But programming languages and cluster features might be limited.\n\nThe following modes are deprecated starting with Databricks Runtime 15.0 and\nwill be removed for future Databricks Runtime versions:\n\n* `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters.\n* `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high concurrency clusters.\n* `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on standard clusters.\n* `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that doesn’t have UC nor passthrough enabled.\n", + "enum": [ + "DATA_SECURITY_MODE_AUTO", + "DATA_SECURITY_MODE_STANDARD", + "DATA_SECURITY_MODE_DEDICATED", + "NONE", + "SINGLE_USER", + "USER_ISOLATION", + "LEGACY_TABLE_ACL", + "LEGACY_PASSTHROUGH", + "LEGACY_SINGLE_USER", + "LEGACY_SINGLE_USER_STANDARD" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } ] }, "compute.DbfsStorageInfo": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -1771,7 +2560,7 @@ ] }, "compute.DockerBasicAuth": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -1793,7 +2582,7 @@ ] }, "compute.DockerImage": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -1814,12 +2603,20 @@ ] }, "compute.EbsVolumeType": { - "type": "string", - "description": "The type of EBS volumes that will be launched with this cluster.", - "enum": ["GENERAL_PURPOSE_SSD", "THROUGHPUT_OPTIMIZED_HDD"] + "oneOf": [ + { + "type": "string", + "description": "The type of EBS volumes that will be launched with this cluster.", + "enum": ["GENERAL_PURPOSE_SSD", "THROUGHPUT_OPTIMIZED_HDD"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] }, "compute.Environment": { - "anyOf": [ + "oneOf": [ { "type": "object", "description": "The environment entity used to preserve serverless environment side panel and jobs' environment for non-notebook task.\nIn this minimal environment spec, only pip dependencies are supported.", @@ -1843,7 +2640,7 @@ ] }, "compute.GcpAttributes": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -1880,16 +2677,24 @@ ] }, "compute.GcpAvailability": { - "type": "string", - "description": "This field determines whether the instance pool will contain preemptible\nVMs, on-demand VMs, or preemptible VMs with a fallback to on-demand VMs if the former is unavailable.", - "enum": [ - "PREEMPTIBLE_GCP", - "ON_DEMAND_GCP", - "PREEMPTIBLE_WITH_FALLBACK_GCP" + "oneOf": [ + { + "type": "string", + "description": "This field determines whether the instance pool will contain preemptible\nVMs, on-demand VMs, or preemptible VMs with a fallback to on-demand VMs if the former is unavailable.", + "enum": [ + "PREEMPTIBLE_GCP", + "ON_DEMAND_GCP", + "PREEMPTIBLE_WITH_FALLBACK_GCP" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } ] }, "compute.GcsStorageInfo": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -1908,7 +2713,7 @@ ] }, "compute.InitScriptInfo": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -1949,8 +2754,11 @@ } ] }, + "compute.Kind": { + "type": "string" + }, "compute.Library": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -1992,7 +2800,7 @@ ] }, "compute.LocalFileInfo": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -2011,7 +2819,7 @@ ] }, 
"compute.LogAnalyticsInfo": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -2033,7 +2841,7 @@ ] }, "compute.MavenLibrary": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -2060,7 +2868,7 @@ ] }, "compute.PythonPyPiLibrary": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -2083,7 +2891,7 @@ ] }, "compute.RCranLibrary": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -2106,12 +2914,20 @@ ] }, "compute.RuntimeEngine": { - "type": "string", - "description": "Determines the cluster's runtime engine, either standard or Photon.\n\nThis field is not compatible with legacy `spark_version` values that contain `-photon-`.\nRemove `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.\n\nIf left unspecified, the runtime engine defaults to standard unless the spark_version\ncontains -photon-, in which case Photon will be used.\n", - "enum": ["NULL", "STANDARD", "PHOTON"] + "oneOf": [ + { + "type": "string", + "description": "Determines the cluster's runtime engine, either standard or Photon.\n\nThis field is not compatible with legacy `spark_version` values that contain `-photon-`.\nRemove `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.\n\nIf left unspecified, the runtime engine defaults to standard unless the spark_version\ncontains -photon-, in which case Photon will be used.\n", + "enum": ["NULL", "STANDARD", "PHOTON"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] }, "compute.S3StorageInfo": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -2154,7 +2970,7 @@ ] }, "compute.VolumesStorageInfo": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -2173,7 +2989,7 @@ ] }, "compute.WorkloadType": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -2192,7 +3008,7 @@ ] }, "compute.WorkspaceStorageInfo": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -2210,12 +3026,63 @@ } ] }, + "dashboards.LifecycleState": { + "oneOf": [ + { + "type": "string", + "enum": ["ACTIVE", "TRASHED"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "jobs.CleanRoomsNotebookTask": { + "oneOf": [ + { + "type": "object", + "properties": { + "clean_room_name": { + "$ref": "#/$defs/string", + "description": "The clean room that the notebook belongs to." + }, + "etag": { + "$ref": "#/$defs/string", + "description": "Checksum to validate the freshness of the notebook resource (i.e. the notebook being run is the latest version).\nIt can be fetched by calling the :method:cleanroomassets/get API." + }, + "notebook_base_parameters": { + "$ref": "#/$defs/map/string", + "description": "Base parameters to be used for the clean room notebook job." + }, + "notebook_name": { + "$ref": "#/$defs/string", + "description": "Name of the notebook being run." 
+ } + }, + "additionalProperties": false, + "required": ["clean_room_name", "notebook_name"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "jobs.Condition": { - "type": "string", - "enum": ["ANY_UPDATED", "ALL_UPDATED"] + "oneOf": [ + { + "type": "string", + "enum": ["ANY_UPDATED", "ALL_UPDATED"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] }, "jobs.ConditionTask": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -2242,19 +3109,27 @@ ] }, "jobs.ConditionTaskOp": { - "type": "string", - "description": "* `EQUAL_TO`, `NOT_EQUAL` operators perform string comparison of their operands. This means that `“12.0” == “12”` will evaluate to `false`.\n* `GREATER_THAN`, `GREATER_THAN_OR_EQUAL`, `LESS_THAN`, `LESS_THAN_OR_EQUAL` operators perform numeric comparison of their operands. `“12.0” \u003e= “12”` will evaluate to `true`, `“10.0” \u003e= “12”` will evaluate to `false`.\n\nThe boolean comparison to task values can be implemented with operators `EQUAL_TO`, `NOT_EQUAL`. If a task value was set to a boolean value, it will be serialized to `“true”` or `“false”` for the comparison.", - "enum": [ - "EQUAL_TO", - "GREATER_THAN", - "GREATER_THAN_OR_EQUAL", - "LESS_THAN", - "LESS_THAN_OR_EQUAL", - "NOT_EQUAL" + "oneOf": [ + { + "type": "string", + "description": "* `EQUAL_TO`, `NOT_EQUAL` operators perform string comparison of their operands. This means that `“12.0” == “12”` will evaluate to `false`.\n* `GREATER_THAN`, `GREATER_THAN_OR_EQUAL`, `LESS_THAN`, `LESS_THAN_OR_EQUAL` operators perform numeric comparison of their operands. `“12.0” \u003e= “12”` will evaluate to `true`, `“10.0” \u003e= “12”` will evaluate to `false`.\n\nThe boolean comparison to task values can be implemented with operators `EQUAL_TO`, `NOT_EQUAL`. 
If a task value was set to a boolean value, it will be serialized to `“true”` or `“false”` for the comparison.", + "enum": [ + "EQUAL_TO", + "GREATER_THAN", + "GREATER_THAN_OR_EQUAL", + "LESS_THAN", + "LESS_THAN_OR_EQUAL", + "NOT_EQUAL" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } ] }, "jobs.Continuous": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -2272,7 +3147,7 @@ ] }, "jobs.CronSchedule": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -2299,7 +3174,7 @@ ] }, "jobs.DbtTask": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -2342,7 +3217,7 @@ ] }, "jobs.FileArrivalTriggerConfiguration": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -2369,7 +3244,7 @@ ] }, "jobs.ForEachTask": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -2396,24 +3271,40 @@ ] }, "jobs.Format": { - "type": "string", - "enum": ["SINGLE_TASK", "MULTI_TASK"] + "oneOf": [ + { + "type": "string", + "enum": ["SINGLE_TASK", "MULTI_TASK"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] }, "jobs.GitProvider": { - "type": "string", - "enum": [ - "gitHub", - "bitbucketCloud", - "azureDevOpsServices", - "gitHubEnterprise", - "bitbucketServer", - "gitLab", - "gitLabEnterpriseEdition", - "awsCodeCommit" + "oneOf": [ + { + "type": "string", + "enum": [ + "gitHub", + "bitbucketCloud", + "azureDevOpsServices", + "gitHubEnterprise", + "bitbucketServer", + "gitLab", + "gitLabEnterpriseEdition", + "awsCodeCommit" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } ] }, "jobs.GitSnapshot": { - "anyOf": [ + "oneOf": [ { "type": "object", "description": "Read-only state of the remote repository at the time the job was run. This field is only included on job runs.", @@ -2432,7 +3323,7 @@ ] }, "jobs.GitSource": { - "anyOf": [ + "oneOf": [ { "type": "object", "description": "An optional specification for a remote Git repository containing the source code used by tasks. Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks.\n\nIf `git_source` is set, these tasks retrieve the file from the remote repository by default. However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task.\n\nNote: dbt and SQL File tasks support only version-controlled sources. 
If dbt or SQL File tasks are used, `git_source` must be defined on the job.", @@ -2468,7 +3359,7 @@ ] }, "jobs.JobCluster": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -2491,7 +3382,7 @@ ] }, "jobs.JobDeployment": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -2514,17 +3405,33 @@ ] }, "jobs.JobDeploymentKind": { - "type": "string", - "description": "* `BUNDLE`: The job is managed by Databricks Asset Bundle.", - "enum": ["BUNDLE"] + "oneOf": [ + { + "type": "string", + "description": "* `BUNDLE`: The job is managed by Databricks Asset Bundle.", + "enum": ["BUNDLE"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] }, "jobs.JobEditMode": { - "type": "string", - "description": "Edit mode of the job.\n\n* `UI_LOCKED`: The job is in a locked UI state and cannot be modified.\n* `EDITABLE`: The job is in an editable state and can be modified.", - "enum": ["UI_LOCKED", "EDITABLE"] + "oneOf": [ + { + "type": "string", + "description": "Edit mode of the job.\n\n* `UI_LOCKED`: The job is in a locked UI state and cannot be modified.\n* `EDITABLE`: The job is in an editable state and can be modified.", + "enum": ["UI_LOCKED", "EDITABLE"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] }, "jobs.JobEmailNotifications": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -2562,7 +3469,7 @@ ] }, "jobs.JobEnvironment": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -2584,7 +3491,7 @@ ] }, "jobs.JobNotificationSettings": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -2606,7 +3513,7 @@ ] }, "jobs.JobParameterDefinition": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -2629,10 +3536,10 @@ ] }, "jobs.JobRunAs": { - "anyOf": [ + "oneOf": [ { "type": "object", - "description": "Write-only setting. Specifies the user, service principal or group that the job/pipeline runs as. If not specified, the job/pipeline runs as the user who created the job/pipeline.\n\nExactly one of `user_name`, `service_principal_name`, `group_name` should be specified. If not, an error is thrown.", + "description": "Write-only setting. Specifies the user or service principal that the job runs as. If not specified, the job runs as the user who created the job.\n\nEither `user_name` or `service_principal_name` should be specified. If not, an error is thrown.", "properties": { "service_principal_name": { "$ref": "#/$defs/string", @@ -2652,7 +3559,7 @@ ] }, "jobs.JobSource": { - "anyOf": [ + "oneOf": [ { "type": "object", "description": "The source of the job specification in the remote repository when the job is source controlled.", @@ -2680,28 +3587,52 @@ ] }, "jobs.JobSourceDirtyState": { - "type": "string", - "description": "Dirty state indicates the job is not fully synced with the job specification\nin the remote repository.\n\nPossible values are:\n* `NOT_SYNCED`: The job is not yet synced with the remote job specification. Import the remote job specification from UI to make the job fully synced.\n* `DISCONNECTED`: The job is temporary disconnected from the remote job specification and is allowed for live edit. 
Import the remote job specification again from UI to make the job fully synced.", - "enum": ["NOT_SYNCED", "DISCONNECTED"] + "oneOf": [ + { + "type": "string", + "description": "Dirty state indicates the job is not fully synced with the job specification\nin the remote repository.\n\nPossible values are:\n* `NOT_SYNCED`: The job is not yet synced with the remote job specification. Import the remote job specification from the UI to make the job fully synced.\n* `DISCONNECTED`: The job is temporarily disconnected from the remote job specification and is allowed for live edit. Import the remote job specification again from the UI to make the job fully synced.", + "enum": ["NOT_SYNCED", "DISCONNECTED"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] }, "jobs.JobsHealthMetric": { - "type": "string", - "description": "Specifies the health metric that is being evaluated for a particular health rule.\n\n* `RUN_DURATION_SECONDS`: Expected total time for a run in seconds.\n* `STREAMING_BACKLOG_BYTES`: An estimate of the maximum bytes of data waiting to be consumed across all streams. This metric is in Private Preview.\n* `STREAMING_BACKLOG_RECORDS`: An estimate of the maximum offset lag across all streams. This metric is in Private Preview.\n* `STREAMING_BACKLOG_SECONDS`: An estimate of the maximum consumer delay across all streams. This metric is in Private Preview.\n* `STREAMING_BACKLOG_FILES`: An estimate of the maximum number of outstanding files across all streams. This metric is in Private Preview.", - "enum": [ - "RUN_DURATION_SECONDS", - "STREAMING_BACKLOG_BYTES", - "STREAMING_BACKLOG_RECORDS", - "STREAMING_BACKLOG_SECONDS", - "STREAMING_BACKLOG_FILES" + "oneOf": [ + { + "type": "string", + "description": "Specifies the health metric that is being evaluated for a particular health rule.\n\n* `RUN_DURATION_SECONDS`: Expected total time for a run in seconds.\n* `STREAMING_BACKLOG_BYTES`: An estimate of the maximum bytes of data waiting to be consumed across all streams. This metric is in Public Preview.\n* `STREAMING_BACKLOG_RECORDS`: An estimate of the maximum offset lag across all streams. This metric is in Public Preview.\n* `STREAMING_BACKLOG_SECONDS`: An estimate of the maximum consumer delay across all streams. This metric is in Public Preview.\n* `STREAMING_BACKLOG_FILES`: An estimate of the maximum number of outstanding files across all streams. 
This metric is in Public Preview.", + "enum": [ + "RUN_DURATION_SECONDS", + "STREAMING_BACKLOG_BYTES", + "STREAMING_BACKLOG_RECORDS", + "STREAMING_BACKLOG_SECONDS", + "STREAMING_BACKLOG_FILES" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } ] }, "jobs.JobsHealthOperator": { - "type": "string", - "description": "Specifies the operator used to compare the health metric value with the specified threshold.", - "enum": ["GREATER_THAN"] + "oneOf": [ + { + "type": "string", + "description": "Specifies the operator used to compare the health metric value with the specified threshold.", + "enum": ["GREATER_THAN"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] }, "jobs.JobsHealthRule": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -2726,7 +3657,7 @@ ] }, "jobs.JobsHealthRules": { - "anyOf": [ + "oneOf": [ { "type": "object", "description": "An optional set of health rules that can be defined for this job.", @@ -2744,7 +3675,7 @@ ] }, "jobs.NotebookTask": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -2775,11 +3706,19 @@ ] }, "jobs.PauseStatus": { - "type": "string", - "enum": ["UNPAUSED", "PAUSED"] + "oneOf": [ + { + "type": "string", + "enum": ["UNPAUSED", "PAUSED"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] }, "jobs.PeriodicTriggerConfiguration": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -2802,11 +3741,19 @@ ] }, "jobs.PeriodicTriggerConfigurationTimeUnit": { - "type": "string", - "enum": ["HOURS", "DAYS", "WEEKS"] + "oneOf": [ + { + "type": "string", + "enum": ["HOURS", "DAYS", "WEEKS"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] }, "jobs.PipelineParams": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -2824,7 +3771,7 @@ ] }, "jobs.PipelineTask": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -2847,7 +3794,7 @@ ] }, "jobs.PythonWheelTask": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -2878,7 +3825,7 @@ ] }, "jobs.QueueSettings": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -2897,19 +3844,27 @@ ] }, "jobs.RunIf": { - "type": "string", - "description": "An optional value indicating the condition that determines whether the task should be run once its dependencies have been completed. When omitted, defaults to `ALL_SUCCESS`.\n\nPossible values are:\n* `ALL_SUCCESS`: All dependencies have executed and succeeded\n* `AT_LEAST_ONE_SUCCESS`: At least one dependency has succeeded\n* `NONE_FAILED`: None of the dependencies have failed and at least one was executed\n* `ALL_DONE`: All dependencies have been completed\n* `AT_LEAST_ONE_FAILED`: At least one dependency failed\n* `ALL_FAILED`: ALl dependencies have failed", - "enum": [ - "ALL_SUCCESS", - "ALL_DONE", - "NONE_FAILED", - "AT_LEAST_ONE_SUCCESS", - "ALL_FAILED", - "AT_LEAST_ONE_FAILED" + "oneOf": [ + { + "type": "string", + "description": "An optional value indicating the condition that determines whether the task should be run once its dependencies have been completed. 
When omitted, defaults to `ALL_SUCCESS`.\n\nPossible values are:\n* `ALL_SUCCESS`: All dependencies have executed and succeeded\n* `AT_LEAST_ONE_SUCCESS`: At least one dependency has succeeded\n* `NONE_FAILED`: None of the dependencies have failed and at least one was executed\n* `ALL_DONE`: All dependencies have been completed\n* `AT_LEAST_ONE_FAILED`: At least one dependency failed\n* `ALL_FAILED`: All dependencies have failed", + "enum": [ + "ALL_SUCCESS", + "ALL_DONE", + "NONE_FAILED", + "AT_LEAST_ONE_SUCCESS", + "ALL_FAILED", + "AT_LEAST_ONE_FAILED" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } ] }, "jobs.RunJobTask": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -2919,7 +3874,7 @@ }, "jar_params": { "$ref": "#/$defs/slice/string", - "description": "A list of parameters for jobs with Spark JAR tasks, for example `\"jar_params\": [\"john doe\", \"35\"]`.\nThe parameters are used to invoke the main function of the main class specified in the Spark JAR task.\nIf not specified upon `run-now`, it defaults to an empty list.\njar_params cannot be specified in conjunction with notebook_params.\nThe JSON representation of this field (for example `{\"jar_params\":[\"john doe\",\"35\"]}`) cannot exceed 10,000 bytes.\n\nUse [Task parameter variables](/jobs.html\\\"#parameter-variables\\\") to set parameters containing information about job runs." + "description": "A list of parameters for jobs with Spark JAR tasks, for example `\"jar_params\": [\"john doe\", \"35\"]`.\nThe parameters are used to invoke the main function of the main class specified in the Spark JAR task.\nIf not specified upon `run-now`, it defaults to an empty list.\njar_params cannot be specified in conjunction with notebook_params.\nThe JSON representation of this field (for example `{\"jar_params\":[\"john doe\",\"35\"]}`) cannot exceed 10,000 bytes.\n\nUse [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs." }, "job_id": { "$ref": "#/$defs/int64", @@ -2963,12 +3918,20 @@ ] }, "jobs.Source": { - "type": "string", - "description": "Optional location type of the SQL file. When set to `WORKSPACE`, the SQL file will be retrieved\\\nfrom the local Databricks workspace. When set to `GIT`, the SQL file will be retrieved from a Git repository\ndefined in `git_source`. If the value is empty, the task will use `GIT` if `git_source` is defined and `WORKSPACE` otherwise.\n\n* `WORKSPACE`: SQL file is located in Databricks workspace.\n* `GIT`: SQL file is located in cloud Git provider.", - "enum": ["WORKSPACE", "GIT"] + "oneOf": [ + { + "type": "string", + "description": "Optional location type of the SQL file. When set to `WORKSPACE`, the SQL file will be retrieved\nfrom the local Databricks workspace. When set to `GIT`, the SQL file will be retrieved from a Git repository\ndefined in `git_source`. 
If the value is empty, the task will use `GIT` if `git_source` is defined and `WORKSPACE` otherwise.\n\n* `WORKSPACE`: SQL file is located in Databricks workspace.\n* `GIT`: SQL file is located in cloud Git provider.", + "enum": ["WORKSPACE", "GIT"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] }, "jobs.SparkJarTask": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -2994,7 +3957,7 @@ ] }, "jobs.SparkPythonTask": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -3021,7 +3984,7 @@ ] }, "jobs.SparkSubmitTask": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -3039,7 +4002,7 @@ ] }, "jobs.SqlTask": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -3078,7 +4041,7 @@ ] }, "jobs.SqlTaskAlert": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -3105,7 +4068,7 @@ ] }, "jobs.SqlTaskDashboard": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -3136,7 +4099,7 @@ ] }, "jobs.SqlTaskFile": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -3159,7 +4122,7 @@ ] }, "jobs.SqlTaskQuery": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -3178,7 +4141,7 @@ ] }, "jobs.SqlTaskSubscription": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -3200,7 +4163,7 @@ ] }, "jobs.TableUpdateTriggerConfiguration": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -3230,17 +4193,21 @@ ] }, "jobs.Task": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { + "clean_rooms_notebook_task": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.CleanRoomsNotebookTask", + "description": "The task runs a [clean rooms](https://docs.databricks.com/en/clean-rooms/index.html) notebook\nwhen the `clean_rooms_notebook_task` field is present." + }, "condition_task": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ConditionTask", - "description": "If condition_task, specifies a condition with an outcome that can be used to control the execution of other tasks. Does not require a cluster to execute and does not support retries or notifications." + "description": "The task evaluates a condition that can be used to control the execution of other tasks when the `condition_task` field is present.\nThe condition task does not require a cluster to execute and does not support retries or notifications." }, "dbt_task": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.DbtTask", - "description": "If dbt_task, indicates that this must execute a dbt task. It requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse." + "description": "The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse." }, "depends_on": { "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.TaskDependency", @@ -3268,7 +4235,7 @@ }, "for_each_task": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ForEachTask", - "description": "If for_each_task, indicates that this task must execute the nested task within it." + "description": "The task executes a nested task for every input provided when the `for_each_task` field is present." 
}, "health": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRules" @@ -3295,7 +4262,7 @@ }, "notebook_task": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.NotebookTask", - "description": "If notebook_task, indicates that this task must run a notebook. This field may not be specified in conjunction with spark_jar_task." + "description": "The task runs a notebook when the `notebook_task` field is present." }, "notification_settings": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.TaskNotificationSettings", @@ -3303,11 +4270,11 @@ }, "pipeline_task": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PipelineTask", - "description": "If pipeline_task, indicates that this task must execute a Pipeline." + "description": "The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines configured to use triggered more are supported." }, "python_wheel_task": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PythonWheelTask", - "description": "If python_wheel_task, indicates that this job must execute a PythonWheel." + "description": "The task runs a Python wheel when the `python_wheel_task` field is present." }, "retry_on_timeout": { "$ref": "#/$defs/bool", @@ -3319,23 +4286,23 @@ }, "run_job_task": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.RunJobTask", - "description": "If run_job_task, indicates that this task must execute another job." + "description": "The task triggers another job when the `run_job_task` field is present." }, "spark_jar_task": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SparkJarTask", - "description": "If spark_jar_task, indicates that this task must run a JAR." + "description": "The task runs a JAR when the `spark_jar_task` field is present." }, "spark_python_task": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SparkPythonTask", - "description": "If spark_python_task, indicates that this task must run a Python file." + "description": "The task runs a Python file when the `spark_python_task` field is present." }, "spark_submit_task": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SparkSubmitTask", - "description": "If `spark_submit_task`, indicates that this task must be launched by the spark submit script. This task can run only on new clusters.\n\nIn the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark configurations.\n\n`master`, `deploy-mode`, and `executor-cores` are automatically configured by Databricks; you _cannot_ specify them in parameters.\n\nBy default, the Spark submit job uses all available memory (excluding reserved memory for Databricks services). You can set `--driver-memory`, and `--executor-memory` to a smaller value to leave some room for off-heap usage.\n\nThe `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths." + "description": "(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present. This task can run only on new clusters and is not compatible with serverless compute.\n\nIn the `new_cluster` specification, `libraries` and `spark_conf` are not supported. 
Instead, use `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark configurations.\n\n`master`, `deploy-mode`, and `executor-cores` are automatically configured by Databricks; you _cannot_ specify them in parameters.\n\nBy default, the Spark submit job uses all available memory (excluding reserved memory for Databricks services). You can set `--driver-memory`, and `--executor-memory` to a smaller value to leave some room for off-heap usage.\n\nThe `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths." }, "sql_task": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SqlTask", - "description": "If sql_task, indicates that this job must execute a SQL task." + "description": "The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when the `sql_task` field is present." }, "task_key": { "$ref": "#/$defs/string", @@ -3360,7 +4327,7 @@ ] }, "jobs.TaskDependency": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -3383,7 +4350,7 @@ ] }, "jobs.TaskEmailNotifications": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -3421,7 +4388,7 @@ ] }, "jobs.TaskNotificationSettings": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -3447,7 +4414,7 @@ ] }, "jobs.TriggerSettings": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -3480,7 +4447,7 @@ ] }, "jobs.Webhook": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -3498,7 +4465,7 @@ ] }, "jobs.WebhookNotifications": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -3532,7 +4499,7 @@ ] }, "ml.ExperimentTag": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -3554,7 +4521,7 @@ ] }, "ml.ModelTag": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -3576,7 +4543,7 @@ ] }, "ml.ModelVersion": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -3614,12 +4581,7 @@ }, "status": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/ml.ModelVersionStatus", - "description": "Current status of `model_version`", - "enum": [ - "PENDING_REGISTRATION", - "FAILED_REGISTRATION", - "READY" - ] + "description": "Current status of `model_version`" }, "status_message": { "$ref": "#/$defs/string", @@ -3647,10 +4609,24 @@ ] }, "ml.ModelVersionStatus": { - "type": "string" + "oneOf": [ + { + "type": "string", + "description": "Current status of `model_version`", + "enum": [ + "PENDING_REGISTRATION", + "FAILED_REGISTRATION", + "READY" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] }, "ml.ModelVersionTag": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -3672,7 +4648,7 @@ ] }, "pipelines.CronTrigger": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -3691,13 +4667,42 @@ } ] }, + "pipelines.DayOfWeek": { + "oneOf": [ + { + "type": "string", + "description": "Days of week in which the restart is allowed to happen (within a five-hour window starting at start_hour).\nIf not specified all days of the week will be used.", + "enum": [ + "MONDAY", + "TUESDAY", + "WEDNESDAY", + "THURSDAY", + "FRIDAY", + "SATURDAY", + "SUNDAY" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "pipelines.DeploymentKind": { - "type": "string", - "description": "The deployment method that manages the pipeline:\n- BUNDLE: The pipeline is managed by a Databricks 
Asset Bundle.\n", - "enum": ["BUNDLE"] + "oneOf": [ + { + "type": "string", + "description": "The deployment method that manages the pipeline:\n- BUNDLE: The pipeline is managed by a Databricks Asset Bundle.\n", + "enum": ["BUNDLE"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] }, "pipelines.FileLibrary": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -3715,7 +4720,7 @@ ] }, "pipelines.Filters": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -3737,17 +4742,21 @@ ] }, "pipelines.IngestionConfig": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { + "report": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.ReportSpec", + "description": "Select a specific source report." + }, "schema": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.SchemaSpec", - "description": "Select tables from a specific source schema." + "description": "Select all tables from a specific source schema." }, "table": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpec", - "description": "Select tables from a specific source table." + "description": "Select a specific source table." } }, "additionalProperties": false @@ -3759,13 +4768,17 @@ ] }, "pipelines.IngestionGatewayPipelineDefinition": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { "connection_id": { "$ref": "#/$defs/string", - "description": "Immutable. The Unity Catalog connection this gateway pipeline uses to communicate with the source." + "description": "[Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source." + }, + "connection_name": { + "$ref": "#/$defs/string", + "description": "Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source." }, "gateway_storage_catalog": { "$ref": "#/$defs/string", @@ -3789,17 +4802,17 @@ ] }, "pipelines.IngestionPipelineDefinition": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { "connection_name": { "$ref": "#/$defs/string", - "description": "Immutable. The Unity Catalog connection this ingestion pipeline uses to communicate with the source. Specify either ingestion_gateway_id or connection_name." + "description": "Immutable. The Unity Catalog connection that this ingestion pipeline uses to communicate with the source. This is used with connectors for applications like Salesforce, Workday, and so on." }, "ingestion_gateway_id": { "$ref": "#/$defs/string", - "description": "Immutable. Identifier for the ingestion gateway used by this ingestion pipeline to communicate with the source. Specify either ingestion_gateway_id or connection_name." + "description": "Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate with the source database. This is used with connectors to databases like SQL Server." 
}, "objects": { "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionConfig", @@ -3819,7 +4832,7 @@ ] }, "pipelines.ManualTrigger": { - "anyOf": [ + "oneOf": [ { "type": "object", "additionalProperties": false @@ -3831,7 +4844,7 @@ ] }, "pipelines.NotebookLibrary": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -3849,7 +4862,7 @@ ] }, "pipelines.Notifications": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -3871,7 +4884,7 @@ ] }, "pipelines.PipelineCluster": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -3961,7 +4974,7 @@ ] }, "pipelines.PipelineClusterAutoscale": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -3975,8 +4988,7 @@ }, "mode": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PipelineClusterAutoscaleMode", - "description": "Databricks Enhanced Autoscaling optimizes cluster utilization by automatically\nallocating cluster resources based on workload volume, with minimal impact to\nthe data processing latency of your pipelines. Enhanced Autoscaling is available\nfor `updates` clusters only. The legacy autoscaling feature is used for `maintenance`\nclusters.\n", - "enum": ["ENHANCED", "LEGACY"] + "description": "Databricks Enhanced Autoscaling optimizes cluster utilization by automatically\nallocating cluster resources based on workload volume, with minimal impact to\nthe data processing latency of your pipelines. Enhanced Autoscaling is available\nfor `updates` clusters only. The legacy autoscaling feature is used for `maintenance`\nclusters.\n" } }, "additionalProperties": false, @@ -3989,10 +5001,20 @@ ] }, "pipelines.PipelineClusterAutoscaleMode": { - "type": "string" + "oneOf": [ + { + "type": "string", + "description": "Databricks Enhanced Autoscaling optimizes cluster utilization by automatically\nallocating cluster resources based on workload volume, with minimal impact to\nthe data processing latency of your pipelines. Enhanced Autoscaling is available\nfor `updates` clusters only. The legacy autoscaling feature is used for `maintenance`\nclusters.\n", + "enum": ["ENHANCED", "LEGACY"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] }, "pipelines.PipelineDeployment": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -4014,7 +5036,7 @@ ] }, "pipelines.PipelineLibrary": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -4024,19 +5046,73 @@ }, "jar": { "$ref": "#/$defs/string", - "description": "URI of the jar to be installed. Currently only DBFS is supported.\n" + "description": "URI of the jar to be installed. Currently only DBFS is supported.\n" + }, + "maven": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.MavenLibrary", + "description": "Specification of a maven library to be installed.\n" + }, + "notebook": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.NotebookLibrary", + "description": "The path to a notebook that defines a pipeline and is stored in the Databricks workspace.\n" + }, + "whl": { + "$ref": "#/$defs/string", + "description": "URI of the whl to be installed." 
+ } + }, + "additionalProperties": false + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "pipelines.PipelineTrigger": { + "oneOf": [ + { + "type": "object", + "properties": { + "cron": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.CronTrigger" + }, + "manual": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.ManualTrigger" + } + }, + "additionalProperties": false + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "pipelines.ReportSpec": { + "oneOf": [ + { + "type": "object", + "properties": { + "destination_catalog": { + "$ref": "#/$defs/string", + "description": "Required. Destination catalog to store table." }, - "maven": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.MavenLibrary", - "description": "Specification of a maven library to be installed.\n" + "destination_schema": { + "$ref": "#/$defs/string", + "description": "Required. Destination schema to store table." }, - "notebook": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.NotebookLibrary", - "description": "The path to a notebook that defines a pipeline and is stored in the Databricks workspace.\n" + "destination_table": { + "$ref": "#/$defs/string", + "description": "Required. Destination table name. The pipeline fails if a table with that name already exists." }, - "whl": { + "source_url": { "$ref": "#/$defs/string", - "description": "URI of the whl to be installed." + "description": "Required. Report URL in the source system." + }, + "table_configuration": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig", + "description": "Configuration settings to control the ingestion of tables. These settings override the table_configuration defined in the IngestionPipelineDefinition object." } }, "additionalProperties": false @@ -4047,19 +5123,26 @@ } ] }, - "pipelines.PipelineTrigger": { - "anyOf": [ + "pipelines.RestartWindow": { + "oneOf": [ { "type": "object", "properties": { - "cron": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.CronTrigger" + "days_of_week": { + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.DayOfWeek", + "description": "Days of week in which the restart is allowed to happen (within a five-hour window starting at start_hour).\nIf not specified all days of the week will be used." }, - "manual": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.ManualTrigger" + "start_hour": { + "$ref": "#/$defs/int", + "description": "An integer between 0 and 23 denoting the start hour for the restart window in the 24-hour day.\nContinuous pipeline restart is triggered only within a five-hour window starting at this hour." + }, + "time_zone_id": { + "$ref": "#/$defs/string", + "description": "Time zone id of restart window. See https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html for details.\nIf not specified, UTC will be used." 
} }, - "additionalProperties": false + "additionalProperties": false, + "required": ["start_hour"] }, { "type": "string", @@ -4068,7 +5151,7 @@ ] }, "pipelines.SchemaSpec": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -4102,7 +5185,7 @@ ] }, "pipelines.TableSpec": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -4116,7 +5199,7 @@ }, "destination_table": { "$ref": "#/$defs/string", - "description": "Optional. Destination table name. The pipeline fails If a table with that name already exists. If not set, the source table name is used." + "description": "Optional. Destination table name. The pipeline fails if a table with that name already exists. If not set, the source table name is used." }, "source_catalog": { "$ref": "#/$defs/string", @@ -4144,7 +5227,7 @@ ] }, "pipelines.TableSpecificConfig": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -4158,8 +5241,11 @@ }, "scd_type": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfigScdType", - "description": "The SCD type to use to ingest the table.", - "enum": ["SCD_TYPE_1", "SCD_TYPE_2"] + "description": "The SCD type to use to ingest the table." + }, + "sequence_by": { + "$ref": "#/$defs/slice/string", + "description": "The column names specifying the logical order of events in the source data. Delta Live Tables uses this sequencing to handle change events that arrive out of order." } }, "additionalProperties": false @@ -4171,18 +5257,30 @@ ] }, "pipelines.TableSpecificConfigScdType": { - "type": "string" + "oneOf": [ + { + "type": "string", + "description": "The SCD type to use to ingest the table.", + "enum": ["SCD_TYPE_1", "SCD_TYPE_2"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] }, "serving.Ai21LabsConfig": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { "ai21labs_api_key": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The Databricks secret key reference for an AI21 Labs API key. If you prefer to paste your API key directly, see `ai21labs_api_key_plaintext`. You must provide an API key using one of the following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`." }, "ai21labs_api_key_plaintext": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "An AI21 Labs API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `ai21labs_api_key`. You must provide an API key using one of the following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`." } }, "additionalProperties": false @@ -4194,7 +5292,7 @@ ] }, "serving.AiGatewayConfig": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -4224,7 +5322,7 @@ ] }, "serving.AiGatewayGuardrailParameters": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -4254,14 +5352,13 @@ ] }, "serving.AiGatewayGuardrailPiiBehavior": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { "behavior": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailPiiBehaviorBehavior", - "description": "Behavior for PII filter. Currently only 'BLOCK' is supported. 
If 'BLOCK' is set for the input guardrail and the request contains PII, the request is not sent to the model server and 400 status code is returned; if 'BLOCK' is set for the output guardrail and the model response contains PII, the PII info in the response is redacted and 400 status code is returned.", - "enum": ["NONE", "BLOCK"] + "description": "Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input guardrail and the request contains PII, the request is not sent to the model server and 400 status code is returned; if 'BLOCK' is set for the output guardrail and the model response contains PII, the PII info in the response is redacted and 400 status code is returned." } }, "additionalProperties": false, @@ -4274,10 +5371,20 @@ ] }, "serving.AiGatewayGuardrailPiiBehaviorBehavior": { - "type": "string" + "oneOf": [ + { + "type": "string", + "description": "Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input guardrail and the request contains PII, the request is not sent to the model server and 400 status code is returned; if 'BLOCK' is set for the output guardrail and the model response contains PII, the PII info in the response is redacted and 400 status code is returned.", + "enum": ["NONE", "BLOCK"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] }, "serving.AiGatewayGuardrails": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -4299,7 +5406,7 @@ ] }, "serving.AiGatewayInferenceTableConfig": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -4329,7 +5436,7 @@ ] }, "serving.AiGatewayRateLimit": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -4339,13 +5446,11 @@ }, "key": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimitKey", - "description": "Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.", - "enum": ["user", "endpoint"] + "description": "Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified." }, "renewal_period": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimitRenewalPeriod", - "description": "Renewal period field for a rate limit. Currently, only 'minute' is supported.", - "enum": ["minute"] + "description": "Renewal period field for a rate limit. Currently, only 'minute' is supported." } }, "additionalProperties": false, @@ -4358,13 +5463,33 @@ ] }, "serving.AiGatewayRateLimitKey": { - "type": "string" + "oneOf": [ + { + "type": "string", + "description": "Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.", + "enum": ["user", "endpoint"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] }, "serving.AiGatewayRateLimitRenewalPeriod": { - "type": "string" + "oneOf": [ + { + "type": "string", + "description": "Renewal period field for a rate limit. 
Currently, only 'minute' is supported.", + "enum": ["minute"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] }, "serving.AiGatewayUsageTrackingConfig": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -4382,7 +5507,7 @@ ] }, "serving.AmazonBedrockConfig": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -4408,8 +5533,7 @@ }, "bedrock_provider": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfigBedrockProvider", - "description": "The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: Anthropic, Cohere, AI21Labs, Amazon.", - "enum": ["anthropic", "cohere", "ai21labs", "amazon"] + "description": "The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: Anthropic, Cohere, AI21Labs, Amazon." } }, "additionalProperties": false, @@ -4422,10 +5546,20 @@ ] }, "serving.AmazonBedrockConfigBedrockProvider": { - "type": "string" + "oneOf": [ + { + "type": "string", + "description": "The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: Anthropic, Cohere, AI21Labs, Amazon.", + "enum": ["anthropic", "cohere", "ai21labs", "amazon"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] }, "serving.AnthropicConfig": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -4447,7 +5581,7 @@ ] }, "serving.AutoCaptureConfigInput": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -4477,7 +5611,7 @@ ] }, "serving.CohereConfig": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -4503,7 +5637,7 @@ ] }, "serving.DatabricksModelServingConfig": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -4530,7 +5664,7 @@ ] }, "serving.EndpointCoreConfigInput": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -4560,7 +5694,7 @@ ] }, "serving.EndpointTag": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -4583,7 +5717,7 @@ ] }, "serving.ExternalModel": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -4625,17 +5759,7 @@ }, "provider": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ExternalModelProvider", - "description": "The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic',\n'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.\",\n", - "enum": [ - "ai21labs", - "anthropic", - "amazon-bedrock", - "cohere", - "databricks-model-serving", - "google-cloud-vertex-ai", - "openai", - "palm" - ] + "description": "The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic',\n'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.\",\n" }, "task": { "$ref": "#/$defs/string", @@ -4652,24 +5776,47 @@ ] }, "serving.ExternalModelProvider": { - "type": "string" + "oneOf": [ + { + "type": "string", + "description": "The name of the provider for the external model. 
Currently, the supported providers are 'ai21labs', 'anthropic',\n'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.\n", + "enum": [ + "ai21labs", + "anthropic", + "amazon-bedrock", + "cohere", + "databricks-model-serving", + "google-cloud-vertex-ai", + "openai", + "palm" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] }, "serving.GoogleCloudVertexAiConfig": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { "private_key": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The Databricks secret key reference for a private key for the service account which has access to the Google Cloud Vertex AI Service. See [Best practices for managing service account keys](https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys). If you prefer to paste your API key directly, see `private_key_plaintext`. You must provide an API key using one of the following fields: `private_key` or `private_key_plaintext`" }, "private_key_plaintext": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The private key for the service account which has access to the Google Cloud Vertex AI Service provided as a plaintext secret. See [Best practices for managing service account keys](https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys). If you prefer to reference your key using Databricks Secrets, see `private_key`. You must provide an API key using one of the following fields: `private_key` or `private_key_plaintext`." }, "project_id": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "This is the Google Cloud project id that the service account is associated with." }, "region": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "This is the region for the Google Cloud Vertex AI Service. See [supported regions](https://cloud.google.com/vertex-ai/docs/general/locations) for more details. Some models are only available in specific regions." 
} }, "additionalProperties": false @@ -4681,42 +5828,53 @@ ] }, "serving.OpenAiConfig": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { "microsoft_entra_client_id": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "This field is only required for Azure AD OpenAI and is the Microsoft Entra Client ID.\n" }, "microsoft_entra_client_secret": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The Databricks secret key reference for a client secret used for Microsoft Entra ID authentication.\nIf you prefer to paste your client secret directly, see `microsoft_entra_client_secret_plaintext`.\nYou must provide an API key using one of the following fields: `microsoft_entra_client_secret` or `microsoft_entra_client_secret_plaintext`.\n" }, "microsoft_entra_client_secret_plaintext": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The client secret used for Microsoft Entra ID authentication provided as a plaintext string.\nIf you prefer to reference your key using Databricks Secrets, see `microsoft_entra_client_secret`.\nYou must provide an API key using one of the following fields: `microsoft_entra_client_secret` or `microsoft_entra_client_secret_plaintext`.\n" }, "microsoft_entra_tenant_id": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "This field is only required for Azure AD OpenAI and is the Microsoft Entra Tenant ID.\n" }, "openai_api_base": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "This is a field to provide a customized base URl for the OpenAI API.\nFor Azure OpenAI, this field is required, and is the base URL for the Azure OpenAI API service\nprovided by Azure.\nFor other OpenAI API types, this field is optional, and if left unspecified, the standard OpenAI base URL is used.\n" }, "openai_api_key": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The Databricks secret key reference for an OpenAI API key using the OpenAI or Azure service. If you prefer to paste your API key directly, see `openai_api_key_plaintext`. You must provide an API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`." }, "openai_api_key_plaintext": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The OpenAI API key using the OpenAI or Azure service provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `openai_api_key`. You must provide an API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`." }, "openai_api_type": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "This is an optional field to specify the type of OpenAI API to use.\nFor Azure OpenAI, this field is required, and adjust this parameter to represent the preferred security\naccess validation protocol. For access token validation, use azure. 
For authentication using Azure Active\nDirectory (Azure AD), use azuread.\n" }, "openai_api_version": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "This is an optional field to specify the OpenAI API version.\nFor Azure OpenAI, this field is required, and is the version of the Azure OpenAI service to\nutilize, specified by a date.\n" }, "openai_deployment_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "This field is only required for Azure OpenAI and is the name of the deployment resource for the\nAzure OpenAI service.\n" }, "openai_organization": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "This is an optional field to specify the organization in OpenAI or Azure OpenAI.\n" } }, "additionalProperties": false @@ -4728,15 +5886,17 @@ ] }, "serving.PaLmConfig": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { "palm_api_key": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The Databricks secret key reference for a PaLM API key. If you prefer to paste your API key directly, see `palm_api_key_plaintext`. You must provide an API key using one of the following fields: `palm_api_key` or `palm_api_key_plaintext`." }, "palm_api_key_plaintext": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "description": "The PaLM API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `palm_api_key`. You must provide an API key using one of the following fields: `palm_api_key` or `palm_api_key_plaintext`." } }, "additionalProperties": false @@ -4748,7 +5908,7 @@ ] }, "serving.RateLimit": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -4758,13 +5918,11 @@ }, "key": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.RateLimitKey", - "description": "Key field for a serving endpoint rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.", - "enum": ["user", "endpoint"] + "description": "Key field for a serving endpoint rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified." }, "renewal_period": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.RateLimitRenewalPeriod", - "description": "Renewal period field for a serving endpoint rate limit. Currently, only 'minute' is supported.", - "enum": ["minute"] + "description": "Renewal period field for a serving endpoint rate limit. Currently, only 'minute' is supported." } }, "additionalProperties": false, @@ -4777,13 +5935,33 @@ ] }, "serving.RateLimitKey": { - "type": "string" + "oneOf": [ + { + "type": "string", + "description": "Key field for a serving endpoint rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.", + "enum": ["user", "endpoint"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] }, "serving.RateLimitRenewalPeriod": { - "type": "string" + "oneOf": [ + { + "type": "string", + "description": "Renewal period field for a serving endpoint rate limit. 
Currently, only 'minute' is supported.", + "enum": ["minute"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] }, "serving.Route": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -4806,7 +5984,7 @@ ] }, "serving.ServedEntityInput": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -4864,7 +6042,7 @@ ] }, "serving.ServedModelInput": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -4902,19 +6080,11 @@ }, "workload_size": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ServedModelInputWorkloadSize", - "description": "The workload size of the served model. The workload size corresponds to a range of provisioned concurrency that the compute will autoscale between.\nA single unit of provisioned concurrency can process one request at a time.\nValid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency).\nIf scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size will be 0.\n", - "enum": ["Small", "Medium", "Large"] + "description": "The workload size of the served model. The workload size corresponds to a range of provisioned concurrency that the compute will autoscale between.\nA single unit of provisioned concurrency can process one request at a time.\nValid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency).\nIf scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size will be 0.\n" }, "workload_type": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ServedModelInputWorkloadType", - "description": "The workload type of the served model. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is\n\"CPU\". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others.\nSee the available [GPU types](https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).\n", - "enum": [ - "CPU", - "GPU_SMALL", - "GPU_MEDIUM", - "GPU_LARGE", - "MULTIGPU_MEDIUM" - ] + "description": "The workload type of the served model. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is\n\"CPU\". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others.\nSee the available [GPU types](https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).\n" } }, "additionalProperties": false, @@ -4931,13 +6101,39 @@ ] }, "serving.ServedModelInputWorkloadSize": { - "type": "string" + "oneOf": [ + { + "type": "string", + "description": "The workload size of the served model. 
The workload size corresponds to a range of provisioned concurrency that the compute will autoscale between.\nA single unit of provisioned concurrency can process one request at a time.\nValid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency).\nIf scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size will be 0.\n", + "enum": ["Small", "Medium", "Large"] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] }, "serving.ServedModelInputWorkloadType": { - "type": "string" + "oneOf": [ + { + "type": "string", + "description": "The workload type of the served model. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is\n\"CPU\". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others.\nSee the available [GPU types](https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).\n", + "enum": [ + "CPU", + "GPU_SMALL", + "GPU_MEDIUM", + "GPU_LARGE", + "MULTIGPU_MEDIUM" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] }, "serving.TrafficConfig": { - "anyOf": [ + "oneOf": [ { "type": "object", "properties": { @@ -4959,7 +6155,7 @@ } }, "int": { - "anyOf": [ + "oneOf": [ { "type": "integer" }, @@ -4986,7 +6182,7 @@ ] }, "int64": { - "anyOf": [ + "oneOf": [ { "type": "integer" }, @@ -5019,8 +6215,22 @@ "cli": { "bundle": { "config": { + "resources.App": { + "oneOf": [ + { + "type": "object", + "additionalProperties": { + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.App" + } + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "resources.Cluster": { - "anyOf": [ + "oneOf": [ { "type": "object", "additionalProperties": { @@ -5033,8 +6243,22 @@ } ] }, + "resources.Dashboard": { + "oneOf": [ + { + "type": "object", + "additionalProperties": { + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Dashboard" + } + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "resources.Job": { - "anyOf": [ + "oneOf": [ { "type": "object", "additionalProperties": { @@ -5048,7 +6272,7 @@ ] }, "resources.MlflowExperiment": { - "anyOf": [ + "oneOf": [ { "type": "object", "additionalProperties": { @@ -5062,7 +6286,7 @@ ] }, "resources.MlflowModel": { - "anyOf": [ + "oneOf": [ { "type": "object", "additionalProperties": { @@ -5076,7 +6300,7 @@ ] }, "resources.ModelServingEndpoint": { - "anyOf": [ + "oneOf": [ { "type": "object", "additionalProperties": { @@ -5090,7 +6314,7 @@ ] }, "resources.Pipeline": { - "anyOf": [ + "oneOf": [ { "type": "object", "additionalProperties": { @@ -5104,7 +6328,7 @@ ] }, "resources.QualityMonitor": { - "anyOf": [ + "oneOf": [ { "type": "object", "additionalProperties": { @@ -5118,7 +6342,7 @@ ] }, "resources.RegisteredModel": { - "anyOf": [ + "oneOf": [ { "type": "object", "additionalProperties": { @@ -5132,7 +6356,7 @@ ] }, "resources.Schema": { - "anyOf": [ + "oneOf": [ { "type": "object", "additionalProperties": { @@ -5145,8 +6369,22 @@ } ] }, + "resources.Volume": { + "oneOf": [ + { + "type": "object", + "additionalProperties": { + "$ref": 
"#/$defs/github.com/databricks/cli/bundle/config/resources.Volume" + } + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "variable.TargetVariable": { - "anyOf": [ + "oneOf": [ { "type": "object", "additionalProperties": { @@ -5160,7 +6398,7 @@ ] }, "variable.Variable": { - "anyOf": [ + "oneOf": [ { "type": "object", "additionalProperties": { @@ -5175,7 +6413,7 @@ } }, "config.Artifact": { - "anyOf": [ + "oneOf": [ { "type": "object", "additionalProperties": { @@ -5189,7 +6427,7 @@ ] }, "config.Command": { - "anyOf": [ + "oneOf": [ { "type": "object", "additionalProperties": { @@ -5203,7 +6441,7 @@ ] }, "config.Target": { - "anyOf": [ + "oneOf": [ { "type": "object", "additionalProperties": { @@ -5220,8 +6458,22 @@ } } }, + "interface": { + "oneOf": [ + { + "type": "object", + "additionalProperties": { + "$ref": "#/$defs/interface" + } + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "string": { - "anyOf": [ + "oneOf": [ { "type": "object", "additionalProperties": { @@ -5242,7 +6494,7 @@ "bundle": { "config": { "resources.Grant": { - "anyOf": [ + "oneOf": [ { "type": "array", "items": { @@ -5256,7 +6508,7 @@ ] }, "resources.Permission": { - "anyOf": [ + "oneOf": [ { "type": "array", "items": { @@ -5271,7 +6523,7 @@ } }, "config.ArtifactFile": { - "anyOf": [ + "oneOf": [ { "type": "array", "items": { @@ -5288,8 +6540,22 @@ }, "databricks-sdk-go": { "service": { + "apps.AppResource": { + "oneOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResource" + } + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "catalog.MonitorMetric": { - "anyOf": [ + "oneOf": [ { "type": "array", "items": { @@ -5303,7 +6569,7 @@ ] }, "compute.InitScriptInfo": { - "anyOf": [ + "oneOf": [ { "type": "array", "items": { @@ -5317,7 +6583,7 @@ ] }, "compute.Library": { - "anyOf": [ + "oneOf": [ { "type": "array", "items": { @@ -5331,7 +6597,7 @@ ] }, "jobs.JobCluster": { - "anyOf": [ + "oneOf": [ { "type": "array", "items": { @@ -5345,7 +6611,7 @@ ] }, "jobs.JobEnvironment": { - "anyOf": [ + "oneOf": [ { "type": "array", "items": { @@ -5359,7 +6625,7 @@ ] }, "jobs.JobParameterDefinition": { - "anyOf": [ + "oneOf": [ { "type": "array", "items": { @@ -5373,7 +6639,7 @@ ] }, "jobs.JobsHealthRule": { - "anyOf": [ + "oneOf": [ { "type": "array", "items": { @@ -5387,7 +6653,7 @@ ] }, "jobs.SqlTaskSubscription": { - "anyOf": [ + "oneOf": [ { "type": "array", "items": { @@ -5401,7 +6667,7 @@ ] }, "jobs.Task": { - "anyOf": [ + "oneOf": [ { "type": "array", "items": { @@ -5415,7 +6681,7 @@ ] }, "jobs.TaskDependency": { - "anyOf": [ + "oneOf": [ { "type": "array", "items": { @@ -5429,7 +6695,7 @@ ] }, "jobs.Webhook": { - "anyOf": [ + "oneOf": [ { "type": "array", "items": { @@ -5443,7 +6709,7 @@ ] }, "ml.ExperimentTag": { - "anyOf": [ + "oneOf": [ { "type": "array", "items": { @@ -5457,7 +6723,7 @@ ] }, "ml.ModelTag": { - "anyOf": [ + "oneOf": [ { "type": "array", "items": { @@ -5471,7 +6737,7 @@ ] }, "ml.ModelVersion": { - "anyOf": [ + "oneOf": [ { "type": "array", "items": { @@ -5485,7 +6751,7 @@ ] }, "ml.ModelVersionTag": { - "anyOf": [ + "oneOf": [ { "type": "array", "items": { @@ -5498,8 +6764,22 @@ } ] }, + "pipelines.DayOfWeek": { + "oneOf": [ + { + "type": "array", + "items": { + "$ref": 
"#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.DayOfWeek" + } + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "pipelines.IngestionConfig": { - "anyOf": [ + "oneOf": [ { "type": "array", "items": { @@ -5513,7 +6793,7 @@ ] }, "pipelines.Notifications": { - "anyOf": [ + "oneOf": [ { "type": "array", "items": { @@ -5527,7 +6807,7 @@ ] }, "pipelines.PipelineCluster": { - "anyOf": [ + "oneOf": [ { "type": "array", "items": { @@ -5541,7 +6821,7 @@ ] }, "pipelines.PipelineLibrary": { - "anyOf": [ + "oneOf": [ { "type": "array", "items": { @@ -5555,7 +6835,7 @@ ] }, "serving.AiGatewayRateLimit": { - "anyOf": [ + "oneOf": [ { "type": "array", "items": { @@ -5569,7 +6849,7 @@ ] }, "serving.EndpointTag": { - "anyOf": [ + "oneOf": [ { "type": "array", "items": { @@ -5583,7 +6863,7 @@ ] }, "serving.RateLimit": { - "anyOf": [ + "oneOf": [ { "type": "array", "items": { @@ -5597,7 +6877,7 @@ ] }, "serving.Route": { - "anyOf": [ + "oneOf": [ { "type": "array", "items": { @@ -5611,7 +6891,7 @@ ] }, "serving.ServedEntityInput": { - "anyOf": [ + "oneOf": [ { "type": "array", "items": { @@ -5625,7 +6905,7 @@ ] }, "serving.ServedModelInput": { - "anyOf": [ + "oneOf": [ { "type": "array", "items": { @@ -5643,7 +6923,7 @@ } }, "string": { - "anyOf": [ + "oneOf": [ { "type": "array", "items": { @@ -5664,41 +6944,59 @@ "type": "object", "properties": { "artifacts": { - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Artifact" + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Artifact", + "description": "Defines the attributes to build an artifact" }, "bundle": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Bundle" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Bundle", + "description": "The attributes of the bundle.", + "markdownDescription": "The attributes of the bundle. See [bundle](https://docs.databricks.com/dev-tools/bundles/settings.html#bundle)" }, "experimental": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Experimental" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Experimental", + "description": "Defines attributes for experimental features." }, "include": { - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "description": "Specifies a list of path globs that contain configuration files to include within the bundle.", + "markdownDescription": "Specifies a list of path globs that contain configuration files to include within the bundle. See [include](https://docs.databricks.com/dev-tools/bundles/settings.html#include)" }, "permissions": { - "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission" + "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission", + "description": "Defines the permissions to apply to experiments, jobs, pipelines, and models defined in the bundle", + "markdownDescription": "Defines the permissions to apply to experiments, jobs, pipelines, and models defined in the bundle. See [permissions](https://docs.databricks.com/dev-tools/bundles/settings.html#permissions) and [link](https://docs.databricks.com/dev-tools/bundles/permissions.html)." }, "presets": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Presets" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Presets", + "description": "Defines bundle deployment presets.", + "markdownDescription": "Defines bundle deployment presets. 
See [presets](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html#presets)." }, "resources": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Resources" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Resources", + "description": "Specifies information about the Databricks resources used by the bundle.", + "markdownDescription": "Specifies information about the Databricks resources used by the bundle. See [resources](https://docs.databricks.com/dev-tools/bundles/resources.html)." }, "run_as": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs", + "description": "The identity to use to run the bundle." }, "sync": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Sync" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Sync", + "description": "The files and file paths to include or exclude in the bundle.", + "markdownDescription": "The files and file paths to include or exclude in the bundle. See [Databricks Asset Bundles](https://docs.databricks.com/dev-tools/bundles/)." }, "targets": { - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Target" + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Target", + "description": "Defines deployment targets for the bundle." }, "variables": { - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/variable.Variable" + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/variable.Variable", + "description": "A map that defines the custom variables for the bundle, where each key is the name of the variable and the value is a map that defines the variable." }, "workspace": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Workspace" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Workspace", + "description": "Defines the Databricks workspace for the bundle." } }, - "additionalProperties": false + "additionalProperties": {} }
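The `\$\{(var(\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\[[0-9]+\])*)+)\}` pattern that recurs in the `oneOf` branches above is what lets a field hold a bundle variable reference such as `${var.cluster_id}` instead of a typed literal. A minimal sketch of how it matches; the sample strings are illustrative assumptions, and `re.search` is used because JSON Schema `pattern` matching is unanchored:

```python
import re

# The variable-reference pattern used throughout the oneOf branches above.
VAR_REF = re.compile(r"\$\{(var(\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\[[0-9]+\])*)+)\}")

samples = [
    "${var.cluster_id}",         # plain variable reference                  -> matches
    "${var.node_types[0]}",      # indexed reference                         -> matches
    "${var.team.cluster-name}",  # nested, hyphenated segments               -> matches
    "prefix-${var.env}-suffix",  # embedded reference; pattern is unanchored -> matches
    "${workspace.host}",         # not a var.* reference                     -> no match
    "cluster_id",                # ordinary literal                          -> no match
]

for s in samples:
    # JSON Schema "pattern" uses search (partial) semantics, not fullmatch.
    print(f"{s!r:28} {'matches' if VAR_REF.search(s) else 'no match'}")
```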
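The blanket `anyOf` → `oneOf` switch tightens validation: a value must now match exactly one branch rather than at least one. For the enum-plus-pattern pairs such as `serving.RateLimitKey`, the two branches are disjoint, so every previously valid value still passes. A rough sketch using the `jsonschema` package (an assumed dependency), with the definition inlined by hand:

```python
from jsonschema import Draft202012Validator

# serving.RateLimitKey, inlined from the definition above: either an enum
# literal or a ${var...} reference -- the two branches cannot both match.
rate_limit_key = {
    "oneOf": [
        {"type": "string", "enum": ["user", "endpoint"]},
        {
            "type": "string",
            "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}",
        },
    ]
}

validator = Draft202012Validator(rate_limit_key)
for value in ["user", "${var.limit_key}", "per-team"]:
    ok = not list(validator.iter_errors(value))
    print(f"{value!r}: {'valid' if ok else 'invalid'}")
# 'user' and '${var.limit_key}' each satisfy exactly one branch;
# 'per-team' satisfies neither, so oneOf rejects it.
```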
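One root-level change deserves a closer look: `"additionalProperties": false` became `"additionalProperties": {}`. The empty schema accepts any value, so unknown top-level keys now validate instead of being rejected. A small sketch with a hypothetical extra key, again assuming the `jsonschema` package:

```python
from jsonschema import Draft202012Validator

def is_valid(schema: dict, instance: dict) -> bool:
    return not list(Draft202012Validator(schema).iter_errors(instance))

# Stripped-down stand-in for the root object schema above.
root = {"type": "object", "properties": {"bundle": {"type": "object"}}}
doc = {"bundle": {}, "x_custom": 42}  # "x_custom" is a hypothetical unknown key

print(is_valid({**root, "additionalProperties": False}, doc))  # False: unknown key rejected
print(is_valid({**root, "additionalProperties": {}}, doc))     # True: empty schema allows anything
```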