diff --git a/.codegen.json b/.codegen.json index 25c666260..a1886bd80 100644 --- a/.codegen.json +++ b/.codegen.json @@ -1,5 +1,6 @@ { "formatter": "yapf -pri $FILENAMES && autoflake -i $FILENAMES && isort $FILENAMES", + "changelog_config": ".codegen/changelog_config.yml", "template_libraries": [ ".codegen/lib.tmpl" ], diff --git a/.codegen/__init__.py.tmpl b/.codegen/__init__.py.tmpl index d5b83e3f2..d54e9dfff 100644 --- a/.codegen/__init__.py.tmpl +++ b/.codegen/__init__.py.tmpl @@ -5,10 +5,12 @@ from databricks.sdk.credentials_provider import CredentialsStrategy from databricks.sdk.mixins.files import DbfsExt from databricks.sdk.mixins.compute import ClustersExt from databricks.sdk.mixins.workspace import WorkspaceExt -{{- range .Services}} {{if not .IsDataPlane}} -from databricks.sdk.service.{{.Package.Name}} import {{.PascalName}}API{{end}}{{end}} +from databricks.sdk.mixins.open_ai_client import ServingEndpointsExt +{{- range .Services}} +from databricks.sdk.service.{{.Package.Name}} import {{.PascalName}}API{{end}} from databricks.sdk.service.provisioning import Workspace from databricks.sdk import azure +from typing import Optional {{$args := list "host" "account_id" "username" "password" "client_id" "client_secret" "token" "profile" "config_file" "azure_workspace_resource_id" "azure_client_secret" @@ -16,7 +18,7 @@ from databricks.sdk import azure "google_credentials" "google_service_account" }} {{- define "api" -}} - {{- $mixins := dict "ClustersAPI" "ClustersExt" "DbfsAPI" "DbfsExt" "WorkspaceAPI" "WorkspaceExt" -}} + {{- $mixins := dict "ClustersAPI" "ClustersExt" "DbfsAPI" "DbfsExt" "WorkspaceAPI" "WorkspaceExt" "ServingEndpointsAPI" "ServingEndpointsExt" -}} {{- $genApi := concat .PascalName "API" -}} {{- getOrDefault $mixins $genApi $genApi -}} {{- end -}} @@ -41,14 +43,14 @@ class WorkspaceClient: """ The WorkspaceClient is a client for the workspace-level Databricks REST API. 
""" - def __init__(self, *{{range $args}}, {{.}}: str = None{{end}}, - debug_truncate_bytes: int = None, - debug_headers: bool = None, + def __init__(self, *{{range $args}}, {{.}}: Optional[str] = None{{end}}, + debug_truncate_bytes: Optional[int] = None, + debug_headers: Optional[bool] = None, product="unknown", product_version="0.0.0", - credentials_strategy: CredentialsStrategy = None, - credentials_provider: CredentialsStrategy = None, - config: client.Config = None): + credentials_strategy: Optional[CredentialsStrategy] = None, + credentials_provider: Optional[CredentialsStrategy] = None, + config: Optional[client.Config] = None): if not config: config = client.Config({{range $args}}{{.}}={{.}}, {{end}} credentials_strategy=credentials_strategy, @@ -61,8 +63,20 @@ class WorkspaceClient: self._dbutils = _make_dbutils(self._config) self._api_client = client.ApiClient(self._config) - {{- range .Services}}{{if and (not .IsAccounts) (not .HasParent) (not .IsDataPlane)}} - self._{{.SnakeName}} = {{template "api" .}}(self._api_client){{end -}}{{end}} + {{- range .Services}}{{if and (not .IsAccounts) (not .HasParent) .HasDataPlaneAPI (not .IsDataPlane)}} + {{.SnakeName}} = {{template "api" .}}(self._api_client){{end -}}{{end}} + + {{- range .Services}} + {{- if and (not .IsAccounts) (not .HasParent)}} + {{- if .IsDataPlane}} + self._{{.SnakeName}} = {{template "api" .}}(self._api_client, {{.ControlPlaneService.SnakeName}}) + {{- else if .HasDataPlaneAPI}} + self._{{.SnakeName}} = {{.SnakeName}} + {{- else}} + self._{{.SnakeName}} = {{template "api" .}}(self._api_client) + {{- end -}} + {{- end -}} + {{end}} @property def config(self) -> client.Config: @@ -76,7 +90,7 @@ class WorkspaceClient: def dbutils(self) -> dbutils.RemoteDbUtils: return self._dbutils - {{- range .Services}}{{if and (not .IsAccounts) (not .HasParent) (not .IsDataPlane)}} + {{- range .Services}}{{if and (not .IsAccounts) (not .HasParent)}} @property def {{.SnakeName}}(self) -> {{template "api" .}}: 
{{if .Description}}"""{{.Summary}}"""{{end}} @@ -98,14 +112,14 @@ class AccountClient: The AccountClient is a client for the account-level Databricks REST API. """ - def __init__(self, *{{range $args}}, {{.}}: str = None{{end}}, - debug_truncate_bytes: int = None, - debug_headers: bool = None, + def __init__(self, *{{range $args}}, {{.}}: Optional[str] = None{{end}}, + debug_truncate_bytes: Optional[int] = None, + debug_headers: Optional[bool] = None, product="unknown", product_version="0.0.0", - credentials_strategy: CredentialsStrategy = None, - credentials_provider: CredentialsStrategy = None, - config: client.Config = None): + credentials_strategy: Optional[CredentialsStrategy] = None, + credentials_provider: Optional[CredentialsStrategy] = None, + config: Optional[client.Config] = None): if not config: config = client.Config({{range $args}}{{.}}={{.}}, {{end}} credentials_strategy=credentials_strategy, @@ -117,8 +131,20 @@ class AccountClient: self._config = config.copy() self._api_client = client.ApiClient(self._config) - {{- range .Services}}{{if and .IsAccounts (not .HasParent) (not .IsDataPlane)}} - self._{{(.TrimPrefix "account").SnakeName}} = {{template "api" .}}(self._api_client){{end -}}{{end}} + {{- range .Services}}{{if and .IsAccounts (not .HasParent) .HasDataPlaneAPI (not .IsDataPlane)}} + {{(.TrimPrefix "account").SnakeName}} = {{template "api" .}}(self._api_client){{end -}}{{end}} + + {{- range .Services}} + {{- if and .IsAccounts (not .HasParent)}} + {{- if .IsDataPlane}} + self._{{(.TrimPrefix "account").SnakeName}} = {{template "api" .}}(self._api_client, {{.ControlPlaneService.SnakeName}}) + {{- else if .HasDataPlaneAPI}} + self._{{(.TrimPrefix "account").SnakeName}} = {{(.TrimPrefix "account").SnakeName}} + {{- else}} + self._{{(.TrimPrefix "account").SnakeName}} = {{template "api" .}}(self._api_client) + {{- end -}} + {{- end -}} + {{end}} @property def config(self) -> client.Config: @@ -128,7 +154,7 @@ class AccountClient: def 
api_client(self) -> client.ApiClient: return self._api_client - {{- range .Services}}{{if and .IsAccounts (not .HasParent) (not .IsDataPlane)}} + {{- range .Services}}{{if and .IsAccounts (not .HasParent)}} @property def {{(.TrimPrefix "account").SnakeName}}(self) -> {{template "api" .}}:{{if .Description}} """{{.Summary}}"""{{end}} diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index c4b47ca14..2d9cb6d86 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -7437dabb9dadee402c1fc060df4c1ce8cc5369f0 \ No newline at end of file +cf9c61453990df0f9453670f2fe68e1b128647a2 \ No newline at end of file diff --git a/.codegen/changelog.md.tmpl b/.codegen/changelog.md.tmpl index 37bf395a7..c9f2e87c4 100644 --- a/.codegen/changelog.md.tmpl +++ b/.codegen/changelog.md.tmpl @@ -1,13 +1,17 @@ # Version changelog ## {{.Version}} +{{- range .GroupChanges}} -{{range .Changes -}} +### {{.Type.Message}} +{{range .Changes}} * {{.}}. -{{end}}{{- if .ApiChanges}} -API Changes: -{{range .ApiChanges}} - * {{.Action}} {{template "what" .}}{{if .Extra}} {{.Extra}}{{with .Other}} {{template "what" .}}{{end}}{{end}}. +{{- end}} +{{end}} +{{if .ApiChanges}} +### API Changes: +{{range .ApiChanges.GroupDiff}} + * {{.Action}} {{template "group-what" .}}{{if .Extra}} {{.Extra}}{{with .Other}} {{template "other-what" .}}{{end}}{{end}}. 
{{- end}} OpenAPI SHA: {{.Sha}}, Date: {{.Changed}} @@ -20,7 +24,35 @@ Dependency updates: ## {{.PrevVersion}} -{{- define "what" -}} +{{- define "group-what" -}} +{{if gt (len .Changes) 1 -}} {{template "single-what" .Changes.First}}{{end -}} +{{range .Changes.Middle -}}, {{template "single-what" .}}{{end -}} +{{if gt (len .Changes) 1}} and {{end}}{{template "single-what" .Changes.Last}}{{template "suffix-what" .}} +{{- end -}} + +{{- define "single-what" -}} + {{if eq .X "package" -}} + `databricks.sdk.service.{{.Package.Name}}` + {{- else if eq .X "service" -}} + {{template "service" .Service}} + {{- else if eq .X "method" -}} + `{{.Method.SnakeName}}()` + {{- else if eq .X "entity" -}} + {{template "entity" .Entity}} + {{- else if eq .X "field" -}} + `{{.Field.SnakeName}}` + {{- end}} +{{- end -}} + +{{- define "suffix-what" -}} + {{if eq .Type "package" }} package{{if gt (len .Changes) 1}}s{{end}} + {{- else if eq .Type "method" }} method{{if gt (len .Changes) 1}}s{{end}} for {{template "service" .Parent.Service}} + {{- else if eq .Type "entity" }} dataclass{{if gt (len .Changes) 1}}es{{end}} + {{- else if eq .Type "field" }} field{{if gt (len .Changes) 1}}s{{end}} for {{template "entity" .Parent.Entity}} + {{- end}} +{{- end -}} + +{{- define "other-what" -}} {{if eq .X "package" -}} `databricks.sdk.service.{{.Package.Name}}` package {{- else if eq .X "service" -}} diff --git a/.codegen/changelog_config.yml b/.codegen/changelog_config.yml new file mode 100644 index 000000000..ed2fe1046 --- /dev/null +++ b/.codegen/changelog_config.yml @@ -0,0 +1,14 @@ +change_types: + - message: New Features and Improvements + tag: "[Feature]" + - message: Bug Fixes + tag: "[Fix]" + - message: Documentation + tag: "[Doc]" + - message: Internal Changes + tag: "[Internal]" + # Does not appear in the Changelog. Only for PR validation. 
+ - message: Release + tag: "[Release]" + # Default for messages without a tag + - message: Other Changes \ No newline at end of file diff --git a/.codegen/error_overrides.py.tmpl b/.codegen/error_overrides.py.tmpl index 6bb85d6ca..adcfea555 100644 --- a/.codegen/error_overrides.py.tmpl +++ b/.codegen/error_overrides.py.tmpl @@ -11,9 +11,9 @@ _ALL_OVERRIDES = [ debug_name="{{.Name}}", path_regex=re.compile(r'{{.PathRegex}}'), verb="{{.Verb}}", - status_code_matcher=re.compile(r'{{.StatusCodeMatcher}}'), - error_code_matcher=re.compile(r'{{.ErrorCodeMatcher}}'), - message_matcher=re.compile(r'{{.MessageMatcher}}'), + status_code_matcher=re.compile(r'{{replaceAll "'" "\\'" .StatusCodeMatcher}}'), + error_code_matcher=re.compile(r'{{replaceAll "'" "\\'" .ErrorCodeMatcher}}'), + message_matcher=re.compile(r'{{replaceAll "'" "\\'" .MessageMatcher}}'), custom_error={{.OverrideErrorCode.PascalName}}, ), {{- end }} diff --git a/.codegen/service.py.tmpl b/.codegen/service.py.tmpl index 39892b43c..4307e0913 100644 --- a/.codegen/service.py.tmpl +++ b/.codegen/service.py.tmpl @@ -8,8 +8,12 @@ from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO import time import random import logging +import requests + +from ..data_plane import DataPlaneService from ..errors import OperationTimeout, OperationFailed from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter +from ..oauth import Token _LOG = logging.getLogger('databricks.sdk') @@ -100,12 +104,16 @@ class {{.PascalName}}{{if eq "List" .PascalName}}Request{{end}}:{{if .Descriptio {{- end -}} {{- end -}} -{{range .Services}} {{if not .IsDataPlane}} +{{range .Services}} class {{.PascalName}}API:{{if .Description}} """{{.Comment " " 110}}""" {{end}} - def __init__(self, api_client): + def __init__(self, api_client{{if .IsDataPlane}}, control_plane{{end}}): self._api = api_client + {{if .IsDataPlane -}} + self._control_plane = control_plane + 
self._data_plane_service = DataPlaneService() + {{end -}} {{range .Subservices}} self._{{.SnakeName}} = {{.PascalName}}API(self._api){{end}} @@ -183,6 +191,9 @@ class {{.PascalName}}API:{{if .Description}} {{if .Request -}} {{template "method-serialize" .}} {{- end}} + {{- if .Service.IsDataPlane}} + {{template "data-plane" .}} + {{- end}} {{template "method-headers" . }} {{if .Response.HasHeaderField -}} {{template "method-response-headers" . }} @@ -195,7 +206,27 @@ class {{.PascalName}}API:{{if .Description}} return self.{{template "safe-snake-name" .}}({{range $i, $x := .Request.Fields}}{{if $i}}, {{end}}{{template "safe-snake-name" .}}={{template "safe-snake-name" .}}{{end}}).result(timeout=timeout) {{end}} {{end -}} -{{end}} +{{- end}} + +{{define "data-plane" -}} + def info_getter(): + response = self._control_plane.{{.Service.DataPlaneInfoMethod.SnakeName}}( + {{- range .Service.DataPlaneInfoMethod.Request.Fields }} + {{.SnakeName}} = {{.SnakeName}}, + {{- end}} + ) + if response.{{(index .DataPlaneInfoFields 0).SnakeName}} is None: + raise Exception("Resource does not support direct Data Plane access") + return response{{range .DataPlaneInfoFields}}.{{.SnakeName}}{{end}} + + get_params = [{{- range .Service.DataPlaneInfoMethod.Request.Fields }}{{.SnakeName}},{{end}}] + data_plane_details = self._data_plane_service.get_data_plane_details('{{.SnakeName}}', get_params, info_getter, self._api.get_oauth_token) + token = data_plane_details.token + + def auth(r: requests.PreparedRequest) -> requests.PreparedRequest: + authorization = f"{token.token_type} {token.access_token}" + r.headers["Authorization"] = authorization + return r {{- end}} {{define "method-parameters" -}} @@ -264,7 +295,7 @@ class {{.PascalName}}API:{{if .Description}} {{if .NeedsOffsetDedupe -}} # deduplicate items that may have been added during iteration seen = set() - {{- end}}{{if and .Pagination.Offset (not (eq .Path "/api/2.0/clusters/events")) }} + {{- end}}{{if and .Pagination.Offset 
(not (eq .Path "/api/2.1/clusters/events")) }} query['{{.Pagination.Offset.Name}}'] = {{- if eq .Pagination.Increment 1 -}} 1 @@ -290,7 +321,7 @@ class {{.PascalName}}API:{{if .Description}} if '{{.Pagination.Token.Bind.Name}}' not in json or not json['{{.Pagination.Token.Bind.Name}}']: return {{if or (eq "GET" .Verb) (eq "HEAD" .Verb)}}query{{else}}body{{end}}['{{.Pagination.Token.PollField.Name}}'] = json['{{.Pagination.Token.Bind.Name}}'] - {{- else if eq .Path "/api/2.0/clusters/events" -}} + {{- else if eq .Path "/api/2.1/clusters/events" -}} if 'next_page' not in json or not json['next_page']: return body = json['next_page'] @@ -319,25 +350,32 @@ class {{.PascalName}}API:{{if .Description}} {{- else if .Response.MapValue -}} return res {{- else -}} - return {{.Response.PascalName}}.from_dict(res) + return {{template "type" .Response}}.from_dict(res) {{- end}} {{- end}} {{- end}} {{define "method-do" -}} -self._api.do('{{.Verb}}', - {{ template "path" . }} - {{if .Request}} - {{- if .Request.HasQueryField}}, query=query{{end}} - {{- if .Request.MapValue}}, body=contents - {{- else if .Request.HasJsonField}}, body=body{{end}} - {{end}} - , headers=headers - {{if .Response.HasHeaderField -}} - , response_headers=response_headers - {{- end}} - {{- if and .IsRequestByteStream .RequestBodyField }}, data={{template "safe-snake-name" .RequestBodyField}}{{ end }} - {{- if .IsResponseByteStream }}, raw=True{{ end }}) + self._api.do('{{.Verb}}', + {{- if .Service.IsDataPlane -}} + url=data_plane_details.endpoint_url + {{- else -}} + {{ template "path" . 
}} + {{- end -}} + {{if .Request}} + {{- if .Request.HasQueryField}}, query=query{{end}} + {{- if .Request.MapValue}}, body=contents + {{- else if .Request.HasJsonField}}, body=body{{end}} + {{end}} + , headers=headers + {{if .Response.HasHeaderField -}} + , response_headers=response_headers + {{- end}} + {{- if and .IsRequestByteStream .RequestBodyField }}, data={{template "safe-snake-name" .RequestBodyField}}{{ end }} + {{- if .Service.IsDataPlane -}} + ,auth=auth + {{- end -}} + {{- if .IsResponseByteStream }}, raw=True{{ end }}) {{- end}} {{- define "path" -}} diff --git a/.gitattributes b/.gitattributes index c37d866dc..c8e5b2f0b 100755 --- a/.gitattributes +++ b/.gitattributes @@ -1,6 +1,7 @@ databricks/sdk/__init__.py linguist-generated=true databricks/sdk/errors/overrides.py linguist-generated=true databricks/sdk/errors/platform.py linguist-generated=true +databricks/sdk/service/apps.py linguist-generated=true databricks/sdk/service/billing.py linguist-generated=true databricks/sdk/service/catalog.py linguist-generated=true databricks/sdk/service/compute.py linguist-generated=true @@ -19,277 +20,3 @@ databricks/sdk/service/sharing.py linguist-generated=true databricks/sdk/service/sql.py linguist-generated=true databricks/sdk/service/vectorsearch.py linguist-generated=true databricks/sdk/service/workspace.py linguist-generated=true -examples/account/billable_usage/download_usage_download.py linguist-generated=true -examples/account/budgets/create_budgets.py linguist-generated=true -examples/account/budgets/get_budgets.py linguist-generated=true -examples/account/budgets/list_budgets.py linguist-generated=true -examples/account/budgets/update_budgets.py linguist-generated=true -examples/account/credentials/create_credentials.py linguist-generated=true -examples/account/credentials/create_log_delivery.py linguist-generated=true -examples/account/credentials/create_workspaces.py linguist-generated=true -examples/account/credentials/get_credentials.py 
linguist-generated=true -examples/account/credentials/list_credentials.py linguist-generated=true -examples/account/encryption_keys/create_encryption_keys.py linguist-generated=true -examples/account/encryption_keys/get_encryption_keys.py linguist-generated=true -examples/account/encryption_keys/list_encryption_keys.py linguist-generated=true -examples/account/io/read_usage_download.py linguist-generated=true -examples/account/log_delivery/create_log_delivery.py linguist-generated=true -examples/account/log_delivery/get_log_delivery.py linguist-generated=true -examples/account/log_delivery/list_log_delivery.py linguist-generated=true -examples/account/metastore_assignments/list_metastore_assignments.py linguist-generated=true -examples/account/networks/create_networks.py linguist-generated=true -examples/account/networks/get_networks.py linguist-generated=true -examples/account/networks/list_networks.py linguist-generated=true -examples/account/private_access/create_private_access.py linguist-generated=true -examples/account/private_access/get_private_access.py linguist-generated=true -examples/account/private_access/list_private_access.py linguist-generated=true -examples/account/private_access/replace_private_access.py linguist-generated=true -examples/account/service_principals/create_account_service_principal.py linguist-generated=true -examples/account/service_principals/create_workspace_assignment_on_aws.py linguist-generated=true -examples/account/service_principals/get_account_service_principal.py linguist-generated=true -examples/account/service_principals/list_account_service_principal.py linguist-generated=true -examples/account/service_principals/patch_account_service_principal.py linguist-generated=true -examples/account/service_principals/update_account_service_principal.py linguist-generated=true -examples/account/storage/create_log_delivery.py linguist-generated=true -examples/account/storage/create_storage.py linguist-generated=true 
-examples/account/storage/create_workspaces.py linguist-generated=true -examples/account/storage/get_storage.py linguist-generated=true -examples/account/storage/list_storage.py linguist-generated=true -examples/account/users/create_account_users.py linguist-generated=true -examples/account/users/delete_account_users.py linguist-generated=true -examples/account/users/get_account_users.py linguist-generated=true -examples/account/users/patch_account_users.py linguist-generated=true -examples/account/vpc_endpoints/create_vpc_endpoints.py linguist-generated=true -examples/account/vpc_endpoints/get_vpc_endpoints.py linguist-generated=true -examples/account/vpc_endpoints/list_vpc_endpoints.py linguist-generated=true -examples/account/workspace_assignment/list_workspace_assignment_on_aws.py linguist-generated=true -examples/account/workspace_assignment/update_workspace_assignment_on_aws.py linguist-generated=true -examples/account/workspaces/create_workspaces.py linguist-generated=true -examples/account/workspaces/get_workspaces.py linguist-generated=true -examples/account/workspaces/list_workspaces.py linguist-generated=true -examples/account/workspaces/update_workspaces.py linguist-generated=true -examples/workspace/alerts/create_alerts.py linguist-generated=true -examples/workspace/alerts/get_alerts.py linguist-generated=true -examples/workspace/alerts/list_alerts.py linguist-generated=true -examples/workspace/alerts/update_alerts.py linguist-generated=true -examples/workspace/catalogs/create_catalog_workspace_bindings.py linguist-generated=true -examples/workspace/catalogs/create_catalogs.py linguist-generated=true -examples/workspace/catalogs/create_schemas.py linguist-generated=true -examples/workspace/catalogs/create_shares.py linguist-generated=true -examples/workspace/catalogs/create_tables.py linguist-generated=true -examples/workspace/catalogs/create_volumes.py linguist-generated=true -examples/workspace/catalogs/get_catalogs.py linguist-generated=true 
-examples/workspace/catalogs/list_catalogs.py linguist-generated=true -examples/workspace/catalogs/update_catalog_workspace_bindings.py linguist-generated=true -examples/workspace/catalogs/update_catalogs.py linguist-generated=true -examples/workspace/cluster_policies/create_cluster_policies.py linguist-generated=true -examples/workspace/cluster_policies/edit_cluster_policies.py linguist-generated=true -examples/workspace/cluster_policies/get_cluster_policies.py linguist-generated=true -examples/workspace/cluster_policies/list_cluster_policies.py linguist-generated=true -examples/workspace/clusters/change_owner_clusters_api_integration.py linguist-generated=true -examples/workspace/clusters/create_clusters_api_integration.py linguist-generated=true -examples/workspace/clusters/delete_clusters_api_integration.py linguist-generated=true -examples/workspace/clusters/edit_clusters_api_integration.py linguist-generated=true -examples/workspace/clusters/ensure_cluster_is_running_commands_direct_usage.py linguist-generated=true -examples/workspace/clusters/events_clusters_api_integration.py linguist-generated=true -examples/workspace/clusters/get_clusters_api_integration.py linguist-generated=true -examples/workspace/clusters/list_clusters_api_integration.py linguist-generated=true -examples/workspace/clusters/list_node_types_clusters_api_integration.py linguist-generated=true -examples/workspace/clusters/pin_clusters_api_integration.py linguist-generated=true -examples/workspace/clusters/resize_clusters_api_integration.py linguist-generated=true -examples/workspace/clusters/restart_clusters_api_integration.py linguist-generated=true -examples/workspace/clusters/select_node_type_instance_pools.py linguist-generated=true -examples/workspace/clusters/select_spark_version_clusters_api_integration.py linguist-generated=true -examples/workspace/clusters/start_clusters_api_integration.py linguist-generated=true -examples/workspace/clusters/unpin_clusters_api_integration.py 
linguist-generated=true -examples/workspace/command_context/execute_commands.py linguist-generated=true -examples/workspace/command_execution/create_commands_direct_usage.py linguist-generated=true -examples/workspace/command_execution/execute_commands_direct_usage.py linguist-generated=true -examples/workspace/command_execution/start_commands.py linguist-generated=true -examples/workspace/connections/create_connections.py linguist-generated=true -examples/workspace/connections/get_connections.py linguist-generated=true -examples/workspace/connections/list_connections.py linguist-generated=true -examples/workspace/connections/update_connections.py linguist-generated=true -examples/workspace/current_user/me_current_user.py linguist-generated=true -examples/workspace/current_user/me_tokens.py linguist-generated=true -examples/workspace/dashboards/create_dashboards.py linguist-generated=true -examples/workspace/dashboards/delete_dashboards.py linguist-generated=true -examples/workspace/dashboards/get_dashboards.py linguist-generated=true -examples/workspace/dashboards/list_dashboards.py linguist-generated=true -examples/workspace/dashboards/restore_dashboards.py linguist-generated=true -examples/workspace/data_sources/list_alerts.py linguist-generated=true -examples/workspace/data_sources/list_queries.py linguist-generated=true -examples/workspace/databricks/must_tokens.py linguist-generated=true -examples/workspace/experiments/create_experiment_experiments.py linguist-generated=true -examples/workspace/experiments/create_experiment_m_lflow_runs.py linguist-generated=true -examples/workspace/experiments/create_run_m_lflow_runs.py linguist-generated=true -examples/workspace/experiments/get_experiment_experiments.py linguist-generated=true -examples/workspace/experiments/list_experiments_experiments.py linguist-generated=true -examples/workspace/experiments/update_experiment_experiments.py linguist-generated=true 
-examples/workspace/experiments/update_run_m_lflow_runs.py linguist-generated=true -examples/workspace/external_locations/create_external_locations_on_aws.py linguist-generated=true -examples/workspace/external_locations/create_volumes.py linguist-generated=true -examples/workspace/external_locations/get_external_locations_on_aws.py linguist-generated=true -examples/workspace/external_locations/list_external_locations_on_aws.py linguist-generated=true -examples/workspace/external_locations/update_external_locations_on_aws.py linguist-generated=true -examples/workspace/git_credentials/create_git_credentials.py linguist-generated=true -examples/workspace/git_credentials/get_git_credentials.py linguist-generated=true -examples/workspace/git_credentials/list_git_credentials.py linguist-generated=true -examples/workspace/git_credentials/update_git_credentials.py linguist-generated=true -examples/workspace/global_init_scripts/create_global_init_scripts.py linguist-generated=true -examples/workspace/global_init_scripts/get_global_init_scripts.py linguist-generated=true -examples/workspace/global_init_scripts/list_global_init_scripts.py linguist-generated=true -examples/workspace/global_init_scripts/update_global_init_scripts.py linguist-generated=true -examples/workspace/grants/get_effective_tables.py linguist-generated=true -examples/workspace/grants/update_tables.py linguist-generated=true -examples/workspace/groups/create_generic_permissions.py linguist-generated=true -examples/workspace/groups/create_groups.py linguist-generated=true -examples/workspace/groups/create_secrets.py linguist-generated=true -examples/workspace/groups/delete_generic_permissions.py linguist-generated=true -examples/workspace/groups/delete_groups.py linguist-generated=true -examples/workspace/groups/delete_secrets.py linguist-generated=true -examples/workspace/groups/get_groups.py linguist-generated=true -examples/workspace/instance_pools/create_instance_pools.py linguist-generated=true 
-examples/workspace/instance_pools/edit_instance_pools.py linguist-generated=true -examples/workspace/instance_pools/get_instance_pools.py linguist-generated=true -examples/workspace/instance_pools/list_instance_pools.py linguist-generated=true -examples/workspace/instance_profiles/add_aws_instance_profiles.py linguist-generated=true -examples/workspace/instance_profiles/edit_aws_instance_profiles.py linguist-generated=true -examples/workspace/instance_profiles/list_aws_instance_profiles.py linguist-generated=true -examples/workspace/ip_access_lists/create_ip_access_lists.py linguist-generated=true -examples/workspace/ip_access_lists/get_ip_access_lists.py linguist-generated=true -examples/workspace/ip_access_lists/list_ip_access_lists.py linguist-generated=true -examples/workspace/ip_access_lists/replace_ip_access_lists.py linguist-generated=true -examples/workspace/jobs/cancel_all_runs_jobs_api_full_integration.py linguist-generated=true -examples/workspace/jobs/cancel_run_jobs_api_full_integration.py linguist-generated=true -examples/workspace/jobs/create_jobs_api_full_integration.py linguist-generated=true -examples/workspace/jobs/export_run_jobs_api_full_integration.py linguist-generated=true -examples/workspace/jobs/get_jobs_api_full_integration.py linguist-generated=true -examples/workspace/jobs/get_run_output_jobs_api_full_integration.py linguist-generated=true -examples/workspace/jobs/list_jobs_api_full_integration.py linguist-generated=true -examples/workspace/jobs/list_runs_jobs_api_full_integration.py linguist-generated=true -examples/workspace/jobs/repair_run_jobs_api_full_integration.py linguist-generated=true -examples/workspace/jobs/reset_jobs_api_full_integration.py linguist-generated=true -examples/workspace/jobs/run_now_jobs_api_full_integration.py linguist-generated=true -examples/workspace/jobs/submit_jobs_api_full_integration.py linguist-generated=true -examples/workspace/jobs/update_jobs_api_full_integration.py linguist-generated=true 
-examples/workspace/libraries/update_libraries.py linguist-generated=true -examples/workspace/metastores/assign_metastores.py linguist-generated=true -examples/workspace/metastores/create_metastores.py linguist-generated=true -examples/workspace/metastores/current_metastores.py linguist-generated=true -examples/workspace/metastores/get_metastores.py linguist-generated=true -examples/workspace/metastores/list_metastores.py linguist-generated=true -examples/workspace/metastores/summary_metastores.py linguist-generated=true -examples/workspace/metastores/unassign_metastores.py linguist-generated=true -examples/workspace/metastores/update_metastores.py linguist-generated=true -examples/workspace/model_registry/create_comment_model_version_comments.py linguist-generated=true -examples/workspace/model_registry/create_model_model_version_comments.py linguist-generated=true -examples/workspace/model_registry/create_model_model_versions.py linguist-generated=true -examples/workspace/model_registry/create_model_models.py linguist-generated=true -examples/workspace/model_registry/create_model_version_model_version_comments.py linguist-generated=true -examples/workspace/model_registry/create_model_version_model_versions.py linguist-generated=true -examples/workspace/model_registry/create_webhook_registry_webhooks.py linguist-generated=true -examples/workspace/model_registry/get_model_models.py linguist-generated=true -examples/workspace/model_registry/list_models_models.py linguist-generated=true -examples/workspace/model_registry/list_webhooks_registry_webhooks.py linguist-generated=true -examples/workspace/model_registry/update_comment_model_version_comments.py linguist-generated=true -examples/workspace/model_registry/update_model_models.py linguist-generated=true -examples/workspace/model_registry/update_model_version_model_versions.py linguist-generated=true -examples/workspace/model_registry/update_webhook_registry_webhooks.py linguist-generated=true 
-examples/workspace/permissions/get_generic_permissions.py linguist-generated=true -examples/workspace/permissions/get_permission_levels_generic_permissions.py linguist-generated=true -examples/workspace/permissions/set_generic_permissions.py linguist-generated=true -examples/workspace/pipelines/create_pipelines.py linguist-generated=true -examples/workspace/pipelines/get_pipelines.py linguist-generated=true -examples/workspace/pipelines/list_pipeline_events_pipelines.py linguist-generated=true -examples/workspace/pipelines/list_pipelines_pipelines.py linguist-generated=true -examples/workspace/pipelines/update_pipelines.py linguist-generated=true -examples/workspace/policy_families/get_cluster_policy_families.py linguist-generated=true -examples/workspace/policy_families/list_cluster_policy_families.py linguist-generated=true -examples/workspace/providers/create_providers.py linguist-generated=true -examples/workspace/providers/get_providers.py linguist-generated=true -examples/workspace/providers/list_providers.py linguist-generated=true -examples/workspace/providers/list_shares_providers.py linguist-generated=true -examples/workspace/providers/update_providers.py linguist-generated=true -examples/workspace/queries/create_alerts.py linguist-generated=true -examples/workspace/queries/create_queries.py linguist-generated=true -examples/workspace/queries/get_queries.py linguist-generated=true -examples/workspace/queries/update_queries.py linguist-generated=true -examples/workspace/query_history/list_sql_query_history.py linguist-generated=true -examples/workspace/recipients/create_recipients.py linguist-generated=true -examples/workspace/recipients/get_recipients.py linguist-generated=true -examples/workspace/recipients/list_recipients.py linguist-generated=true -examples/workspace/recipients/rotate_token_recipients.py linguist-generated=true -examples/workspace/recipients/share_permissions_recipients.py linguist-generated=true 
-examples/workspace/recipients/update_recipients.py linguist-generated=true -examples/workspace/repos/create_repos.py linguist-generated=true -examples/workspace/repos/get_repos.py linguist-generated=true -examples/workspace/repos/list_repos.py linguist-generated=true -examples/workspace/repos/update_repos.py linguist-generated=true -examples/workspace/schemas/create_schemas.py linguist-generated=true -examples/workspace/schemas/create_shares.py linguist-generated=true -examples/workspace/schemas/create_tables.py linguist-generated=true -examples/workspace/schemas/create_volumes.py linguist-generated=true -examples/workspace/schemas/get_schemas.py linguist-generated=true -examples/workspace/schemas/list_schemas.py linguist-generated=true -examples/workspace/schemas/update_schemas.py linguist-generated=true -examples/workspace/secrets/create_scope_secrets.py linguist-generated=true -examples/workspace/secrets/list_acls_secrets.py linguist-generated=true -examples/workspace/secrets/list_scopes_secrets.py linguist-generated=true -examples/workspace/secrets/list_secrets_secrets.py linguist-generated=true -examples/workspace/secrets/put_acl_secrets.py linguist-generated=true -examples/workspace/secrets/put_secret_secrets.py linguist-generated=true -examples/workspace/service_principals/create_create_obo_token_on_aws.py linguist-generated=true -examples/workspace/service_principals/create_service_principals_on_aws.py linguist-generated=true -examples/workspace/service_principals/get_service_principals_on_aws.py linguist-generated=true -examples/workspace/service_principals/list_service_principals_on_aws.py linguist-generated=true -examples/workspace/service_principals/patch_service_principals_on_aws.py linguist-generated=true -examples/workspace/service_principals/update_service_principals_on_aws.py linguist-generated=true -examples/workspace/shares/create_shares.py linguist-generated=true -examples/workspace/shares/get_shares.py linguist-generated=true 
-examples/workspace/shares/list_shares.py linguist-generated=true -examples/workspace/shares/update_shares.py linguist-generated=true -examples/workspace/statement_execution/execute_shares.py linguist-generated=true -examples/workspace/statement_execution/execute_tables.py linguist-generated=true -examples/workspace/storage_credentials/create_external_locations_on_aws.py linguist-generated=true -examples/workspace/storage_credentials/create_storage_credentials_on_aws.py linguist-generated=true -examples/workspace/storage_credentials/create_volumes.py linguist-generated=true -examples/workspace/storage_credentials/get_storage_credentials_on_aws.py linguist-generated=true -examples/workspace/storage_credentials/list_storage_credentials_on_aws.py linguist-generated=true -examples/workspace/storage_credentials/update_storage_credentials_on_aws.py linguist-generated=true -examples/workspace/tables/get_tables.py linguist-generated=true -examples/workspace/tables/list_summaries_tables.py linguist-generated=true -examples/workspace/tables/list_tables.py linguist-generated=true -examples/workspace/token_management/create_obo_token_create_obo_token_on_aws.py linguist-generated=true -examples/workspace/token_management/get_create_obo_token_on_aws.py linguist-generated=true -examples/workspace/token_management/list_create_obo_token_on_aws.py linguist-generated=true -examples/workspace/tokens/create_tokens.py linguist-generated=true -examples/workspace/tokens/get_tokens.py linguist-generated=true -examples/workspace/tokens/list_tokens.py linguist-generated=true -examples/workspace/users/create_clusters_api_integration.py linguist-generated=true -examples/workspace/users/create_workspace_users.py linguist-generated=true -examples/workspace/users/delete_clusters_api_integration.py linguist-generated=true -examples/workspace/users/delete_workspace_users.py linguist-generated=true -examples/workspace/users/get_workspace_users.py linguist-generated=true 
-examples/workspace/users/list_workspace_users.py linguist-generated=true -examples/workspace/users/patch_workspace_users.py linguist-generated=true -examples/workspace/users/update_workspace_users.py linguist-generated=true -examples/workspace/volumes/create_volumes.py linguist-generated=true -examples/workspace/volumes/list_volumes.py linguist-generated=true -examples/workspace/volumes/read_volumes.py linguist-generated=true -examples/workspace/volumes/update_volumes.py linguist-generated=true -examples/workspace/warehouses/create_sql_warehouses.py linguist-generated=true -examples/workspace/warehouses/edit_sql_warehouses.py linguist-generated=true -examples/workspace/warehouses/get_sql_warehouses.py linguist-generated=true -examples/workspace/warehouses/list_sql_warehouses.py linguist-generated=true -examples/workspace/workspace/export_workspace_integration.py linguist-generated=true -examples/workspace/workspace/get_status_generic_permissions.py linguist-generated=true -examples/workspace/workspace/get_status_workspace_integration.py linguist-generated=true -examples/workspace/workspace/import_generic_permissions.py linguist-generated=true -examples/workspace/workspace/import_jobs_api_full_integration.py linguist-generated=true -examples/workspace/workspace/import_pipelines.py linguist-generated=true -examples/workspace/workspace/import_workspace_integration.py linguist-generated=true -examples/workspace/workspace/list_workspace_integration.py linguist-generated=true -examples/workspace/workspace_bindings/get_catalog_workspace_bindings.py linguist-generated=true -examples/workspace/workspace_bindings/update_catalog_workspace_bindings.py linguist-generated=true -examples/workspace/workspace_conf/get_status_repos.py linguist-generated=true diff --git a/.github/workflows/message.yml b/.github/workflows/message.yml new file mode 100644 index 000000000..057556895 --- /dev/null +++ b/.github/workflows/message.yml @@ -0,0 +1,32 @@ +name: Validate Commit Message + +on: 
+ pull_request: + types: [opened, synchronize, edited] + merge_group: + types: [checks_requested] + +jobs: + validate: + runs-on: ubuntu-latest + # GitHub required checks are shared between PRs and the Merge Queue. + # Since there is no PR title on Merge Queue, we need to trigger and + # skip this test for Merge Queue to succeed. + if: github.event_name == 'pull_request' + steps: + - name: Checkout + uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - name: Validate Tag + env: + TITLE: ${{ github.event.pull_request.title }} + run: | + TAG=$(echo "$TITLE" | sed -ne 's/\[\(.*\)\].*/\1/p') + if grep -q "tag: \"\[$TAG\]\"" .codegen/changelog_config.yml; then + echo "Valid tag found: [$TAG]" + else + echo "Invalid or missing tag in commit message: [$TAG]" + exit 1 + fi \ No newline at end of file diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 000000000..7e0db7234 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,30 @@ +name: Test Workflow + +on: + workflow_call: + inputs: + os: + required: true + type: string + pyVersion: + required: true + type: string + +jobs: + test: + strategy: + fail-fast: false + runs-on: ${{ inputs.os }} + steps: + - name: Checkout + uses: actions/checkout@v2 + + - name: Unshallow + run: git fetch --prune --unshallow + + - uses: actions/setup-python@v4 + with: + python-version: ${{ inputs.pyVersion }} + + - name: Run tests + run: make dev install test diff --git a/CHANGELOG.md b/CHANGELOG.md index 341770ca5..458921ee0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,460 @@ # Version changelog +## [Release] Release v0.36.0 + +### Breaking Changes +* `external_browser` now uses the `databricks-cli` app instead of the third-party "6128a518-99a9-425b-8333-4cc94f04cacd" application when performing the U2M login flow for Azure workspaces when a client ID is not otherwise specified. This matches the AWS behavior. 
+* The signatures of several OAuth-related constructors have changed to support U2M OAuth with Azure Entra ID application registrations. See https://github.com/databricks/databricks-sdk-py/blob/main/examples/flask_app_with_oauth.py for examples of how to use these classes. + * `OAuthClient()`: renamed to `OAuthClient.from_host()` + * `SessionCredentials()` and `SessionCredentials.from_dict()`: now accepts `token_endpoint`, `client_id`, `client_secret`, and `refresh_url` as parameters, rather than accepting the `OAuthClient`. + * `TokenCache()`: now accepts `host`, `token_endpoint`, `client_id`, `client_secret`, and `refresh_url` as parameters, rather than accepting the `OAuthClient`. + +### Bug Fixes + + * Decouple OAuth functionality from `Config` ([#784](https://github.com/databricks/databricks-sdk-py/pull/784)). + + +### Release + + * Release v0.35.0 ([#793](https://github.com/databricks/databricks-sdk-py/pull/793)). + + + +## [Release] Release v0.35.0 + +### New Features and Improvements + + * Open AI Client Mixin ([#779](https://github.com/databricks/databricks-sdk-py/pull/779)). + + +### Bug Fixes + + * Update Serving Endpoint mixing template and docs generation logic ([#792](https://github.com/databricks/databricks-sdk-py/pull/792)). + + +### API Changes: + + * Added `databricks.sdk.service.pipelines.ReportSpec` dataclass. + * Added `unity_catalog_provisioning_state` field for `databricks.sdk.service.catalog.OnlineTable`. + * Added `is_truncated` field for `databricks.sdk.service.dashboards.Result`. + * Added `effective_budget_policy_id` field for `databricks.sdk.service.jobs.BaseJob`. + * Added `budget_policy_id` field for `databricks.sdk.service.jobs.CreateJob`. + * Added `effective_budget_policy_id` field for `databricks.sdk.service.jobs.Job`. + * Added `budget_policy_id` field for `databricks.sdk.service.jobs.JobSettings`. + * Added `budget_policy_id` field for `databricks.sdk.service.jobs.SubmitRun`. 
+ * Added `report` field for `databricks.sdk.service.pipelines.IngestionConfig`. + * Added `sequence_by` field for `databricks.sdk.service.pipelines.TableSpecificConfig`. + * Added `notify_on_ok` field for `databricks.sdk.service.sql.Alert`. + * Added `notify_on_ok` field for `databricks.sdk.service.sql.CreateAlertRequestAlert`. + * Added `notify_on_ok` field for `databricks.sdk.service.sql.ListAlertsResponseAlert`. + * Added `notify_on_ok` field for `databricks.sdk.service.sql.UpdateAlertRequestAlert`. + +OpenAPI SHA: cf9c61453990df0f9453670f2fe68e1b128647a2, Date: 2024-10-14 + +## [Release] Release v0.34.0 + +### Bug Fixes + + * Fix Model Serving Test ([#781](https://github.com/databricks/databricks-sdk-py/pull/781)). + * Include package name for external types when deserializing responses ([#786](https://github.com/databricks/databricks-sdk-py/pull/786)). + + +### Internal Changes + + * Refactor ApiClient into `_BaseClient` and `ApiClient` ([#785](https://github.com/databricks/databricks-sdk-py/pull/785)). + * Update to latest OpenAPI spec ([#787](https://github.com/databricks/databricks-sdk-py/pull/787)). + * revert Support Models in `dbutils.fs` operations ([#750](https://github.com/databricks/databricks-sdk-py/pull/750)) ([#778](https://github.com/databricks/databricks-sdk-py/pull/778)). + + +### API Changes: + + * Added [w.disable_legacy_dbfs](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/disable_legacy_dbfs.html) workspace-level service. + * Added `default_source_code_path` and `resources` fields for `databricks.sdk.service.apps.App`. + * Added `resources` field for `databricks.sdk.service.apps.CreateAppRequest`. + * Added `resources` field for `databricks.sdk.service.apps.UpdateAppRequest`. + +OpenAPI SHA: bc17b474818138f19b78a7bea0675707dead2b87, Date: 2024-10-07 + +## [Release] Release v0.33.0 + +### Internal Changes + + * Add DCO guidelines ([#773](https://github.com/databricks/databricks-sdk-py/pull/773)). 
+ * Update SDK to latest OpenAPI spec ([#766](https://github.com/databricks/databricks-sdk-py/pull/766)). + + +### API Changes: + + * Added [w.disable_legacy_access](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/disable_legacy_access.html) workspace-level service and [a.disable_legacy_features](https://databricks-sdk-py.readthedocs.io/en/latest/account/account_settings/disable_legacy_features.html) account-level service. + * Added [w.temporary_table_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/temporary_table_credentials.html) workspace-level service. + * Added `put_ai_gateway()` method for [w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving_endpoints.html) workspace-level service. + * Added `databricks.sdk.service.apps.ApplicationState`, `databricks.sdk.service.apps.ApplicationStatus`, `databricks.sdk.service.apps.ComputeState` and `databricks.sdk.service.apps.ComputeStatus` dataclasses. + * Added `databricks.sdk.service.catalog.AwsCredentials`, `databricks.sdk.service.catalog.AzureUserDelegationSas`, `databricks.sdk.service.catalog.GcpOauthToken`, `databricks.sdk.service.catalog.GenerateTemporaryTableCredentialRequest`, `databricks.sdk.service.catalog.GenerateTemporaryTableCredentialResponse`, `databricks.sdk.service.catalog.R2Credentials` and `databricks.sdk.service.catalog.TableOperation` dataclasses. 
+ * Added `databricks.sdk.service.serving.AiGatewayConfig`, `databricks.sdk.service.serving.AiGatewayGuardrailParameters`, `databricks.sdk.service.serving.AiGatewayGuardrailPiiBehavior`, `databricks.sdk.service.serving.AiGatewayGuardrailPiiBehaviorBehavior`, `databricks.sdk.service.serving.AiGatewayGuardrails`, `databricks.sdk.service.serving.AiGatewayInferenceTableConfig`, `databricks.sdk.service.serving.AiGatewayRateLimit`, `databricks.sdk.service.serving.AiGatewayRateLimitKey`, `databricks.sdk.service.serving.AiGatewayRateLimitRenewalPeriod`, `databricks.sdk.service.serving.AiGatewayUsageTrackingConfig`, `databricks.sdk.service.serving.PutAiGatewayRequest` and `databricks.sdk.service.serving.PutAiGatewayResponse` dataclasses. + * Added `databricks.sdk.service.settings.BooleanMessage`, `databricks.sdk.service.settings.DeleteDisableLegacyAccessRequest`, `databricks.sdk.service.settings.DeleteDisableLegacyAccessResponse`, `databricks.sdk.service.settings.DeleteDisableLegacyFeaturesRequest`, `databricks.sdk.service.settings.DeleteDisableLegacyFeaturesResponse`, `databricks.sdk.service.settings.DisableLegacyAccess`, `databricks.sdk.service.settings.DisableLegacyFeatures`, `databricks.sdk.service.settings.GetDisableLegacyAccessRequest`, `databricks.sdk.service.settings.GetDisableLegacyFeaturesRequest`, `databricks.sdk.service.settings.UpdateDisableLegacyAccessRequest` and `databricks.sdk.service.settings.UpdateDisableLegacyFeaturesRequest` dataclasses. 
+ * Added `databricks.sdk.service.workspace.CreateCredentialsRequest`, `databricks.sdk.service.workspace.CreateRepoRequest`, `databricks.sdk.service.workspace.CreateRepoResponse`, `databricks.sdk.service.workspace.DeleteCredentialsRequest`, `any`, `any`, `databricks.sdk.service.workspace.GetCredentialsRequest`, `databricks.sdk.service.workspace.GetRepoResponse`, `databricks.sdk.service.workspace.ListCredentialsResponse`, `databricks.sdk.service.workspace.UpdateCredentialsRequest`, `any`, `databricks.sdk.service.workspace.UpdateRepoRequest` and `any` dataclasses. + * Added `app_status` and `compute_status` fields for `databricks.sdk.service.apps.App`. + * Added `deployment_id` field for `databricks.sdk.service.apps.CreateAppDeploymentRequest`. + * Added `external_access_enabled` field for `databricks.sdk.service.catalog.GetMetastoreSummaryResponse`. + * Added `include_manifest_capabilities` field for `databricks.sdk.service.catalog.GetTableRequest`. + * Added `include_manifest_capabilities` field for `databricks.sdk.service.catalog.ListSummariesRequest`. + * Added `include_manifest_capabilities` field for `databricks.sdk.service.catalog.ListTablesRequest`. + * Added `external_access_enabled` field for `databricks.sdk.service.catalog.MetastoreInfo`. + * Added `budget_policy_id` and `schema` fields for `databricks.sdk.service.pipelines.CreatePipeline`. + * Added `budget_policy_id` and `schema` fields for `databricks.sdk.service.pipelines.EditPipeline`. + * Added `effective_budget_policy_id` field for `databricks.sdk.service.pipelines.GetPipelineResponse`. + * Added `budget_policy_id` and `schema` fields for `databricks.sdk.service.pipelines.PipelineSpec`. + * Added `ai_gateway` field for `databricks.sdk.service.serving.CreateServingEndpoint`. + * Added `ai_gateway` field for `databricks.sdk.service.serving.ServingEndpoint`. + * Added `ai_gateway` field for `databricks.sdk.service.serving.ServingEndpointDetailed`. 
+ * Added `workspace_id` field for `databricks.sdk.service.settings.TokenInfo`. + * Added `credential_id`, `git_provider` and `git_username` fields for `databricks.sdk.service.workspace.GetCredentialsResponse`. + * Changed `delete()`, `start()` and `stop()` methods for [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps.html) workspace-level service to return `databricks.sdk.service.apps.App` dataclass. + * Changed `deploy()` method for [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps.html) workspace-level service with new required argument order. + * Changed `create()` method for [w.git_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/git_credentials.html) workspace-level service . New request type is `databricks.sdk.service.workspace.CreateCredentialsRequest` dataclass. + * Changed `delete()` method for [w.git_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/git_credentials.html) workspace-level service . New request type is `databricks.sdk.service.workspace.DeleteCredentialsRequest` dataclass. + * Changed `delete()` method for [w.git_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/git_credentials.html) workspace-level service to return `any` dataclass. + * Changed `get()` method for [w.git_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/git_credentials.html) workspace-level service . New request type is `databricks.sdk.service.workspace.GetCredentialsRequest` dataclass. + * Changed `get()` method for [w.git_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/git_credentials.html) workspace-level service to return `databricks.sdk.service.workspace.GetCredentialsResponse` dataclass. 
+ * Changed `list()` method for [w.git_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/git_credentials.html) workspace-level service to return `databricks.sdk.service.workspace.ListCredentialsResponse` dataclass. + * Changed `update()` method for [w.git_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/git_credentials.html) workspace-level service . New request type is `databricks.sdk.service.workspace.UpdateCredentialsRequest` dataclass. + * Changed `update()` method for [w.git_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/git_credentials.html) workspace-level service to return `any` dataclass. + * Changed `create()` method for [w.repos](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/repos.html) workspace-level service to return `databricks.sdk.service.workspace.CreateRepoResponse` dataclass. + * Changed `create()` method for [w.repos](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/repos.html) workspace-level service . New request type is `databricks.sdk.service.workspace.CreateRepoRequest` dataclass. + * Changed `delete()` method for [w.repos](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/repos.html) workspace-level service to return `any` dataclass. + * Changed `get()` method for [w.repos](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/repos.html) workspace-level service to return `databricks.sdk.service.workspace.GetRepoResponse` dataclass. + * Changed `update()` method for [w.repos](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/repos.html) workspace-level service to return `any` dataclass. + * Changed `update()` method for [w.repos](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/repos.html) workspace-level service . New request type is `databricks.sdk.service.workspace.UpdateRepoRequest` dataclass. 
+ * Changed `source_code_path` field for `databricks.sdk.service.apps.AppDeployment` to no longer be required. + * Changed `source_code_path` field for `databricks.sdk.service.apps.CreateAppDeploymentRequest` to no longer be required. + * Changed `return_params` and `routine_dependencies` fields for `databricks.sdk.service.catalog.CreateFunction` to no longer be required. + * Changed `credential_id` and `git_provider` fields for `databricks.sdk.service.workspace.CreateCredentialsResponse` to be required. + * Changed `credential_id` field for `databricks.sdk.service.workspace.CredentialInfo` to be required. + * Changed `patterns` field for `databricks.sdk.service.workspace.SparseCheckout` to `databricks.sdk.service.workspace.List` dataclass. + * Changed `patterns` field for `databricks.sdk.service.workspace.SparseCheckoutUpdate` to `databricks.sdk.service.workspace.List` dataclass. + * Removed `databricks.sdk.service.apps.AppState`, `databricks.sdk.service.apps.AppStatus`, `any` and `any` dataclasses. + * Removed `databricks.sdk.service.sql.ClientCallContext`, `databricks.sdk.service.sql.EncodedText`, `databricks.sdk.service.sql.EncodedTextEncoding`, `databricks.sdk.service.sql.QuerySource`, `databricks.sdk.service.sql.QuerySourceDriverInfo`, `databricks.sdk.service.sql.QuerySourceEntryPoint`, `databricks.sdk.service.sql.QuerySourceJobManager`, `databricks.sdk.service.sql.QuerySourceTrigger` and `databricks.sdk.service.sql.ServerlessChannelInfo` dataclasses. + * Removed `databricks.sdk.service.workspace.CreateCredentials`, `databricks.sdk.service.workspace.CreateRepo`, `databricks.sdk.service.workspace.DeleteGitCredentialRequest`, `databricks.sdk.service.workspace.GetGitCredentialRequest`, `databricks.sdk.service.workspace.SparseCheckoutPattern`, `databricks.sdk.service.workspace.UpdateCredentials`, `databricks.sdk.service.workspace.UpdateRepo` and `any` dataclasses. + * Removed `status` field for `databricks.sdk.service.apps.App`. 
+ * Removed `query_source` field for `databricks.sdk.service.sql.QueryInfo`. + * Removed `credentials` field for `databricks.sdk.service.workspace.GetCredentialsResponse`. + +OpenAPI SHA: 248f4ad9668661da9d0bf4a7b0119a2d44fd1e75, Date: 2024-09-25 + +## [Release] Release v0.32.3 + +### New Features and Improvements + + * Integrate Databricks SDK with Model Serving Auth Provider ([#761](https://github.com/databricks/databricks-sdk-py/pull/761)). + + +### Bug Fixes + + * Add DataPlane docs to the index ([#764](https://github.com/databricks/databricks-sdk-py/pull/764)). + * `mypy` error: Skipping analyzing "google": module is installed, but missing library stubs or py.typed marker ([#769](https://github.com/databricks/databricks-sdk-py/pull/769)). + + + +## [Release] Release v0.32.2 + +### New Features and Improvements + + * Support Models in `dbutils.fs` operations ([#750](https://github.com/databricks/databricks-sdk-py/pull/750)). + + +### Bug Fixes + + * Do not specify --tenant flag when fetching managed identity access token from the CLI ([#748](https://github.com/databricks/databricks-sdk-py/pull/748)). + * Fix deserialization of 401/403 errors ([#758](https://github.com/databricks/databricks-sdk-py/pull/758)). + * Use correct optional typing in `WorkspaceClient` for `mypy` ([#760](https://github.com/databricks/databricks-sdk-py/pull/760)). + + + +## [Release] Release v0.32.1 + +### Bug Fixes + + * Properly include message when handing SCIM errors ([#753](https://github.com/databricks/databricks-sdk-py/pull/753)). + + + +## [Release] Release v0.32.0 + +### Bug Fixes + + * Handle non-JSON errors gracefully ([#741](https://github.com/databricks/databricks-sdk-py/pull/741)). + + +### Documentation + + * Add Data Plane access documentation ([#732](https://github.com/databricks/databricks-sdk-py/pull/732)). + + +### Internal Changes + + * Fix test_iam::test_scim_error_unmarshall integration test ([#743](https://github.com/databricks/databricks-sdk-py/pull/743)). 
+ + +### API Changes: + + * Added `regenerate_dashboard()` method for [w.quality_monitors](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/quality_monitors.html) workspace-level service. + * Added `databricks.sdk.service.catalog.RegenerateDashboardRequest` and `databricks.sdk.service.catalog.RegenerateDashboardResponse` dataclasses. + * Added `databricks.sdk.service.jobs.QueueDetails`, `databricks.sdk.service.jobs.QueueDetailsCodeCode`, `databricks.sdk.service.jobs.RunLifecycleStateV2State`, `databricks.sdk.service.jobs.RunStatus`, `databricks.sdk.service.jobs.TerminationCodeCode`, `databricks.sdk.service.jobs.TerminationDetails` and `databricks.sdk.service.jobs.TerminationTypeType` dataclasses. + * Added `status` field for `databricks.sdk.service.jobs.BaseRun`. + * Added `status` field for `databricks.sdk.service.jobs.RepairHistoryItem`. + * Added `status` field for `databricks.sdk.service.jobs.Run`. + * Added `status` field for `databricks.sdk.service.jobs.RunTask`. + * Added `max_provisioned_throughput` and `min_provisioned_throughput` fields for `databricks.sdk.service.serving.ServedModelInput`. + * Added `columns_to_sync` field for `databricks.sdk.service.vectorsearch.DeltaSyncVectorIndexSpecRequest`. + * Changed `workload_size` field for `databricks.sdk.service.serving.ServedModelInput` to no longer be required. + +OpenAPI SHA: d05898328669a3f8ab0c2ecee37db2673d3ea3f7, Date: 2024-09-04 + +## [Release] Release v0.31.1 + +### Bug Fixes + + * Fix `DatabricksConfig.copy` when authenticated with OAuth ([#723](https://github.com/databricks/databricks-sdk-py/pull/723)). + + +### Internal Changes + + * Fix get_workspace_client test to match Go SDK behavior ([#738](https://github.com/databricks/databricks-sdk-py/pull/738)). + * Verify that `WorkspaceClient` created from `AccountClient` does actually work through integration tests ([#736](https://github.com/databricks/databricks-sdk-py/pull/736)). 
+ + + +## [Release] Release v0.31.0 + +### Bug Fixes + + * Fixed regression introduced in v0.30.0 causing `ValueError: Invalid semantic version: 0.33.1+420240816190912` ([#729](https://github.com/databricks/databricks-sdk-py/pull/729)). + + +### Internal Changes + + * Escape single quotes in regex matchers ([#727](https://github.com/databricks/databricks-sdk-py/pull/727)). + + +### API Changes: + + * Added [w.policy_compliance_for_clusters](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/policy_compliance_for_clusters.html) workspace-level service. + * Added [w.policy_compliance_for_jobs](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/policy_compliance_for_jobs.html) workspace-level service. + * Added [w.resource_quotas](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/resource_quotas.html) workspace-level service. + * Added `databricks.sdk.service.catalog.GetQuotaRequest`, `databricks.sdk.service.catalog.GetQuotaResponse`, `databricks.sdk.service.catalog.ListQuotasRequest`, `databricks.sdk.service.catalog.ListQuotasResponse` and `databricks.sdk.service.catalog.QuotaInfo` dataclasses. + * Added `databricks.sdk.service.compute.ClusterCompliance`, `databricks.sdk.service.compute.ClusterSettingsChange`, `databricks.sdk.service.compute.EnforceClusterComplianceRequest`, `databricks.sdk.service.compute.EnforceClusterComplianceResponse`, `databricks.sdk.service.compute.GetClusterComplianceRequest`, `databricks.sdk.service.compute.GetClusterComplianceResponse`, `databricks.sdk.service.compute.ListClusterCompliancesRequest` and `databricks.sdk.service.compute.ListClusterCompliancesResponse` dataclasses. 
+ * Added `databricks.sdk.service.jobs.EnforcePolicyComplianceForJobResponseJobClusterSettingsChange`, `databricks.sdk.service.jobs.EnforcePolicyComplianceRequest`, `databricks.sdk.service.jobs.EnforcePolicyComplianceResponse`, `databricks.sdk.service.jobs.GetPolicyComplianceRequest`, `databricks.sdk.service.jobs.GetPolicyComplianceResponse`, `databricks.sdk.service.jobs.JobCompliance`, `databricks.sdk.service.jobs.ListJobComplianceForPolicyResponse` and `databricks.sdk.service.jobs.ListJobComplianceRequest` dataclasses. + * Added `fallback` field for `databricks.sdk.service.catalog.CreateExternalLocation`. + * Added `fallback` field for `databricks.sdk.service.catalog.ExternalLocationInfo`. + * Added `fallback` field for `databricks.sdk.service.catalog.UpdateExternalLocation`. + * Added `job_run_id` field for `databricks.sdk.service.jobs.BaseRun`. + * Added `job_run_id` field for `databricks.sdk.service.jobs.Run`. + * Added `include_metrics` field for `databricks.sdk.service.sql.ListQueryHistoryRequest`. + * Added `statement_ids` field for `databricks.sdk.service.sql.QueryFilter`. + * Removed `databricks.sdk.service.sql.ContextFilter` dataclass. + * Removed `context_filter` field for `databricks.sdk.service.sql.QueryFilter`. + * Removed `pipeline_id` and `pipeline_update_id` fields for `databricks.sdk.service.sql.QuerySource`. + +OpenAPI SHA: 3eae49b444cac5a0118a3503e5b7ecef7f96527a, Date: 2024-08-21 + +## [Release] Release v0.30.0 + +### New Features and Improvements + + * Add DataPlane support ([#700](https://github.com/databricks/databricks-sdk-py/pull/700)). + * Support partners in SDK ([#648](https://github.com/databricks/databricks-sdk-py/pull/648)). + + +### Bug Fixes + + * Check trailing slash in host url ([#681](https://github.com/databricks/databricks-sdk-py/pull/681)). + * Decrease runtime of recursive workspace listing test ([#721](https://github.com/databricks/databricks-sdk-py/pull/721)). 
+ * Fix test_get_workspace_client and test_runtime_auth_from_jobs ([#719](https://github.com/databricks/databricks-sdk-py/pull/719)). + * Infer Azure tenant ID if not set ([#638](https://github.com/databricks/databricks-sdk-py/pull/638)). + + +### Internal Changes + + * Add Release tag and Workflow fix ([#704](https://github.com/databricks/databricks-sdk-py/pull/704)). + * Add apps package in docgen ([#722](https://github.com/databricks/databricks-sdk-py/pull/722)). + * Fix processing of `quoted` titles ([#712](https://github.com/databricks/databricks-sdk-py/pull/712)). + * Improve Changelog by grouping changes ([#703](https://github.com/databricks/databricks-sdk-py/pull/703)). + * Move PR message validation to a separate workflow ([#707](https://github.com/databricks/databricks-sdk-py/pull/707)). + * Test that Jobs API endpoints are pinned to 2.1 ([#714](https://github.com/databricks/databricks-sdk-py/pull/714)). + * Trigger the validate workflow in the merge queue ([#709](https://github.com/databricks/databricks-sdk-py/pull/709)). + * Update OpenAPI spec ([#715](https://github.com/databricks/databricks-sdk-py/pull/715)). + + +### Other Changes + + * Add Windows WorkFlow ([#692](https://github.com/databricks/databricks-sdk-py/pull/692)). + * Fix auth tests for windows. ([#697](https://github.com/databricks/databricks-sdk-py/pull/697)). + * Fix for cancelled workflow ([#701](https://github.com/databricks/databricks-sdk-py/pull/701)). + * Fix test_core for windows ([#702](https://github.com/databricks/databricks-sdk-py/pull/702)). + * Fix test_local_io for windows ([#695](https://github.com/databricks/databricks-sdk-py/pull/695)). + * Remove duplicate ubuntu tests ([#693](https://github.com/databricks/databricks-sdk-py/pull/693)). + * fix windows path ([#660](https://github.com/databricks/databricks-sdk-py/pull/660)) ([#673](https://github.com/databricks/databricks-sdk-py/pull/673)). + + +### API Changes: + + * Added `databricks.sdk.service.apps` package. 
+ * Added [a.usage_dashboards](https://databricks-sdk-py.readthedocs.io/en/latest/account/usage_dashboards.html) account-level service. + * Added [w.alerts_legacy](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts_legacy.html) workspace-level service, [w.queries_legacy](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries_legacy.html) workspace-level service and [w.query_visualizations_legacy](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/query_visualizations_legacy.html) workspace-level service. + * Added [w.genie](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/genie.html) workspace-level service. + * Added [w.notification_destinations](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/notification_destinations.html) workspace-level service. + * Added `update()` method for [w.clusters](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/clusters.html) workspace-level service. + * Added `list_visualizations()` method for [w.queries](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries.html) workspace-level service. + * Added `databricks.sdk.service.catalog.GetBindingsSecurableType` and `databricks.sdk.service.catalog.UpdateBindingsSecurableType` dataclasses. 
+ * Added `databricks.sdk.service.billing.ActionConfiguration`, `databricks.sdk.service.billing.ActionConfigurationType`, `databricks.sdk.service.billing.AlertConfiguration`, `databricks.sdk.service.billing.AlertConfigurationQuantityType`, `databricks.sdk.service.billing.AlertConfigurationTimePeriod`, `databricks.sdk.service.billing.AlertConfigurationTriggerType`, `databricks.sdk.service.billing.BudgetConfiguration`, `databricks.sdk.service.billing.BudgetConfigurationFilter`, `databricks.sdk.service.billing.BudgetConfigurationFilterClause`, `databricks.sdk.service.billing.BudgetConfigurationFilterOperator`, `databricks.sdk.service.billing.BudgetConfigurationFilterTagClause`, `databricks.sdk.service.billing.BudgetConfigurationFilterWorkspaceIdClause`, `databricks.sdk.service.billing.CreateBillingUsageDashboardRequest`, `databricks.sdk.service.billing.CreateBillingUsageDashboardResponse`, `databricks.sdk.service.billing.CreateBudgetConfigurationBudget`, `databricks.sdk.service.billing.CreateBudgetConfigurationBudgetActionConfigurations`, `databricks.sdk.service.billing.CreateBudgetConfigurationBudgetAlertConfigurations`, `databricks.sdk.service.billing.CreateBudgetConfigurationRequest`, `databricks.sdk.service.billing.CreateBudgetConfigurationResponse`, `databricks.sdk.service.billing.DeleteBudgetConfigurationRequest`, `any`, `databricks.sdk.service.billing.GetBillingUsageDashboardRequest`, `databricks.sdk.service.billing.GetBillingUsageDashboardResponse`, `databricks.sdk.service.billing.GetBudgetConfigurationRequest`, `databricks.sdk.service.billing.GetBudgetConfigurationResponse`, `databricks.sdk.service.billing.ListBudgetConfigurationsRequest`, `databricks.sdk.service.billing.ListBudgetConfigurationsResponse`, `databricks.sdk.service.billing.UpdateBudgetConfigurationBudget`, `databricks.sdk.service.billing.UpdateBudgetConfigurationRequest`, `databricks.sdk.service.billing.UpdateBudgetConfigurationResponse` and `databricks.sdk.service.billing.UsageDashboardType` 
dataclasses. + * Added `databricks.sdk.service.compute.ListClustersFilterBy`, `databricks.sdk.service.compute.ListClustersSortBy`, `databricks.sdk.service.compute.ListClustersSortByDirection`, `databricks.sdk.service.compute.ListClustersSortByField`, `databricks.sdk.service.compute.UpdateCluster`, `databricks.sdk.service.compute.UpdateClusterResource` and `any` dataclasses. + * Added `databricks.sdk.service.dashboards.ExecuteMessageQueryRequest`, `databricks.sdk.service.dashboards.GenieAttachment`, `databricks.sdk.service.dashboards.GenieConversation`, `databricks.sdk.service.dashboards.GenieCreateConversationMessageRequest`, `databricks.sdk.service.dashboards.GenieGetConversationMessageRequest`, `databricks.sdk.service.dashboards.GenieGetMessageQueryResultRequest`, `databricks.sdk.service.dashboards.GenieGetMessageQueryResultResponse`, `databricks.sdk.service.dashboards.GenieMessage`, `databricks.sdk.service.dashboards.GenieStartConversationMessageRequest`, `databricks.sdk.service.dashboards.GenieStartConversationResponse`, `databricks.sdk.service.dashboards.MessageError`, `databricks.sdk.service.dashboards.MessageErrorType`, `databricks.sdk.service.dashboards.MessageStatus`, `databricks.sdk.service.dashboards.QueryAttachment`, `databricks.sdk.service.dashboards.Result` and `databricks.sdk.service.dashboards.TextAttachment` dataclasses. + * Added `any`, `databricks.sdk.service.iam.MigratePermissionsRequest` and `databricks.sdk.service.iam.MigratePermissionsResponse` dataclasses. + * Added `databricks.sdk.service.oauth2.ListCustomAppIntegrationsRequest` and `databricks.sdk.service.oauth2.ListPublishedAppIntegrationsRequest` dataclasses. + * Added `databricks.sdk.service.pipelines.IngestionPipelineDefinition` and `databricks.sdk.service.pipelines.PipelineStateInfoHealth` dataclasses. + * Added `databricks.sdk.service.serving.GoogleCloudVertexAiConfig` dataclass. 
+ * Added `databricks.sdk.service.settings.Config`, `databricks.sdk.service.settings.CreateNotificationDestinationRequest`, `databricks.sdk.service.settings.DeleteNotificationDestinationRequest`, `databricks.sdk.service.settings.DestinationType`, `databricks.sdk.service.settings.EmailConfig`, `any`, `databricks.sdk.service.settings.GenericWebhookConfig`, `databricks.sdk.service.settings.GetNotificationDestinationRequest`, `databricks.sdk.service.settings.ListNotificationDestinationsRequest`, `databricks.sdk.service.settings.ListNotificationDestinationsResponse`, `databricks.sdk.service.settings.ListNotificationDestinationsResult`, `databricks.sdk.service.settings.MicrosoftTeamsConfig`, `databricks.sdk.service.settings.NotificationDestination`, `databricks.sdk.service.settings.PagerdutyConfig`, `databricks.sdk.service.settings.SlackConfig` and `databricks.sdk.service.settings.UpdateNotificationDestinationRequest` dataclasses. + * Added `databricks.sdk.service.sql.AlertCondition`, `databricks.sdk.service.sql.AlertConditionOperand`, `databricks.sdk.service.sql.AlertConditionThreshold`, `databricks.sdk.service.sql.AlertOperandColumn`, `databricks.sdk.service.sql.AlertOperandValue`, `databricks.sdk.service.sql.AlertOperator`, `databricks.sdk.service.sql.ClientCallContext`, `databricks.sdk.service.sql.ContextFilter`, `databricks.sdk.service.sql.CreateAlertRequest`, `databricks.sdk.service.sql.CreateAlertRequestAlert`, `databricks.sdk.service.sql.CreateQueryRequest`, `databricks.sdk.service.sql.CreateQueryRequestQuery`, `databricks.sdk.service.sql.CreateQueryVisualizationsLegacyRequest`, `databricks.sdk.service.sql.CreateVisualizationRequest`, `databricks.sdk.service.sql.CreateVisualizationRequestVisualization`, `databricks.sdk.service.sql.DatePrecision`, `databricks.sdk.service.sql.DateRange`, `databricks.sdk.service.sql.DateRangeValue`, `databricks.sdk.service.sql.DateRangeValueDynamicDateRange`, `databricks.sdk.service.sql.DateValue`, 
`databricks.sdk.service.sql.DateValueDynamicDate`, `databricks.sdk.service.sql.DeleteAlertsLegacyRequest`, `databricks.sdk.service.sql.DeleteQueriesLegacyRequest`, `databricks.sdk.service.sql.DeleteQueryVisualizationsLegacyRequest`, `databricks.sdk.service.sql.DeleteVisualizationRequest`, `any`, `databricks.sdk.service.sql.EncodedText`, `databricks.sdk.service.sql.EncodedTextEncoding`, `databricks.sdk.service.sql.EnumValue`, `databricks.sdk.service.sql.GetAlertsLegacyRequest`, `databricks.sdk.service.sql.GetQueriesLegacyRequest`, `databricks.sdk.service.sql.LegacyAlert`, `databricks.sdk.service.sql.LegacyAlertState`, `databricks.sdk.service.sql.LegacyQuery`, `databricks.sdk.service.sql.LegacyVisualization`, `databricks.sdk.service.sql.LifecycleState`, `databricks.sdk.service.sql.ListAlertsRequest`, `databricks.sdk.service.sql.ListAlertsResponse`, `databricks.sdk.service.sql.ListAlertsResponseAlert`, `databricks.sdk.service.sql.ListQueriesLegacyRequest`, `databricks.sdk.service.sql.ListQueryObjectsResponse`, `databricks.sdk.service.sql.ListQueryObjectsResponseQuery`, `databricks.sdk.service.sql.ListVisualizationsForQueryRequest`, `databricks.sdk.service.sql.ListVisualizationsForQueryResponse`, `databricks.sdk.service.sql.NumericValue`, `databricks.sdk.service.sql.QueryBackedValue`, `databricks.sdk.service.sql.QueryParameter`, `databricks.sdk.service.sql.QuerySource`, `databricks.sdk.service.sql.QuerySourceDriverInfo`, `databricks.sdk.service.sql.QuerySourceEntryPoint`, `databricks.sdk.service.sql.QuerySourceJobManager`, `databricks.sdk.service.sql.QuerySourceTrigger`, `databricks.sdk.service.sql.RestoreQueriesLegacyRequest`, `databricks.sdk.service.sql.RunAsMode`, `databricks.sdk.service.sql.ServerlessChannelInfo`, `databricks.sdk.service.sql.StatementResponse`, `databricks.sdk.service.sql.TextValue`, `databricks.sdk.service.sql.TrashAlertRequest`, `databricks.sdk.service.sql.TrashQueryRequest`, `databricks.sdk.service.sql.UpdateAlertRequest`, 
`databricks.sdk.service.sql.UpdateAlertRequestAlert`, `databricks.sdk.service.sql.UpdateQueryRequest`, `databricks.sdk.service.sql.UpdateQueryRequestQuery`, `databricks.sdk.service.sql.UpdateVisualizationRequest` and `databricks.sdk.service.sql.UpdateVisualizationRequestVisualization` dataclasses. + * Added `force` field for `databricks.sdk.service.catalog.DeleteSchemaRequest`. + * Added `max_results` and `page_token` fields for `databricks.sdk.service.catalog.GetBindingsRequest`. + * Added `include_aliases` field for `databricks.sdk.service.catalog.GetByAliasRequest`. + * Added `include_aliases` field for `databricks.sdk.service.catalog.GetModelVersionRequest`. + * Added `include_aliases` field for `databricks.sdk.service.catalog.GetRegisteredModelRequest`. + * Added `max_results` and `page_token` fields for `databricks.sdk.service.catalog.ListSystemSchemasRequest`. + * Added `next_page_token` field for `databricks.sdk.service.catalog.ListSystemSchemasResponse`. + * Added `aliases` field for `databricks.sdk.service.catalog.ModelVersionInfo`. + * Added `next_page_token` field for `databricks.sdk.service.catalog.WorkspaceBindingsResponse`. + * Added `version` field for `databricks.sdk.service.compute.GetPolicyFamilyRequest`. + * Added `filter_by`, `page_size`, `page_token` and `sort_by` fields for `databricks.sdk.service.compute.ListClustersRequest`. + * Added `next_page_token` and `prev_page_token` fields for `databricks.sdk.service.compute.ListClustersResponse`. + * Added `page_token` field for `databricks.sdk.service.jobs.GetRunRequest`. + * Added `iterations`, `next_page_token` and `prev_page_token` fields for `databricks.sdk.service.jobs.Run`. + * Added `create_time`, `created_by`, `creator_username` and `scopes` fields for `databricks.sdk.service.oauth2.GetCustomAppIntegrationOutput`. + * Added `next_page_token` field for `databricks.sdk.service.oauth2.GetCustomAppIntegrationsOutput`. 
+ * Added `create_time` and `created_by` fields for `databricks.sdk.service.oauth2.GetPublishedAppIntegrationOutput`. + * Added `next_page_token` field for `databricks.sdk.service.oauth2.GetPublishedAppIntegrationsOutput`. + * Added `enable_local_disk_encryption` field for `databricks.sdk.service.pipelines.PipelineCluster`. + * Added `whl` field for `databricks.sdk.service.pipelines.PipelineLibrary`. + * Added `health` field for `databricks.sdk.service.pipelines.PipelineStateInfo`. + * Added `ai21labs_api_key_plaintext` field for `databricks.sdk.service.serving.Ai21LabsConfig`. + * Added `aws_access_key_id_plaintext` and `aws_secret_access_key_plaintext` fields for `databricks.sdk.service.serving.AmazonBedrockConfig`. + * Added `anthropic_api_key_plaintext` field for `databricks.sdk.service.serving.AnthropicConfig`. + * Added `cohere_api_base` and `cohere_api_key_plaintext` fields for `databricks.sdk.service.serving.CohereConfig`. + * Added `databricks_api_token_plaintext` field for `databricks.sdk.service.serving.DatabricksModelServingConfig`. + * Added `google_cloud_vertex_ai_config` field for `databricks.sdk.service.serving.ExternalModel`. + * Added `microsoft_entra_client_secret_plaintext` and `openai_api_key_plaintext` fields for `databricks.sdk.service.serving.OpenAiConfig`. + * Added `palm_api_key_plaintext` field for `databricks.sdk.service.serving.PaLmConfig`. + * Added `expiration_time` field for `databricks.sdk.service.sharing.CreateRecipient`. + * Added `next_page_token` field for `databricks.sdk.service.sharing.GetRecipientSharePermissionsResponse`. + * Added `next_page_token` field for `databricks.sdk.service.sharing.ListProviderSharesResponse`. + * Added `max_results` and `page_token` fields for `databricks.sdk.service.sharing.ListProvidersRequest`. + * Added `next_page_token` field for `databricks.sdk.service.sharing.ListProvidersResponse`. + * Added `max_results` and `page_token` fields for `databricks.sdk.service.sharing.ListRecipientsRequest`. 
+ * Added `next_page_token` field for `databricks.sdk.service.sharing.ListRecipientsResponse`. + * Added `max_results` and `page_token` fields for `databricks.sdk.service.sharing.ListSharesRequest`. + * Added `next_page_token` field for `databricks.sdk.service.sharing.ListSharesResponse`. + * Added `max_results` and `page_token` fields for `databricks.sdk.service.sharing.SharePermissionsRequest`. + * Added `expiration_time` field for `databricks.sdk.service.sharing.UpdateRecipient`. + * Added `max_results` and `page_token` fields for `databricks.sdk.service.sharing.UpdateSharePermissions`. + * Added `condition`, `create_time`, `custom_body`, `custom_subject`, `display_name`, `lifecycle_state`, `owner_user_name`, `parent_path`, `query_id`, `seconds_to_retrigger`, `trigger_time` and `update_time` fields for `databricks.sdk.service.sql.Alert`. + * Added `id` field for `databricks.sdk.service.sql.GetAlertRequest`. + * Added `id` field for `databricks.sdk.service.sql.GetQueryRequest`. + * Added `page_token` field for `databricks.sdk.service.sql.ListQueriesRequest`. + * Added `apply_auto_limit`, `catalog`, `create_time`, `display_name`, `last_modifier_user_name`, `lifecycle_state`, `owner_user_name`, `parameters`, `parent_path`, `query_text`, `run_as_mode`, `schema`, `update_time` and `warehouse_id` fields for `databricks.sdk.service.sql.Query`. + * Added `context_filter` field for `databricks.sdk.service.sql.QueryFilter`. + * Added `query_source` field for `databricks.sdk.service.sql.QueryInfo`. + * Added `create_time`, `display_name`, `query_id`, `serialized_options`, `serialized_query_plan` and `update_time` fields for `databricks.sdk.service.sql.Visualization`. + * Changed `create()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service to return `databricks.sdk.service.billing.CreateBudgetConfigurationResponse` dataclass. 
+ * Changed `create()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service. New request type is `databricks.sdk.service.billing.CreateBudgetConfigurationRequest` dataclass. + * Changed `delete()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service. New request type is `databricks.sdk.service.billing.DeleteBudgetConfigurationRequest` dataclass. + * Changed `delete()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service to return `any` dataclass. + * Changed `get()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service. New request type is `databricks.sdk.service.billing.GetBudgetConfigurationRequest` dataclass. + * Changed `get()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service to return `databricks.sdk.service.billing.GetBudgetConfigurationResponse` dataclass. + * Changed `list()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service to return `databricks.sdk.service.billing.ListBudgetConfigurationsResponse` dataclass. + * Changed `list()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service to require request of `databricks.sdk.service.billing.ListBudgetConfigurationsRequest` dataclass. + * Changed `update()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service to return `databricks.sdk.service.billing.UpdateBudgetConfigurationResponse` dataclass. + * Changed `update()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service.
New request type is `databricks.sdk.service.billing.UpdateBudgetConfigurationRequest` dataclass. + * Changed `create()` method for [a.custom_app_integration](https://databricks-sdk-py.readthedocs.io/en/latest/account/custom_app_integration.html) account-level service with new required argument order. + * Changed `list()` method for [a.custom_app_integration](https://databricks-sdk-py.readthedocs.io/en/latest/account/custom_app_integration.html) account-level service to require request of `databricks.sdk.service.oauth2.ListCustomAppIntegrationsRequest` dataclass. + * Changed `list()` method for [a.published_app_integration](https://databricks-sdk-py.readthedocs.io/en/latest/account/published_app_integration.html) account-level service to require request of `databricks.sdk.service.oauth2.ListPublishedAppIntegrationsRequest` dataclass. + * Changed `delete()` method for [a.workspace_assignment](https://databricks-sdk-py.readthedocs.io/en/latest/account/workspace_assignment.html) account-level service to return `any` dataclass. + * Changed `update()` method for [a.workspace_assignment](https://databricks-sdk-py.readthedocs.io/en/latest/account/workspace_assignment.html) account-level service with new required argument order. + * Changed `create()` method for [w.alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts.html) workspace-level service. New request type is `databricks.sdk.service.sql.CreateAlertRequest` dataclass. + * Changed `delete()` method for [w.alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts.html) workspace-level service to return `any` dataclass. + * Changed `delete()` method for [w.alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts.html) workspace-level service. New request type is `databricks.sdk.service.sql.TrashAlertRequest` dataclass.
+ * Changed `get()` method for [w.alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts.html) workspace-level service with new required argument order. + * Changed `list()` method for [w.alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts.html) workspace-level service to return `databricks.sdk.service.sql.ListAlertsResponse` dataclass. + * Changed `list()` method for [w.alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts.html) workspace-level service to require request of `databricks.sdk.service.sql.ListAlertsRequest` dataclass. + * Changed `update()` method for [w.alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts.html) workspace-level service to return `databricks.sdk.service.sql.Alert` dataclass. + * Changed `update()` method for [w.alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts.html) workspace-level service. New request type is `databricks.sdk.service.sql.UpdateAlertRequest` dataclass. + * Changed `create()` and `edit()` methods for [w.cluster_policies](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/cluster_policies.html) workspace-level service with new required argument order. + * Changed `get()` method for [w.model_versions](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/model_versions.html) workspace-level service to return `databricks.sdk.service.catalog.ModelVersionInfo` dataclass. + * Changed `migrate_permissions()` method for [w.permission_migration](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/permission_migration.html) workspace-level service. New request type is `databricks.sdk.service.iam.MigratePermissionsRequest` dataclass. + * Changed `migrate_permissions()` method for [w.permission_migration](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/permission_migration.html) workspace-level service to return `databricks.sdk.service.iam.MigratePermissionsResponse` dataclass.
+ * Changed `create()` method for [w.queries](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries.html) workspace-level service. New request type is `databricks.sdk.service.sql.CreateQueryRequest` dataclass. + * Changed `delete()` method for [w.queries](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries.html) workspace-level service to return `any` dataclass. + * Changed `delete()` method for [w.queries](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries.html) workspace-level service. New request type is `databricks.sdk.service.sql.TrashQueryRequest` dataclass. + * Changed `get()` method for [w.queries](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries.html) workspace-level service with new required argument order. + * Changed `list()` method for [w.queries](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries.html) workspace-level service to return `databricks.sdk.service.sql.ListQueryObjectsResponse` dataclass. + * Changed `update()` method for [w.queries](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries.html) workspace-level service. New request type is `databricks.sdk.service.sql.UpdateQueryRequest` dataclass. + * Changed `create()` method for [w.query_visualizations](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/query_visualizations.html) workspace-level service. New request type is `databricks.sdk.service.sql.CreateVisualizationRequest` dataclass. + * Changed `delete()` method for [w.query_visualizations](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/query_visualizations.html) workspace-level service to return `any` dataclass. + * Changed `delete()` method for [w.query_visualizations](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/query_visualizations.html) workspace-level service. New request type is `databricks.sdk.service.sql.DeleteVisualizationRequest` dataclass.
+ * Changed `update()` method for [w.query_visualizations](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/query_visualizations.html) workspace-level service . New request type is `databricks.sdk.service.sql.UpdateVisualizationRequest` dataclass. + * Changed `list()` method for [w.shares](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/shares.html) workspace-level service to require request of `databricks.sdk.service.sharing.ListSharesRequest` dataclass. + * Changed `execute_statement()` and `get_statement()` methods for [w.statement_execution](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/statement_execution.html) workspace-level service to return `databricks.sdk.service.sql.StatementResponse` dataclass. + * Changed `securable_type` field for `databricks.sdk.service.catalog.GetBindingsRequest` to `databricks.sdk.service.catalog.GetBindingsSecurableType` dataclass. + * Changed `securable_type` field for `databricks.sdk.service.catalog.UpdateWorkspaceBindingsParameters` to `databricks.sdk.service.catalog.UpdateBindingsSecurableType` dataclass. + * Changed `name` field for `databricks.sdk.service.compute.CreatePolicy` to no longer be required. + * Changed `name` field for `databricks.sdk.service.compute.EditPolicy` to no longer be required. + * Changed `policy_family_id` field for `databricks.sdk.service.compute.GetPolicyFamilyRequest` to `str` dataclass. + * Changed `policy_families` field for `databricks.sdk.service.compute.ListPolicyFamiliesResponse` to no longer be required. + * Changed `definition`, `description`, `name` and `policy_family_id` fields for `databricks.sdk.service.compute.PolicyFamily` to no longer be required. + * Changed `permissions` field for `databricks.sdk.service.iam.UpdateWorkspaceAssignments` to no longer be required. + * Changed `access_control_list` field for `databricks.sdk.service.jobs.CreateJob` to `databricks.sdk.service.jobs.JobAccessControlRequestList` dataclass. 
+ * Changed `access_control_list` field for `databricks.sdk.service.jobs.SubmitRun` to `databricks.sdk.service.jobs.JobAccessControlRequestList` dataclass. + * Changed `name` and `redirect_urls` fields for `databricks.sdk.service.oauth2.CreateCustomAppIntegration` to no longer be required. + * Changed `ingestion_definition` field for `databricks.sdk.service.pipelines.CreatePipeline` to `databricks.sdk.service.pipelines.IngestionPipelineDefinition` dataclass. + * Changed `ingestion_definition` field for `databricks.sdk.service.pipelines.EditPipeline` to `databricks.sdk.service.pipelines.IngestionPipelineDefinition` dataclass. + * Changed `ingestion_definition` field for `databricks.sdk.service.pipelines.PipelineSpec` to `databricks.sdk.service.pipelines.IngestionPipelineDefinition` dataclass. + * Changed `ai21labs_api_key` field for `databricks.sdk.service.serving.Ai21LabsConfig` to no longer be required. + * Changed `aws_access_key_id` and `aws_secret_access_key` fields for `databricks.sdk.service.serving.AmazonBedrockConfig` to no longer be required. + * Changed `anthropic_api_key` field for `databricks.sdk.service.serving.AnthropicConfig` to no longer be required. + * Changed `cohere_api_key` field for `databricks.sdk.service.serving.CohereConfig` to no longer be required. + * Changed `databricks_api_token` field for `databricks.sdk.service.serving.DatabricksModelServingConfig` to no longer be required. + * Changed `palm_api_key` field for `databricks.sdk.service.serving.PaLmConfig` to no longer be required. + * Changed `tags` field for `databricks.sdk.service.sql.Query` to `databricks.sdk.service.sql.List` dataclass. + * Changed `user_ids` and `warehouse_ids` fields for `databricks.sdk.service.sql.QueryFilter` to `databricks.sdk.service.sql.List` dataclass. + * Changed `results` field for `databricks.sdk.service.sql.QueryList` to `databricks.sdk.service.sql.LegacyQueryList` dataclass. 
+ * Changed `visualization` field for `databricks.sdk.service.sql.Widget` to `databricks.sdk.service.sql.LegacyVisualization` dataclass. + * Removed [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps.html) workspace-level service. + * Removed `restore()` method for [w.queries](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries.html) workspace-level service. + * Removed `databricks.sdk.service.marketplace.FilterType`, `databricks.sdk.service.marketplace.ProviderIconFile`, `databricks.sdk.service.marketplace.ProviderIconType`, `databricks.sdk.service.marketplace.ProviderListingSummaryInfo`, `databricks.sdk.service.marketplace.SortBy` and `databricks.sdk.service.marketplace.VisibilityFilter` dataclasses. + * Removed `databricks.sdk.service.billing.Budget`, `databricks.sdk.service.billing.BudgetAlert`, `databricks.sdk.service.billing.BudgetList`, `databricks.sdk.service.billing.BudgetWithStatus`, `databricks.sdk.service.billing.BudgetWithStatusStatusDailyItem`, `databricks.sdk.service.billing.DeleteBudgetRequest`, `any`, `databricks.sdk.service.billing.GetBudgetRequest`, `any`, `databricks.sdk.service.billing.WrappedBudget` and `databricks.sdk.service.billing.WrappedBudgetWithStatus` dataclasses. + * Removed `any`, `databricks.sdk.service.iam.PermissionMigrationRequest` and `databricks.sdk.service.iam.PermissionMigrationResponse` dataclasses. + * Removed `databricks.sdk.service.pipelines.ManagedIngestionPipelineDefinition` dataclass. 
+ * Removed `databricks.sdk.service.serving.App`, `databricks.sdk.service.serving.AppDeployment`, `databricks.sdk.service.serving.AppDeploymentArtifacts`, `databricks.sdk.service.serving.AppDeploymentMode`, `databricks.sdk.service.serving.AppDeploymentState`, `databricks.sdk.service.serving.AppDeploymentStatus`, `databricks.sdk.service.serving.AppEnvironment`, `databricks.sdk.service.serving.AppState`, `databricks.sdk.service.serving.AppStatus`, `databricks.sdk.service.serving.CreateAppDeploymentRequest`, `databricks.sdk.service.serving.CreateAppRequest`, `databricks.sdk.service.serving.DeleteAppRequest`, `databricks.sdk.service.serving.EnvVariable`, `databricks.sdk.service.serving.GetAppDeploymentRequest`, `databricks.sdk.service.serving.GetAppEnvironmentRequest`, `databricks.sdk.service.serving.GetAppRequest`, `databricks.sdk.service.serving.ListAppDeploymentsRequest`, `databricks.sdk.service.serving.ListAppDeploymentsResponse`, `databricks.sdk.service.serving.ListAppsRequest`, `databricks.sdk.service.serving.ListAppsResponse`, `databricks.sdk.service.serving.StartAppRequest`, `databricks.sdk.service.serving.StopAppRequest`, `any` and `databricks.sdk.service.serving.UpdateAppRequest` dataclasses. + * Removed `databricks.sdk.service.sql.CreateQueryVisualizationRequest`, `databricks.sdk.service.sql.DeleteAlertRequest`, `databricks.sdk.service.sql.DeleteQueryRequest`, `databricks.sdk.service.sql.DeleteQueryVisualizationRequest`, `databricks.sdk.service.sql.ExecuteStatementResponse`, `databricks.sdk.service.sql.GetStatementResponse`, `databricks.sdk.service.sql.RestoreQueryRequest`, `databricks.sdk.service.sql.StatementId`, `databricks.sdk.service.sql.UserId` and `databricks.sdk.service.sql.WarehouseId` dataclasses. + * Removed `databricks.sdk.service.compute.PolicyFamilyId` dataclass. + * Removed `can_use_client` field for `databricks.sdk.service.compute.ListClustersRequest`. 
+ * Removed `is_ascending` and `sort_by` fields for `databricks.sdk.service.marketplace.ListListingsRequest`. + * Removed `provider_summary` field for `databricks.sdk.service.marketplace.Listing`. + * Removed `filters` field for `databricks.sdk.service.marketplace.ListingSetting`. + * Removed `metastore_id` field for `databricks.sdk.service.marketplace.ListingSummary`. + * Removed `is_ascending` and `sort_by` fields for `databricks.sdk.service.marketplace.SearchListingsRequest`. + * Removed `created_at`, `last_triggered_at`, `name`, `options`, `parent`, `query`, `rearm`, `updated_at` and `user` fields for `databricks.sdk.service.sql.Alert`. + * Removed `alert_id` field for `databricks.sdk.service.sql.GetAlertRequest`. + * Removed `query_id` field for `databricks.sdk.service.sql.GetQueryRequest`. + * Removed `order`, `page` and `q` fields for `databricks.sdk.service.sql.ListQueriesRequest`. + * Removed `include_metrics` field for `databricks.sdk.service.sql.ListQueryHistoryRequest`. + * Removed `can_edit`, `created_at`, `data_source_id`, `is_archived`, `is_draft`, `is_favorite`, `is_safe`, `last_modified_by`, `last_modified_by_id`, `latest_query_data_id`, `name`, `options`, `parent`, `permission_tier`, `query`, `query_hash`, `run_as_role`, `updated_at`, `user`, `user_id` and `visualizations` fields for `databricks.sdk.service.sql.Query`. + * Removed `statement_ids` field for `databricks.sdk.service.sql.QueryFilter`. + * Removed `can_subscribe_to_live_query` field for `databricks.sdk.service.sql.QueryInfo`. + * Removed `metadata_time_ms`, `planning_time_ms` and `query_execution_time_ms` fields for `databricks.sdk.service.sql.QueryMetrics`. + * Removed `created_at`, `description`, `name`, `options`, `query` and `updated_at` fields for `databricks.sdk.service.sql.Visualization`. 
+ +OpenAPI SHA: f98c07f9c71f579de65d2587bb0292f83d10e55d, Date: 2024-08-12 + ## 0.29.0 ### Breaking Changes diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 18b980a55..249d7498e 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -18,54 +18,22 @@ Code style is enforced by a formatter check in your pull request. We use [yapf]( ## Signed Commits This repo requires all contributors to sign their commits. To configure this, you can follow [Github's documentation](https://docs.github.com/en/authentication/managing-commit-signature-verification/signing-commits) to create a GPG key, upload it to your Github account, and configure your git client to sign commits. -## Sign your work -The sign-off is a simple line at the end of the explanation for the patch. Your signature certifies that you wrote the patch or otherwise have the right to pass it on as an open-source patch. The rules are pretty simple: if you can certify the below (from developercertificate.org): +## Developer Certificate of Origin -``` -Developer Certificate of Origin -Version 1.1 - -Copyright (C) 2004, 2006 The Linux Foundation and its contributors. -1 Letterman Drive -Suite D4700 -San Francisco, CA, 94129 - -Everyone is permitted to copy and distribute verbatim copies of this -license document, but changing it is not allowed. - - -Developer's Certificate of Origin 1.1 - -By making a contribution to this project, I certify that: - -(a) The contribution was created in whole or in part by me and I - have the right to submit it under the open source license - indicated in the file; or +To contribute to this repository, you must sign off your commits to certify +that you have the right to contribute the code and that it complies with the +open source license. The rules are pretty simple, if you can certify the +content of [DCO](./DCO), then simply add a "Signed-off-by" line to your +commit message to certify your compliance. 
Please use your real name as +pseudonymous/anonymous contributions are not accepted. -(b) The contribution is based upon previous work that, to the best - of my knowledge, is covered under an appropriate open source - license and I have the right under that license to submit that - work with modifications, whether created in whole or in part - by me, under the same open source license (unless I am - permitted to submit under a different license), as indicated - in the file; or - -(c) The contribution was provided directly to me by some other - person who certified (a), (b) or (c) and I have not modified - it. - -(d) I understand and agree that this project and the contribution - are public and that a record of the contribution (including all - personal information I submit with it, including my sign-off) is - maintained indefinitely and may be redistributed consistent with - this project or the open source license(s) involved. +``` +Signed-off-by: Joe Smith ``` -Then you just add a line to every git commit message: +If you set your `user.name` and `user.email` git configs, you can sign your +commit automatically with `git commit -s`: ``` -Signed-off-by: Joe Smith +git commit -s -m "Your commit message" ``` - -If you set your `user.name` and `user.email` git configs, you can sign your commit automatically with git commit -s. -You must use your real name (sorry, no pseudonyms or anonymous contributions). 
diff --git a/DCO b/DCO new file mode 100644 index 000000000..d4f11dfce --- /dev/null +++ b/DCO @@ -0,0 +1,25 @@ +Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +(a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +(b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +(c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. + +(d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. diff --git a/Makefile b/Makefile index 9f6de61aa..eb8fe8397 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,10 @@ dev: python3 -m venv .venv +ifeq ($(OS), Windows_NT) + .venv\Scripts\activate +else . 
.venv/bin/activate +endif pip install '.[dev]' install: diff --git a/NOTICE b/NOTICE index 2a353a6c8..c05cdd318 100644 --- a/NOTICE +++ b/NOTICE @@ -12,8 +12,22 @@ googleapis/google-auth-library-python - https://github.com/googleapis/google-aut Copyright google-auth-library-python authors License - https://github.com/googleapis/google-auth-library-python/blob/main/LICENSE +openai/openai-python - https://github.com/openai/openai-python +Copyright 2024 OpenAI +License - https://github.com/openai/openai-python/blob/main/LICENSE + This software contains code from the following open source projects, licensed under the BSD (3-clause) license. x/oauth2 - https://cs.opensource.google/go/x/oauth2/+/master:oauth2.go Copyright 2014 The Go Authors. All rights reserved. License - https://cs.opensource.google/go/x/oauth2/+/master:LICENSE + +encode/httpx - https://github.com/encode/httpx +Copyright 2019, Encode OSS Ltd +License - https://github.com/encode/httpx/blob/master/LICENSE.md + +This software contains code from the following open source projects, licensed under the MIT license: + +langchain-ai/langchain - https://github.com/langchain-ai/langchain/blob/master/libs/partners/openai +Copyright 2023 LangChain, Inc. 
+License - https://github.com/langchain-ai/langchain/blob/master/libs/partners/openai/LICENSE diff --git a/README.md b/README.md index 66e99e579..f17435aef 100644 --- a/README.md +++ b/README.md @@ -45,6 +45,7 @@ The SDK's internal HTTP client is robust and handles failures on different level - [Long-running operations](#long-running-operations) - [Paginated responses](#paginated-responses) - [Single-sign-on with OAuth](#single-sign-on-sso-with-oauth) +- [User Agent Request Attribution](#user-agent-request-attribution) - [Error handling](#error-handling) - [Logging](#logging) - [Integration with `dbutils`](#interaction-with-dbutils) @@ -523,6 +524,29 @@ logging.info(f'Created new custom app: ' f'--client_secret {custom_app.client_secret}') ``` +## User Agent Request Attribution + +The Databricks SDK for Python uses the `User-Agent` header to include request metadata along with each request. By default, this includes the version of the Python SDK, the version of the Python language used by your application, and the underlying operating system. To statically add additional metadata, you can use the `with_partner()` and `with_product()` functions in the `databricks.sdk.useragent` module. `with_partner()` can be used by partners to indicate that code using the Databricks SDK for Python should be attributed to a specific partner. Multiple partners can be registered at once. Partner names can contain any number, digit, `.`, `-`, `_` or `+`. + +```python +from databricks.sdk import useragent +useragent.with_partner("partner-abc") +useragent.with_partner("partner-xyz") +``` + +`with_product()` can be used to define the name and version of the product that is built with the Databricks SDK for Python. The product name has the same restrictions as the partner name above, and the product version must be a valid [SemVer](https://semver.org/). Subsequent calls to `with_product()` replace the original product with the new user-specified one. 
+ +```python +from databricks.sdk import useragent +useragent.with_product("databricks-example-product", "1.2.0") +``` + +If both the `DATABRICKS_SDK_UPSTREAM` and `DATABRICKS_SDK_UPSTREAM_VERSION` environment variables are defined, these will also be included in the `User-Agent` header. + +If additional metadata needs to be specified that isn't already supported by the above interfaces, you can use the `with_user_agent_extra()` function to register arbitrary key-value pairs to include in the user agent. Multiple values associated with the same key are allowed. Keys have the same restrictions as the partner name above. Values must be either as described above or SemVer strings. + +Additional `User-Agent` information can be associated with different instances of `DatabricksConfig`. To add metadata to a specific instance of `DatabricksConfig`, use the `with_user_agent_extra()` method. + ## Error handling The Databricks SDK for Python provides a robust error-handling mechanism that allows developers to catch and handle API errors. When an error occurs, the SDK will raise an exception that contains information about the error, such as the HTTP status code, error message, and error details. Developers can catch these exceptions and handle them appropriately in their code. 
diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py index 05c95fb6f..159946461 100755 --- a/databricks/sdk/__init__.py +++ b/databricks/sdk/__init__.py @@ -1,12 +1,16 @@ +from typing import Optional + import databricks.sdk.core as client import databricks.sdk.dbutils as dbutils from databricks.sdk import azure from databricks.sdk.credentials_provider import CredentialsStrategy from databricks.sdk.mixins.compute import ClustersExt from databricks.sdk.mixins.files import DbfsExt +from databricks.sdk.mixins.open_ai_client import ServingEndpointsExt from databricks.sdk.mixins.workspace import WorkspaceExt +from databricks.sdk.service.apps import AppsAPI from databricks.sdk.service.billing import (BillableUsageAPI, BudgetsAPI, - LogDeliveryAPI) + LogDeliveryAPI, UsageDashboardsAPI) from databricks.sdk.service.catalog import (AccountMetastoreAssignmentsAPI, AccountMetastoresAPI, AccountStorageCredentialsAPI, @@ -16,18 +20,21 @@ GrantsAPI, MetastoresAPI, ModelVersionsAPI, OnlineTablesAPI, QualityMonitorsAPI, - RegisteredModelsAPI, SchemasAPI, + RegisteredModelsAPI, + ResourceQuotasAPI, SchemasAPI, StorageCredentialsAPI, SystemSchemasAPI, TableConstraintsAPI, TablesAPI, + TemporaryTableCredentialsAPI, VolumesAPI, WorkspaceBindingsAPI) from databricks.sdk.service.compute import (ClusterPoliciesAPI, ClustersAPI, CommandExecutionAPI, GlobalInitScriptsAPI, InstancePoolsAPI, InstanceProfilesAPI, LibrariesAPI, + PolicyComplianceForClustersAPI, PolicyFamiliesAPI) -from databricks.sdk.service.dashboards import LakeviewAPI +from databricks.sdk.service.dashboards import GenieAPI, LakeviewAPI from databricks.sdk.service.files import DbfsAPI, FilesAPI from databricks.sdk.service.iam import (AccountAccessControlAPI, AccountAccessControlProxyAPI, @@ -37,7 +44,7 @@ GroupsAPI, PermissionMigrationAPI, PermissionsAPI, ServicePrincipalsAPI, UsersAPI, WorkspaceAssignmentAPI) -from databricks.sdk.service.jobs import JobsAPI +from databricks.sdk.service.jobs import JobsAPI, 
PolicyComplianceForJobsAPI from databricks.sdk.service.marketplace import ( ConsumerFulfillmentsAPI, ConsumerInstallationsAPI, ConsumerListingsAPI, ConsumerPersonalizationRequestsAPI, ConsumerProvidersAPI, @@ -55,7 +62,8 @@ NetworksAPI, PrivateAccessAPI, StorageAPI, VpcEndpointsAPI, Workspace, WorkspacesAPI) -from databricks.sdk.service.serving import AppsAPI, ServingEndpointsAPI +from databricks.sdk.service.serving import (ServingEndpointsAPI, + ServingEndpointsDataPlaneAPI) from databricks.sdk.service.settings import (AccountIpAccessListsAPI, AccountSettingsAPI, AutomaticClusterUpdateAPI, @@ -63,10 +71,14 @@ CredentialsManagerAPI, CspEnablementAccountAPI, DefaultNamespaceAPI, + DisableLegacyAccessAPI, + DisableLegacyDbfsAPI, + DisableLegacyFeaturesAPI, EnhancedSecurityMonitoringAPI, EsmEnablementAccountAPI, IpAccessListsAPI, NetworkConnectivityAPI, + NotificationDestinationsAPI, PersonalComputeAPI, RestrictWorkspaceAdminsAPI, SettingsAPI, TokenManagementAPI, @@ -74,11 +86,13 @@ from databricks.sdk.service.sharing import (CleanRoomsAPI, ProvidersAPI, RecipientActivationAPI, RecipientsAPI, SharesAPI) -from databricks.sdk.service.sql import (AlertsAPI, DashboardsAPI, - DashboardWidgetsAPI, DataSourcesAPI, - DbsqlPermissionsAPI, QueriesAPI, +from databricks.sdk.service.sql import (AlertsAPI, AlertsLegacyAPI, + DashboardsAPI, DashboardWidgetsAPI, + DataSourcesAPI, DbsqlPermissionsAPI, + QueriesAPI, QueriesLegacyAPI, QueryHistoryAPI, QueryVisualizationsAPI, + QueryVisualizationsLegacyAPI, StatementExecutionAPI, WarehousesAPI) from databricks.sdk.service.vectorsearch import (VectorSearchEndpointsAPI, VectorSearchIndexesAPI) @@ -109,31 +123,31 @@ class WorkspaceClient: def __init__(self, *, - host: str = None, - account_id: str = None, - username: str = None, - password: str = None, - client_id: str = None, - client_secret: str = None, - token: str = None, - profile: str = None, - config_file: str = None, - azure_workspace_resource_id: str = None, - azure_client_secret: 
str = None, - azure_client_id: str = None, - azure_tenant_id: str = None, - azure_environment: str = None, - auth_type: str = None, - cluster_id: str = None, - google_credentials: str = None, - google_service_account: str = None, - debug_truncate_bytes: int = None, - debug_headers: bool = None, + host: Optional[str] = None, + account_id: Optional[str] = None, + username: Optional[str] = None, + password: Optional[str] = None, + client_id: Optional[str] = None, + client_secret: Optional[str] = None, + token: Optional[str] = None, + profile: Optional[str] = None, + config_file: Optional[str] = None, + azure_workspace_resource_id: Optional[str] = None, + azure_client_secret: Optional[str] = None, + azure_client_id: Optional[str] = None, + azure_tenant_id: Optional[str] = None, + azure_environment: Optional[str] = None, + auth_type: Optional[str] = None, + cluster_id: Optional[str] = None, + google_credentials: Optional[str] = None, + google_service_account: Optional[str] = None, + debug_truncate_bytes: Optional[int] = None, + debug_headers: Optional[bool] = None, product="unknown", product_version="0.0.0", - credentials_strategy: CredentialsStrategy = None, - credentials_provider: CredentialsStrategy = None, - config: client.Config = None): + credentials_strategy: Optional[CredentialsStrategy] = None, + credentials_provider: Optional[CredentialsStrategy] = None, + config: Optional[client.Config] = None): if not config: config = client.Config(host=host, account_id=account_id, @@ -162,8 +176,10 @@ def __init__(self, self._config = config.copy() self._dbutils = _make_dbutils(self._config) self._api_client = client.ApiClient(self._config) + serving_endpoints = ServingEndpointsExt(self._api_client) self._account_access_control_proxy = AccountAccessControlProxyAPI(self._api_client) self._alerts = AlertsAPI(self._api_client) + self._alerts_legacy = AlertsLegacyAPI(self._api_client) self._apps = AppsAPI(self._api_client) self._artifact_allowlists = 
ArtifactAllowlistsAPI(self._api_client) self._catalogs = CatalogsAPI(self._api_client) @@ -188,6 +204,7 @@ def __init__(self, self._external_locations = ExternalLocationsAPI(self._api_client) self._files = FilesAPI(self._api_client) self._functions = FunctionsAPI(self._api_client) + self._genie = GenieAPI(self._api_client) self._git_credentials = GitCredentialsAPI(self._api_client) self._global_init_scripts = GlobalInitScriptsAPI(self._api_client) self._grants = GrantsAPI(self._api_client) @@ -201,10 +218,13 @@ def __init__(self, self._metastores = MetastoresAPI(self._api_client) self._model_registry = ModelRegistryAPI(self._api_client) self._model_versions = ModelVersionsAPI(self._api_client) + self._notification_destinations = NotificationDestinationsAPI(self._api_client) self._online_tables = OnlineTablesAPI(self._api_client) self._permission_migration = PermissionMigrationAPI(self._api_client) self._permissions = PermissionsAPI(self._api_client) self._pipelines = PipelinesAPI(self._api_client) + self._policy_compliance_for_clusters = PolicyComplianceForClustersAPI(self._api_client) + self._policy_compliance_for_jobs = PolicyComplianceForJobsAPI(self._api_client) self._policy_families = PolicyFamiliesAPI(self._api_client) self._provider_exchange_filters = ProviderExchangeFiltersAPI(self._api_client) self._provider_exchanges = ProviderExchangesAPI(self._api_client) @@ -217,16 +237,20 @@ def __init__(self, self._providers = ProvidersAPI(self._api_client) self._quality_monitors = QualityMonitorsAPI(self._api_client) self._queries = QueriesAPI(self._api_client) + self._queries_legacy = QueriesLegacyAPI(self._api_client) self._query_history = QueryHistoryAPI(self._api_client) self._query_visualizations = QueryVisualizationsAPI(self._api_client) + self._query_visualizations_legacy = QueryVisualizationsLegacyAPI(self._api_client) self._recipient_activation = RecipientActivationAPI(self._api_client) self._recipients = RecipientsAPI(self._api_client) 
self._registered_models = RegisteredModelsAPI(self._api_client) self._repos = ReposAPI(self._api_client) + self._resource_quotas = ResourceQuotasAPI(self._api_client) self._schemas = SchemasAPI(self._api_client) self._secrets = SecretsAPI(self._api_client) self._service_principals = ServicePrincipalsAPI(self._api_client) - self._serving_endpoints = ServingEndpointsAPI(self._api_client) + self._serving_endpoints = serving_endpoints + self._serving_endpoints_data_plane = ServingEndpointsDataPlaneAPI(self._api_client, serving_endpoints) self._settings = SettingsAPI(self._api_client) self._shares = SharesAPI(self._api_client) self._statement_execution = StatementExecutionAPI(self._api_client) @@ -234,6 +258,7 @@ def __init__(self, self._system_schemas = SystemSchemasAPI(self._api_client) self._table_constraints = TableConstraintsAPI(self._api_client) self._tables = TablesAPI(self._api_client) + self._temporary_table_credentials = TemporaryTableCredentialsAPI(self._api_client) self._token_management = TokenManagementAPI(self._api_client) self._tokens = TokensAPI(self._api_client) self._users = UsersAPI(self._api_client) @@ -267,6 +292,11 @@ def alerts(self) -> AlertsAPI: """The alerts API can be used to perform CRUD operations on alerts.""" return self._alerts + @property + def alerts_legacy(self) -> AlertsLegacyAPI: + """The alerts API can be used to perform CRUD operations on alerts.""" + return self._alerts_legacy + @property def apps(self) -> AppsAPI: """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on.""" @@ -387,6 +417,11 @@ def functions(self) -> FunctionsAPI: """Functions implement User-Defined Functions (UDFs) in Unity Catalog.""" return self._functions + @property + def genie(self) -> GenieAPI: + """Genie provides a no-code experience for business users, powered by AI/BI.""" + return self._genie + @property def git_credentials(self) -> 
GitCredentialsAPI: """Registers personal access token for Databricks to do operations on behalf of the user.""" @@ -452,6 +487,11 @@ def model_versions(self) -> ModelVersionsAPI: """Databricks provides a hosted version of MLflow Model Registry in Unity Catalog.""" return self._model_versions + @property + def notification_destinations(self) -> NotificationDestinationsAPI: + """The notification destinations API lets you programmatically manage a workspace's notification destinations.""" + return self._notification_destinations + @property def online_tables(self) -> OnlineTablesAPI: """Online tables provide lower latency and higher QPS access to data from Delta tables.""" @@ -459,7 +499,7 @@ def online_tables(self) -> OnlineTablesAPI: @property def permission_migration(self) -> PermissionMigrationAPI: - """This spec contains undocumented permission migration APIs used in https://github.com/databrickslabs/ucx.""" + """APIs for migrating acl permissions, used only by the ucx tool: https://github.com/databrickslabs/ucx.""" return self._permission_migration @property @@ -472,6 +512,16 @@ def pipelines(self) -> PipelinesAPI: """The Delta Live Tables API allows you to create, edit, delete, start, and view details about pipelines.""" return self._pipelines + @property + def policy_compliance_for_clusters(self) -> PolicyComplianceForClustersAPI: + """The policy compliance APIs allow you to view and manage the policy compliance status of clusters in your workspace.""" + return self._policy_compliance_for_clusters + + @property + def policy_compliance_for_jobs(self) -> PolicyComplianceForJobsAPI: + """The compliance APIs allow you to view and manage the policy compliance status of jobs in your workspace.""" + return self._policy_compliance_for_jobs + @property def policy_families(self) -> PolicyFamiliesAPI: """View available policy families.""" @@ -524,19 +574,29 @@ def quality_monitors(self) -> QualityMonitorsAPI: @property def queries(self) -> QueriesAPI: - """These 
endpoints are used for CRUD operations on query definitions.""" + """The queries API can be used to perform CRUD operations on queries.""" return self._queries + @property + def queries_legacy(self) -> QueriesLegacyAPI: + """These endpoints are used for CRUD operations on query definitions.""" + return self._queries_legacy + @property def query_history(self) -> QueryHistoryAPI: - """Access the history of queries through SQL warehouses.""" + """A service responsible for storing and retrieving the list of queries run against SQL endpoints and serverless compute.""" return self._query_history @property def query_visualizations(self) -> QueryVisualizationsAPI: - """This is an evolving API that facilitates the addition and removal of vizualisations from existing queries within the Databricks Workspace.""" + """This is an evolving API that facilitates the addition and removal of visualizations from existing queries in the Databricks Workspace.""" return self._query_visualizations + @property + def query_visualizations_legacy(self) -> QueryVisualizationsLegacyAPI: + """This is an evolving API that facilitates the addition and removal of vizualisations from existing queries within the Databricks Workspace.""" + return self._query_visualizations_legacy + @property def recipient_activation(self) -> RecipientActivationAPI: """The Recipient Activation API is only applicable in the open sharing model where the recipient object has the authentication type of `TOKEN`.""" @@ -557,6 +617,11 @@ def repos(self) -> ReposAPI: """The Repos API allows users to manage their git repos.""" return self._repos + @property + def resource_quotas(self) -> ResourceQuotasAPI: + """Unity Catalog enforces resource quotas on all securable objects, which limits the number of resources that can be created.""" + return self._resource_quotas + @property def schemas(self) -> SchemasAPI: """A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace.""" @@ -573,10 +638,15 
@@ def service_principals(self) -> ServicePrincipalsAPI: return self._service_principals @property - def serving_endpoints(self) -> ServingEndpointsAPI: + def serving_endpoints(self) -> ServingEndpointsExt: """The Serving Endpoints API allows you to create, update, and delete model serving endpoints.""" return self._serving_endpoints + @property + def serving_endpoints_data_plane(self) -> ServingEndpointsDataPlaneAPI: + """Serving endpoints DataPlane provides a set of operations to interact with data plane endpoints for Serving endpoints service.""" + return self._serving_endpoints_data_plane + @property def settings(self) -> SettingsAPI: """Workspace Settings API allows users to manage settings at the workspace level.""" @@ -612,6 +682,11 @@ def tables(self) -> TablesAPI: """A table resides in the third layer of Unity Catalog’s three-level namespace.""" return self._tables + @property + def temporary_table_credentials(self) -> TemporaryTableCredentialsAPI: + """Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud storage locationswhere table data is stored in Databricks.""" + return self._temporary_table_credentials + @property def token_management(self) -> TokenManagementAPI: """Enables administrators to get all tokens and delete tokens for other users.""" @@ -680,31 +755,31 @@ class AccountClient: def __init__(self, *, - host: str = None, - account_id: str = None, - username: str = None, - password: str = None, - client_id: str = None, - client_secret: str = None, - token: str = None, - profile: str = None, - config_file: str = None, - azure_workspace_resource_id: str = None, - azure_client_secret: str = None, - azure_client_id: str = None, - azure_tenant_id: str = None, - azure_environment: str = None, - auth_type: str = None, - cluster_id: str = None, - google_credentials: str = None, - google_service_account: str = None, - debug_truncate_bytes: int = None, - debug_headers: bool = None, + host: Optional[str] = None, + 
account_id: Optional[str] = None, + username: Optional[str] = None, + password: Optional[str] = None, + client_id: Optional[str] = None, + client_secret: Optional[str] = None, + token: Optional[str] = None, + profile: Optional[str] = None, + config_file: Optional[str] = None, + azure_workspace_resource_id: Optional[str] = None, + azure_client_secret: Optional[str] = None, + azure_client_id: Optional[str] = None, + azure_tenant_id: Optional[str] = None, + azure_environment: Optional[str] = None, + auth_type: Optional[str] = None, + cluster_id: Optional[str] = None, + google_credentials: Optional[str] = None, + google_service_account: Optional[str] = None, + debug_truncate_bytes: Optional[int] = None, + debug_headers: Optional[bool] = None, product="unknown", product_version="0.0.0", - credentials_strategy: CredentialsStrategy = None, - credentials_provider: CredentialsStrategy = None, - config: client.Config = None): + credentials_strategy: Optional[CredentialsStrategy] = None, + credentials_provider: Optional[CredentialsStrategy] = None, + config: Optional[client.Config] = None): if not config: config = client.Config(host=host, account_id=account_id, @@ -734,7 +809,6 @@ def __init__(self, self._api_client = client.ApiClient(self._config) self._access_control = AccountAccessControlAPI(self._api_client) self._billable_usage = BillableUsageAPI(self._api_client) - self._budgets = BudgetsAPI(self._api_client) self._credentials = CredentialsAPI(self._api_client) self._custom_app_integration = CustomAppIntegrationAPI(self._api_client) self._encryption_keys = EncryptionKeysAPI(self._api_client) @@ -753,10 +827,12 @@ def __init__(self, self._settings = AccountSettingsAPI(self._api_client) self._storage = StorageAPI(self._api_client) self._storage_credentials = AccountStorageCredentialsAPI(self._api_client) + self._usage_dashboards = UsageDashboardsAPI(self._api_client) self._users = AccountUsersAPI(self._api_client) self._vpc_endpoints = VpcEndpointsAPI(self._api_client) 
self._workspace_assignment = WorkspaceAssignmentAPI(self._api_client) self._workspaces = WorkspacesAPI(self._api_client) + self._budgets = BudgetsAPI(self._api_client) @property def config(self) -> client.Config: @@ -776,11 +852,6 @@ def billable_usage(self) -> BillableUsageAPI: """This API allows you to download billable usage logs for the specified account and date range.""" return self._billable_usage - @property - def budgets(self) -> BudgetsAPI: - """These APIs manage budget configuration including notifications for exceeding a budget for a period.""" - return self._budgets - @property def credentials(self) -> CredentialsAPI: """These APIs manage credential configurations for this workspace.""" @@ -788,7 +859,7 @@ def credentials(self) -> CredentialsAPI: @property def custom_app_integration(self) -> CustomAppIntegrationAPI: - """These APIs enable administrators to manage custom oauth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.""" + """These APIs enable administrators to manage custom OAuth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.""" return self._custom_app_integration @property @@ -843,7 +914,7 @@ def private_access(self) -> PrivateAccessAPI: @property def published_app_integration(self) -> PublishedAppIntegrationAPI: - """These APIs enable administrators to manage published oauth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud.""" + """These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud.""" return self._published_app_integration @property @@ -871,6 +942,11 @@ def storage_credentials(self) -> AccountStorageCredentialsAPI: """These APIs manage storage credentials for 
a particular metastore.""" return self._storage_credentials + @property + def usage_dashboards(self) -> UsageDashboardsAPI: + """These APIs manage usage dashboards for this account.""" + return self._usage_dashboards + @property def users(self) -> AccountUsersAPI: """User identities recognized by Databricks and represented by email addresses.""" @@ -891,6 +967,11 @@ def workspaces(self) -> WorkspacesAPI: """These APIs manage workspaces for this account.""" return self._workspaces + @property + def budgets(self) -> BudgetsAPI: + """These APIs manage budget configurations for this account.""" + return self._budgets + def get_workspace_client(self, workspace: Workspace) -> WorkspaceClient: """Constructs a ``WorkspaceClient`` for the given workspace. diff --git a/databricks/sdk/_base_client.py b/databricks/sdk/_base_client.py new file mode 100644 index 000000000..95ce39cbe --- /dev/null +++ b/databricks/sdk/_base_client.py @@ -0,0 +1,343 @@ +import logging +import urllib.parse +from datetime import timedelta +from types import TracebackType +from typing import (Any, BinaryIO, Callable, Dict, Iterable, Iterator, List, + Optional, Type, Union) + +import requests +import requests.adapters + +from . 
import useragent +from .casing import Casing +from .clock import Clock, RealClock +from .errors import DatabricksError, _ErrorCustomizer, _Parser +from .logger import RoundTrip +from .retries import retried + +logger = logging.getLogger('databricks.sdk') + + +def _fix_host_if_needed(host: Optional[str]) -> Optional[str]: + if not host: + return host + + # Add a default scheme if it's missing + if '://' not in host: + host = 'https://' + host + + o = urllib.parse.urlparse(host) + # remove trailing slash + path = o.path.rstrip('/') + # remove port if 443 + netloc = o.netloc + if o.port == 443: + netloc = netloc.split(':')[0] + + return urllib.parse.urlunparse((o.scheme, netloc, path, o.params, o.query, o.fragment)) + + +class _BaseClient: + + def __init__(self, + debug_truncate_bytes: int = None, + retry_timeout_seconds: int = None, + user_agent_base: str = None, + header_factory: Callable[[], dict] = None, + max_connection_pools: int = None, + max_connections_per_pool: int = None, + pool_block: bool = True, + http_timeout_seconds: float = None, + extra_error_customizers: List[_ErrorCustomizer] = None, + debug_headers: bool = False, + clock: Clock = None): + """ + :param debug_truncate_bytes: + :param retry_timeout_seconds: + :param user_agent_base: + :param header_factory: A function that returns a dictionary of headers to include in the request. + :param max_connection_pools: Number of urllib3 connection pools to cache before discarding the least + recently used pool. Python requests default value is 10. + :param max_connections_per_pool: The maximum number of connections to save in the pool. Improves performance + in multithreaded situations. For now, we're setting it to the same value as connection_pool_size. + :param pool_block: If pool_block is False, then more connections will are created, but not saved after the + first use. Blocks when no free connections are available. urllib3 ensures that no more than + pool_maxsize connections are used at a time. 
Prevents platform from flooding. By default, requests library + doesn't block. + :param http_timeout_seconds: + :param extra_error_customizers: + :param debug_headers: Whether to include debug headers in the request log. + :param clock: Clock object to use for time-related operations. + """ + + self._debug_truncate_bytes = debug_truncate_bytes or 96 + self._debug_headers = debug_headers + self._retry_timeout_seconds = retry_timeout_seconds or 300 + self._user_agent_base = user_agent_base or useragent.to_string() + self._header_factory = header_factory + self._clock = clock or RealClock() + self._session = requests.Session() + self._session.auth = self._authenticate + + # We don't use `max_retries` from HTTPAdapter to align with a more production-ready + # retry strategy established in the Databricks SDK for Go. See _is_retryable and + # @retried for more details. + http_adapter = requests.adapters.HTTPAdapter(pool_connections=max_connections_per_pool or 20, + pool_maxsize=max_connection_pools or 20, + pool_block=pool_block) + self._session.mount("https://", http_adapter) + + # Default to 60 seconds + self._http_timeout_seconds = http_timeout_seconds or 60 + + self._error_parser = _Parser(extra_error_customizers=extra_error_customizers) + + def _authenticate(self, r: requests.PreparedRequest) -> requests.PreparedRequest: + if self._header_factory: + headers = self._header_factory() + for k, v in headers.items(): + r.headers[k] = v + return r + + @staticmethod + def _fix_query_string(query: Optional[dict] = None) -> Optional[dict]: + # Convert True -> "true" for Databricks APIs to understand booleans. + # See: https://github.com/databricks/databricks-sdk-py/issues/142 + if query is None: + return None + with_fixed_bools = {k: v if type(v) != bool else ('true' if v else 'false') for k, v in query.items()} + + # Query parameters may be nested, e.g. 
+ # {'filter_by': {'user_ids': [123, 456]}} + # The HTTP-compatible representation of this is + # filter_by.user_ids=123&filter_by.user_ids=456 + # To achieve this, we convert the above dictionary to + # {'filter_by.user_ids': [123, 456]} + # See the following for more information: + # https://cloud.google.com/endpoints/docs/grpc-service-config/reference/rpc/google.api#google.api.HttpRule + def flatten_dict(d: Dict[str, Any]) -> Dict[str, Any]: + for k1, v1 in d.items(): + if isinstance(v1, dict): + v1 = dict(flatten_dict(v1)) + for k2, v2 in v1.items(): + yield f"{k1}.{k2}", v2 + else: + yield k1, v1 + + flattened = dict(flatten_dict(with_fixed_bools)) + return flattened + + def do(self, + method: str, + url: str, + query: dict = None, + headers: dict = None, + body: dict = None, + raw: bool = False, + files=None, + data=None, + auth: Callable[[requests.PreparedRequest], requests.PreparedRequest] = None, + response_headers: List[str] = None) -> Union[dict, list, BinaryIO]: + if headers is None: + headers = {} + headers['User-Agent'] = self._user_agent_base + retryable = retried(timeout=timedelta(seconds=self._retry_timeout_seconds), + is_retryable=self._is_retryable, + clock=self._clock) + response = retryable(self._perform)(method, + url, + query=query, + headers=headers, + body=body, + raw=raw, + files=files, + data=data, + auth=auth) + + resp = dict() + for header in response_headers if response_headers else []: + resp[header] = response.headers.get(Casing.to_header_case(header)) + if raw: + resp["contents"] = _StreamingResponse(response) + return resp + if not len(response.content): + return resp + + json_response = response.json() + if json_response is None: + return resp + + if isinstance(json_response, list): + return json_response + + return {**resp, **json_response} + + @staticmethod + def _is_retryable(err: BaseException) -> Optional[str]: + # this method is Databricks-specific port of urllib3 retries + # (see 
https://github.com/urllib3/urllib3/blob/main/src/urllib3/util/retry.py) + # and Databricks SDK for Go retries + # (see https://github.com/databricks/databricks-sdk-go/blob/main/apierr/errors.go) + from urllib3.exceptions import ProxyError + if isinstance(err, ProxyError): + err = err.original_error + if isinstance(err, requests.ConnectionError): + # corresponds to `connection reset by peer` and `connection refused` errors from Go, + # which are generally related to the temporary glitches in the networking stack, + # also caused by endpoint protection software, like ZScaler, to drop connections while + # not yet authenticated. + # + # return a simple string for debug log readability, as `raise TimeoutError(...) from err` + # will bubble up the original exception in case we reach max retries. + return f'cannot connect' + if isinstance(err, requests.Timeout): + # corresponds to `TLS handshake timeout` and `i/o timeout` in Go. + # + # return a simple string for debug log readability, as `raise TimeoutError(...) from err` + # will bubble up the original exception in case we reach max retries. 
+ return f'timeout' + if isinstance(err, DatabricksError): + message = str(err) + transient_error_string_matches = [ + "com.databricks.backend.manager.util.UnknownWorkerEnvironmentException", + "does not have any associated worker environments", "There is no worker environment with id", + "Unknown worker environment", "ClusterNotReadyException", "Unexpected error", + "Please try again later or try a faster operation.", + "RPC token bucket limit has been exceeded", + ] + for substring in transient_error_string_matches: + if substring not in message: + continue + return f'matched {substring}' + return None + + def _perform(self, + method: str, + url: str, + query: dict = None, + headers: dict = None, + body: dict = None, + raw: bool = False, + files=None, + data=None, + auth: Callable[[requests.PreparedRequest], requests.PreparedRequest] = None): + response = self._session.request(method, + url, + params=self._fix_query_string(query), + json=body, + headers=headers, + files=files, + data=data, + auth=auth, + stream=raw, + timeout=self._http_timeout_seconds) + self._record_request_log(response, raw=raw or data is not None or files is not None) + error = self._error_parser.get_api_error(response) + if error is not None: + raise error from None + return response + + def _record_request_log(self, response: requests.Response, raw: bool = False) -> None: + if not logger.isEnabledFor(logging.DEBUG): + return + logger.debug(RoundTrip(response, self._debug_headers, self._debug_truncate_bytes, raw).generate()) + + +class _StreamingResponse(BinaryIO): + _response: requests.Response + _buffer: bytes + _content: Union[Iterator[bytes], None] + _chunk_size: Union[int, None] + _closed: bool = False + + def fileno(self) -> int: + pass + + def flush(self) -> int: + pass + + def __init__(self, response: requests.Response, chunk_size: Union[int, None] = None): + self._response = response + self._buffer = b'' + self._content = None + self._chunk_size = chunk_size + + def _open(self) -> 
None: + if self._closed: + raise ValueError("I/O operation on closed file") + if not self._content: + self._content = self._response.iter_content(chunk_size=self._chunk_size) + + def __enter__(self) -> BinaryIO: + self._open() + return self + + def set_chunk_size(self, chunk_size: Union[int, None]) -> None: + self._chunk_size = chunk_size + + def close(self) -> None: + self._response.close() + self._closed = True + + def isatty(self) -> bool: + return False + + def read(self, n: int = -1) -> bytes: + self._open() + read_everything = n < 0 + remaining_bytes = n + res = b'' + while remaining_bytes > 0 or read_everything: + if len(self._buffer) == 0: + try: + self._buffer = next(self._content) + except StopIteration: + break + bytes_available = len(self._buffer) + to_read = bytes_available if read_everything else min(remaining_bytes, bytes_available) + res += self._buffer[:to_read] + self._buffer = self._buffer[to_read:] + remaining_bytes -= to_read + return res + + def readable(self) -> bool: + return self._content is not None + + def readline(self, __limit: int = ...) -> bytes: + raise NotImplementedError() + + def readlines(self, __hint: int = ...) -> List[bytes]: + raise NotImplementedError() + + def seek(self, __offset: int, __whence: int = ...) -> int: + raise NotImplementedError() + + def seekable(self) -> bool: + return False + + def tell(self) -> int: + raise NotImplementedError() + + def truncate(self, __size: Union[int, None] = ...) 
-> int: + raise NotImplementedError() + + def writable(self) -> bool: + return False + + def write(self, s: Union[bytes, bytearray]) -> int: + raise NotImplementedError() + + def writelines(self, lines: Iterable[bytes]) -> None: + raise NotImplementedError() + + def __next__(self) -> bytes: + return self.read(1) + + def __iter__(self) -> Iterator[bytes]: + return self._content + + def __exit__(self, t: Union[Type[BaseException], None], value: Union[BaseException, None], + traceback: Union[TracebackType, None]) -> None: + self._content = None + self._buffer = b'' + self.close() diff --git a/databricks/sdk/config.py b/databricks/sdk/config.py index ca1c2cfc1..b4efdf603 100644 --- a/databricks/sdk/config.py +++ b/databricks/sdk/config.py @@ -3,19 +3,21 @@ import logging import os import pathlib -import platform import sys import urllib.parse -from typing import Dict, Iterable, List, Optional, Tuple +from typing import Dict, Iterable, Optional import requests +from . import useragent +from ._base_client import _fix_host_if_needed from .clock import Clock, RealClock from .credentials_provider import CredentialsStrategy, DefaultCredentials from .environments import (ALL_ENVS, AzureEnvironment, Cloud, DatabricksEnvironment, get_environment_for_hostname) -from .oauth import OidcEndpoints, Token -from .version import __version__ +from .oauth import (OidcEndpoints, Token, get_account_endpoints, + get_azure_entra_id_workspace_endpoints, + get_workspace_endpoints) logger = logging.getLogger('databricks.sdk') @@ -44,30 +46,14 @@ def __repr__(self) -> str: return f"" -_DEFAULT_PRODUCT_NAME = 'unknown' -_DEFAULT_PRODUCT_VERSION = '0.0.0' -_STATIC_USER_AGENT: Tuple[str, str, List[str]] = (_DEFAULT_PRODUCT_NAME, _DEFAULT_PRODUCT_VERSION, []) - - def with_product(product: str, product_version: str): """[INTERNAL API] Change the product name and version used in the User-Agent header.""" - global _STATIC_USER_AGENT - prev_product, prev_version, prev_other_info = _STATIC_USER_AGENT - 
logger.debug(f'Changing product from {prev_product}/{prev_version} to {product}/{product_version}') - _STATIC_USER_AGENT = product, product_version, prev_other_info + useragent.with_product(product, product_version) def with_user_agent_extra(key: str, value: str): """[INTERNAL API] Add extra metadata to the User-Agent header when developing a library.""" - global _STATIC_USER_AGENT - product_name, product_version, other_info = _STATIC_USER_AGENT - for item in other_info: - if item.startswith(f"{key}/"): - # ensure that we don't have duplicates - other_info.remove(item) - break - other_info.append(f"{key}/{value}") - _STATIC_USER_AGENT = product_name, product_version, other_info + useragent.with_extra(key, value) class Config: @@ -109,23 +95,14 @@ class Config: def __init__(self, *, # Deprecated. Use credentials_strategy instead. - credentials_provider: CredentialsStrategy = None, - credentials_strategy: CredentialsStrategy = None, - product=_DEFAULT_PRODUCT_NAME, - product_version=_DEFAULT_PRODUCT_VERSION, - clock: Clock = None, + credentials_provider: Optional[CredentialsStrategy] = None, + credentials_strategy: Optional[CredentialsStrategy] = None, + product=None, + product_version=None, + clock: Optional[Clock] = None, **kwargs): self._header_factory = None self._inner = {} - # as in SDK for Go, pull information from global static user agent context, - # so that we can track additional metadata for mid-stream libraries, as well - # as for cases, when the downstream product is used as a library and is not - # configured with a proper product name and version. 
- static_product, static_version, _ = _STATIC_USER_AGENT - if product == _DEFAULT_PRODUCT_NAME: - product = static_product - if product_version == _DEFAULT_PRODUCT_VERSION: - product_version = static_version self._user_agent_other_info = [] if credentials_strategy and credentials_provider: raise ValueError( @@ -147,8 +124,7 @@ def __init__(self, self._fix_host_if_needed() self._validate() self.init_auth() - self._product = product - self._product_version = product_version + self._init_product(product, product_version) except ValueError as e: message = self.wrap_debug_info(str(e)) raise ValueError(message) from e @@ -260,47 +236,19 @@ def is_any_auth_configured(self) -> bool: @property def user_agent(self): """ Returns User-Agent header used by this SDK """ - py_version = platform.python_version() - os_name = platform.uname().system.lower() - - ua = [ - f"{self._product}/{self._product_version}", f"databricks-sdk-py/{__version__}", - f"python/{py_version}", f"os/{os_name}", f"auth/{self.auth_type}", - ] - if len(self._user_agent_other_info) > 0: - ua.append(' '.join(self._user_agent_other_info)) - # as in SDK for Go, pull information from global static user agent context, - # so that we can track additional metadata for mid-stream libraries. this value - # is shared across all instances of Config objects intentionally. 
- _, _, static_info = _STATIC_USER_AGENT - if len(static_info) > 0: - ua.append(' '.join(static_info)) - if len(self._upstream_user_agent) > 0: - ua.append(self._upstream_user_agent) - if 'DATABRICKS_RUNTIME_VERSION' in os.environ: - runtime_version = os.environ['DATABRICKS_RUNTIME_VERSION'] - if runtime_version != '': - runtime_version = self._sanitize_header_value(runtime_version) - ua.append(f'runtime/{runtime_version}') - - return ' '.join(ua) - @staticmethod - def _sanitize_header_value(value: str) -> str: - value = value.replace(' ', '-') - value = value.replace('/', '-') - return value + # global user agent includes SDK version, product name & version, platform info, + # and global extra info. Config can have specific extra info associated with it, + # such as an override product, auth type, and other user-defined information. + return useragent.to_string(self._product_info, + [("auth", self.auth_type)] + self._user_agent_other_info) @property def _upstream_user_agent(self) -> str: - product = os.environ.get('DATABRICKS_SDK_UPSTREAM', None) - product_version = os.environ.get('DATABRICKS_SDK_UPSTREAM_VERSION', None) - if product is not None and product_version is not None: - return f"upstream/{product} upstream-version/{product_version}" - return "" + return " ".join(f"{k}/{v}" for k, v in useragent._get_upstream_user_agent_info()) def with_user_agent_extra(self, key: str, value: str) -> 'Config': - self._user_agent_other_info.append(f"{key}/{value}") + self._user_agent_other_info.append((key, value)) return self @property @@ -309,24 +257,10 @@ def oidc_endpoints(self) -> Optional[OidcEndpoints]: if not self.host: return None if self.is_azure and self.azure_client_id: - # Retrieve authorize endpoint to retrieve token endpoint after - res = requests.get(f'{self.host}/oidc/oauth2/v2.0/authorize', allow_redirects=False) - real_auth_url = res.headers.get('location') - if not real_auth_url: - return None - return OidcEndpoints(authorization_endpoint=real_auth_url, 
- token_endpoint=real_auth_url.replace('/authorize', '/token')) + return get_azure_entra_id_workspace_endpoints(self.host) if self.is_account_client and self.account_id: - prefix = f'{self.host}/oidc/accounts/{self.account_id}' - return OidcEndpoints(authorization_endpoint=f'{prefix}/v1/authorize', - token_endpoint=f'{prefix}/v1/token') - oidc = f'{self.host}/oidc/.well-known/oauth-authorization-server' - res = requests.get(oidc) - if res.status_code != 200: - return None - auth_metadata = res.json() - return OidcEndpoints(authorization_endpoint=auth_metadata.get('authorization_endpoint'), - token_endpoint=auth_metadata.get('token_endpoint')) + return get_account_endpoints(self.host, self.account_id) + return get_workspace_endpoints(self.host) def debug_string(self) -> str: """ Returns log-friendly representation of configured attributes """ @@ -401,15 +335,36 @@ def attributes(cls) -> Iterable[ConfigAttribute]: return cls._attributes def _fix_host_if_needed(self): - if not self.host: + updated_host = _fix_host_if_needed(self.host) + if updated_host: + self.host = updated_host + + def load_azure_tenant_id(self): + """[Internal] Load the Azure tenant ID from the Azure Databricks login page. 
+ + If the tenant ID is already set, this method does nothing.""" + if not self.is_azure or self.azure_tenant_id is not None or self.host is None: return - # fix url to remove trailing slash - o = urllib.parse.urlparse(self.host) - if not o.hostname: - # only hostname is specified - self.host = f"https://{self.host}" - else: - self.host = f"{o.scheme}://{o.netloc}" + login_url = f'{self.host}/aad/auth' + logger.debug(f'Loading tenant ID from {login_url}') + resp = requests.get(login_url, allow_redirects=False) + if resp.status_code // 100 != 3: + logger.debug( + f'Failed to get tenant ID from {login_url}: expected status code 3xx, got {resp.status_code}') + return + entra_id_endpoint = resp.headers.get('Location') + if entra_id_endpoint is None: + logger.debug(f'No Location header in response from {login_url}') + return + # The Location header has the following form: https://login.microsoftonline.com//oauth2/authorize?... + # The domain may change depending on the Azure cloud (e.g. login.microsoftonline.us for US Government cloud). 
+ url = urllib.parse.urlparse(entra_id_endpoint) + path_segments = url.path.split('/') + if len(path_segments) < 2: + logger.debug(f'Invalid path in Location header: {url.path}') + return + self.azure_tenant_id = path_segments[1] + logger.debug(f'Loaded tenant ID: {self.azure_tenant_id}') def _set_inner_config(self, keyword_args: Dict[str, any]): for attr in self.attributes(): @@ -498,6 +453,13 @@ def init_auth(self): except ValueError as e: raise ValueError(f'{self._credentials_strategy.auth_type()} auth: {e}') from e + def _init_product(self, product, product_version): + if product is not None or product_version is not None: + default_product, default_version = useragent.product() + self._product_info = (product or default_product, product_version or default_version) + else: + self._product_info = None + def __repr__(self): return f'<{self.debug_string()}>' diff --git a/databricks/sdk/core.py b/databricks/sdk/core.py index cacbad908..eab22cd71 100644 --- a/databricks/sdk/core.py +++ b/databricks/sdk/core.py @@ -1,21 +1,13 @@ import re -import urllib.parse -from datetime import timedelta -from json import JSONDecodeError -from types import TracebackType -from typing import Any, BinaryIO, Iterator, Type +from typing import BinaryIO from urllib.parse import urlencode -from requests.adapters import HTTPAdapter - -from .casing import Casing +from ._base_client import _BaseClient from .config import * # To preserve backwards compatibility (as these definitions were previously in this module) from .credentials_provider import * -from .errors import DatabricksError, error_mapper -from .errors.private_link import _is_private_link_redirect +from .errors import DatabricksError, _ErrorCustomizer from .oauth import retrieve_token -from .retries import retried __all__ = ['Config', 'DatabricksError'] @@ -27,51 +19,19 @@ class ApiClient: - _cfg: Config - _RETRY_AFTER_DEFAULT: int = 1 - - def __init__(self, cfg: Config = None): - - if cfg is None: - cfg = Config() + def 
__init__(self, cfg: Config): self._cfg = cfg - # See https://github.com/databricks/databricks-sdk-go/blob/main/client/client.go#L34-L35 - self._debug_truncate_bytes = cfg.debug_truncate_bytes if cfg.debug_truncate_bytes else 96 - self._retry_timeout_seconds = cfg.retry_timeout_seconds if cfg.retry_timeout_seconds else 300 - self._user_agent_base = cfg.user_agent - self._session = requests.Session() - self._session.auth = self._authenticate - - # Number of urllib3 connection pools to cache before discarding the least - # recently used pool. Python requests default value is 10. - pool_connections = cfg.max_connection_pools - if pool_connections is None: - pool_connections = 20 - - # The maximum number of connections to save in the pool. Improves performance - # in multithreaded situations. For now, we're setting it to the same value - # as connection_pool_size. - pool_maxsize = cfg.max_connections_per_pool - if cfg.max_connections_per_pool is None: - pool_maxsize = pool_connections - - # If pool_block is False, then more connections will are created, - # but not saved after the first use. Blocks when no free connections are available. - # urllib3 ensures that no more than pool_maxsize connections are used at a time. - # Prevents platform from flooding. By default, requests library doesn't block. - pool_block = True - - # We don't use `max_retries` from HTTPAdapter to align with a more production-ready - # retry strategy established in the Databricks SDK for Go. See _is_retryable and - # @retried for more details. 
- http_adapter = HTTPAdapter(pool_connections=pool_connections, - pool_maxsize=pool_maxsize, - pool_block=pool_block) - self._session.mount("https://", http_adapter) - - # Default to 60 seconds - self._http_timeout_seconds = cfg.http_timeout_seconds if cfg.http_timeout_seconds else 60 + self._api_client = _BaseClient(debug_truncate_bytes=cfg.debug_truncate_bytes, + retry_timeout_seconds=cfg.retry_timeout_seconds, + user_agent_base=cfg.user_agent, + header_factory=cfg.authenticate, + max_connection_pools=cfg.max_connection_pools, + max_connections_per_pool=cfg.max_connections_per_pool, + pool_block=True, + http_timeout_seconds=cfg.http_timeout_seconds, + extra_error_customizers=[_AddDebugErrorCustomizer(cfg)], + clock=cfg.clock) @property def account_id(self) -> str: @@ -81,40 +41,6 @@ def account_id(self) -> str: def is_account_client(self) -> bool: return self._cfg.is_account_client - def _authenticate(self, r: requests.PreparedRequest) -> requests.PreparedRequest: - headers = self._cfg.authenticate() - for k, v in headers.items(): - r.headers[k] = v - return r - - @staticmethod - def _fix_query_string(query: Optional[dict] = None) -> Optional[dict]: - # Convert True -> "true" for Databricks APIs to understand booleans. - # See: https://github.com/databricks/databricks-sdk-py/issues/142 - if query is None: - return None - with_fixed_bools = {k: v if type(v) != bool else ('true' if v else 'false') for k, v in query.items()} - - # Query parameters may be nested, e.g. 
- # {'filter_by': {'user_ids': [123, 456]}} - # The HTTP-compatible representation of this is - # filter_by.user_ids=123&filter_by.user_ids=456 - # To achieve this, we convert the above dictionary to - # {'filter_by.user_ids': [123, 456]} - # See the following for more information: - # https://cloud.google.com/endpoints/docs/grpc-service-config/reference/rpc/google.api#google.api.HttpRule - def flatten_dict(d: Dict[str, Any]) -> Dict[str, Any]: - for k1, v1 in d.items(): - if isinstance(v1, dict): - v1 = dict(flatten_dict(v1)) - for k2, v2 in v1.items(): - yield f"{k1}.{k2}", v2 - else: - yield k1, v1 - - flattened = dict(flatten_dict(with_fixed_bools)) - return flattened - def get_oauth_token(self, auth_details: str) -> Token: if not self._cfg.auth_type: self._cfg.authenticate() @@ -133,353 +59,41 @@ def get_oauth_token(self, auth_details: str) -> Token: def do(self, method: str, - path: str, + path: str = None, + url: str = None, query: dict = None, headers: dict = None, body: dict = None, raw: bool = False, files=None, data=None, - response_headers: List[str] = None) -> Union[dict, BinaryIO]: - # Remove extra `/` from path for Files API - # Once we've fixed the OpenAPI spec, we can remove this - path = re.sub('^/api/2.0/fs/files//', '/api/2.0/fs/files/', path) - if headers is None: - headers = {} - headers['User-Agent'] = self._user_agent_base - retryable = retried(timeout=timedelta(seconds=self._retry_timeout_seconds), - is_retryable=self._is_retryable, - clock=self._cfg.clock) - response = retryable(self._perform)(method, - path, - query=query, - headers=headers, - body=body, - raw=raw, - files=files, - data=data) - - resp = dict() - for header in response_headers if response_headers else []: - resp[header] = response.headers.get(Casing.to_header_case(header)) - if raw: - resp["contents"] = StreamingResponse(response) - return resp - if not len(response.content): - return resp - - jsonResponse = response.json() - if jsonResponse is None: - return resp - - if 
isinstance(jsonResponse, list): - return jsonResponse - - return {**resp, **jsonResponse} - - @staticmethod - def _is_retryable(err: BaseException) -> Optional[str]: - # this method is Databricks-specific port of urllib3 retries - # (see https://github.com/urllib3/urllib3/blob/main/src/urllib3/util/retry.py) - # and Databricks SDK for Go retries - # (see https://github.com/databricks/databricks-sdk-go/blob/main/apierr/errors.go) - from urllib3.exceptions import ProxyError - if isinstance(err, ProxyError): - err = err.original_error - if isinstance(err, requests.ConnectionError): - # corresponds to `connection reset by peer` and `connection refused` errors from Go, - # which are generally related to the temporary glitches in the networking stack, - # also caused by endpoint protection software, like ZScaler, to drop connections while - # not yet authenticated. - # - # return a simple string for debug log readability, as `raise TimeoutError(...) from err` - # will bubble up the original exception in case we reach max retries. - return f'cannot connect' - if isinstance(err, requests.Timeout): - # corresponds to `TLS handshake timeout` and `i/o timeout` in Go. - # - # return a simple string for debug log readability, as `raise TimeoutError(...) from err` - # will bubble up the original exception in case we reach max retries. 
- return f'timeout' - if isinstance(err, DatabricksError): - message = str(err) - transient_error_string_matches = [ - "com.databricks.backend.manager.util.UnknownWorkerEnvironmentException", - "does not have any associated worker environments", "There is no worker environment with id", - "Unknown worker environment", "ClusterNotReadyException", "Unexpected error", - "Please try again later or try a faster operation.", - "RPC token bucket limit has been exceeded", - ] - for substring in transient_error_string_matches: - if substring not in message: - continue - return f'matched {substring}' - return None - - @classmethod - def _parse_retry_after(cls, response: requests.Response) -> Optional[int]: - retry_after = response.headers.get("Retry-After") - if retry_after is None: - # 429 requests should include a `Retry-After` header, but if it's missing, - # we default to 1 second. - return cls._RETRY_AFTER_DEFAULT - # If the request is throttled, try parse the `Retry-After` header and sleep - # for the specified number of seconds. Note that this header can contain either - # an integer or a RFC1123 datetime string. - # See https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After - # - # For simplicity, we only try to parse it as an integer, as this is what Databricks - # platform returns. Otherwise, we fall back and don't sleep. - try: - return int(retry_after) - except ValueError: - logger.debug(f'Invalid Retry-After header received: {retry_after}. 
Defaulting to 1') - # defaulting to 1 sleep second to make self._is_retryable() simpler - return cls._RETRY_AFTER_DEFAULT - - def _perform(self, - method: str, - path: str, - query: dict = None, - headers: dict = None, - body: dict = None, - raw: bool = False, - files=None, - data=None): - response = self._session.request(method, - f"{self._cfg.host}{path}", - params=self._fix_query_string(query), - json=body, - headers=headers, - files=files, - data=data, - stream=raw, - timeout=self._http_timeout_seconds) - try: - self._record_request_log(response, raw=raw or data is not None or files is not None) - if not response.ok: # internally calls response.raise_for_status() - # TODO: experiment with traceback pruning for better readability - # See https://stackoverflow.com/a/58821552/277035 - payload = response.json() - raise self._make_nicer_error(response=response, **payload) from None - # Private link failures happen via a redirect to the login page. From a requests-perspective, the request - # is successful, but the response is not what we expect. We need to handle this case separately. - if _is_private_link_redirect(response): - raise self._make_nicer_error(response=response) from None - return response - except requests.exceptions.JSONDecodeError: - message = self._make_sense_from_html(response.text) - if not message: - message = response.reason - raise self._make_nicer_error(response=response, message=message) from None - - @staticmethod - def _make_sense_from_html(txt: str) -> str: - matchers = [r'
<pre>(.*)</pre>
', r'(.*)'] - for attempt in matchers: - expr = re.compile(attempt, re.MULTILINE) - match = expr.search(txt) - if not match: - continue - return match.group(1).strip() - return txt - - def _make_nicer_error(self, *, response: requests.Response, **kwargs) -> DatabricksError: - status_code = response.status_code - message = kwargs.get('message', 'request failed') - is_http_unauthorized_or_forbidden = status_code in (401, 403) - is_too_many_requests_or_unavailable = status_code in (429, 503) - if is_http_unauthorized_or_forbidden: - message = self._cfg.wrap_debug_info(message) - if is_too_many_requests_or_unavailable: - kwargs['retry_after_secs'] = self._parse_retry_after(response) - kwargs['message'] = message - return error_mapper(response, kwargs) - - def _record_request_log(self, response: requests.Response, raw=False): - if not logger.isEnabledFor(logging.DEBUG): - return - request = response.request - url = urllib.parse.urlparse(request.url) - query = '' - if url.query: - query = f'?{urllib.parse.unquote(url.query)}' - sb = [f'{request.method} {urllib.parse.unquote(url.path)}{query}'] - if self._cfg.debug_headers: - if self._cfg.host: - sb.append(f'> * Host: {self._cfg.host}') - for k, v in request.headers.items(): - sb.append(f'> * {k}: {self._only_n_bytes(v, self._debug_truncate_bytes)}') - if request.body: - sb.append("> [raw stream]" if raw else self._redacted_dump("> ", request.body)) - sb.append(f'< {response.status_code} {response.reason}') - if raw and response.headers.get('Content-Type', None) != 'application/json': - # Raw streams with `Transfer-Encoding: chunked` do not have `Content-Type` header - sb.append("< [raw stream]") - elif response.content: - sb.append(self._redacted_dump("< ", response.content)) - logger.debug("\n".join(sb)) - - @staticmethod - def _mask(m: Dict[str, any]): - for k in m: - if k in {'bytes_value', 'string_value', 'token_value', 'value', 'content'}: - m[k] = "**REDACTED**" - - @staticmethod - def _map_keys(m: Dict[str, any]) 
-> List[str]: - keys = list(m.keys()) - keys.sort() - return keys - - @staticmethod - def _only_n_bytes(j: str, num_bytes: int = 96) -> str: - diff = len(j.encode('utf-8')) - num_bytes - if diff > 0: - return f"{j[:num_bytes]}... ({diff} more bytes)" - return j - - def _recursive_marshal_dict(self, m, budget) -> dict: - out = {} - self._mask(m) - for k in sorted(m.keys()): - raw = self._recursive_marshal(m[k], budget) - out[k] = raw - budget -= len(str(raw)) - return out - - def _recursive_marshal_list(self, s, budget) -> list: - out = [] - for i in range(len(s)): - if i > 0 >= budget: - out.append("... (%d additional elements)" % (len(s) - len(out))) - break - raw = self._recursive_marshal(s[i], budget) - out.append(raw) - budget -= len(str(raw)) - return out - - def _recursive_marshal(self, v: any, budget: int) -> any: - if isinstance(v, dict): - return self._recursive_marshal_dict(v, budget) - elif isinstance(v, list): - return self._recursive_marshal_list(v, budget) - elif isinstance(v, str): - return self._only_n_bytes(v, self._debug_truncate_bytes) - else: - return v - - def _redacted_dump(self, prefix: str, body: str) -> str: - if len(body) == 0: - return "" - try: - # Unmarshal body into primitive types. - tmp = json.loads(body) - max_bytes = 96 - if self._debug_truncate_bytes > max_bytes: - max_bytes = self._debug_truncate_bytes - # Re-marshal body taking redaction and character limit into account. 
- raw = self._recursive_marshal(tmp, max_bytes) - return "\n".join([f'{prefix}{line}' for line in json.dumps(raw, indent=2).split("\n")]) - except JSONDecodeError: - return f'{prefix}[non-JSON document of {len(body)} bytes]' - - -class StreamingResponse(BinaryIO): - _response: requests.Response - _buffer: bytes - _content: Union[Iterator[bytes], None] - _chunk_size: Union[int, None] - _closed: bool = False - - def fileno(self) -> int: - pass - - def flush(self) -> int: - pass - - def __init__(self, response: requests.Response, chunk_size: Union[int, None] = None): - self._response = response - self._buffer = b'' - self._content = None - self._chunk_size = chunk_size - - def _open(self) -> None: - if self._closed: - raise ValueError("I/O operation on closed file") - if not self._content: - self._content = self._response.iter_content(chunk_size=self._chunk_size) - - def __enter__(self) -> BinaryIO: - self._open() - return self - - def set_chunk_size(self, chunk_size: Union[int, None]) -> None: - self._chunk_size = chunk_size - - def close(self) -> None: - self._response.close() - self._closed = True - - def isatty(self) -> bool: - return False - - def read(self, n: int = -1) -> bytes: - self._open() - read_everything = n < 0 - remaining_bytes = n - res = b'' - while remaining_bytes > 0 or read_everything: - if len(self._buffer) == 0: - try: - self._buffer = next(self._content) - except StopIteration: - break - bytes_available = len(self._buffer) - to_read = bytes_available if read_everything else min(remaining_bytes, bytes_available) - res += self._buffer[:to_read] - self._buffer = self._buffer[to_read:] - remaining_bytes -= to_read - return res - - def readable(self) -> bool: - return self._content is not None - - def readline(self, __limit: int = ...) -> bytes: - raise NotImplementedError() - - def readlines(self, __hint: int = ...) -> List[bytes]: - raise NotImplementedError() - - def seek(self, __offset: int, __whence: int = ...) 
-> int: - raise NotImplementedError() - - def seekable(self) -> bool: - return False - - def tell(self) -> int: - raise NotImplementedError() - - def truncate(self, __size: Union[int, None] = ...) -> int: - raise NotImplementedError() - - def writable(self) -> bool: - return False - - def write(self, s: Union[bytes, bytearray]) -> int: - raise NotImplementedError() - - def writelines(self, lines: Iterable[bytes]) -> None: - raise NotImplementedError() - - def __next__(self) -> bytes: - return self.read(1) - - def __iter__(self) -> Iterator[bytes]: - return self._content + auth: Callable[[requests.PreparedRequest], requests.PreparedRequest] = None, + response_headers: List[str] = None) -> Union[dict, list, BinaryIO]: + if url is None: + # Remove extra `/` from path for Files API + # Once we've fixed the OpenAPI spec, we can remove this + path = re.sub('^/api/2.0/fs/files//', '/api/2.0/fs/files/', path) + url = f"{self._cfg.host}{path}" + return self._api_client.do(method=method, + url=url, + query=query, + headers=headers, + body=body, + raw=raw, + files=files, + data=data, + auth=auth, + response_headers=response_headers) + + +class _AddDebugErrorCustomizer(_ErrorCustomizer): + """An error customizer that adds debug information about the configuration to unauthenticated and + unauthorized errors.""" + + def __init__(self, cfg: Config): + self._cfg = cfg - def __exit__(self, t: Union[Type[BaseException], None], value: Union[BaseException, None], - traceback: Union[TracebackType, None]) -> None: - self._content = None - self._buffer = b'' - self.close() + def customize_error(self, response: requests.Response, kwargs: dict): + if response.status_code in (401, 403): + message = kwargs.get('message', 'request failed') + kwargs['message'] = self._cfg.wrap_debug_info(message) diff --git a/databricks/sdk/credentials_provider.py b/databricks/sdk/credentials_provider.py index 50c2eee89..a79151b5a 100644 --- a/databricks/sdk/credentials_provider.py +++ 
b/databricks/sdk/credentials_provider.py @@ -9,14 +9,15 @@ import platform import subprocess import sys +import time from datetime import datetime -from typing import Callable, Dict, List, Optional, Union +from typing import Callable, Dict, List, Optional, Tuple, Union -import google.auth +import google.auth # type: ignore import requests -from google.auth import impersonated_credentials -from google.auth.transport.requests import Request -from google.oauth2 import service_account +from google.auth import impersonated_credentials # type: ignore +from google.auth.transport.requests import Request # type: ignore +from google.oauth2 import service_account # type: ignore from .azure import add_sp_management_token, add_workspace_id_header from .oauth import (ClientCredentials, OAuthClient, Refreshable, Token, @@ -186,30 +187,35 @@ def token() -> Token: def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]: if cfg.auth_type != 'external-browser': return None + client_id, client_secret = None, None if cfg.client_id: client_id = cfg.client_id - elif cfg.is_aws: + client_secret = cfg.client_secret + elif cfg.azure_client_id: + client_id = cfg.azure_client_id + client_secret = cfg.azure_client_secret + + if not client_id: client_id = 'databricks-cli' - elif cfg.is_azure: - # Use Azure AD app for cases when Azure CLI is not available on the machine. - # App has to be registered as Single-page multi-tenant to support PKCE - # TODO: temporary app ID, change it later. - client_id = '6128a518-99a9-425b-8333-4cc94f04cacd' - else: - raise ValueError(f'local browser SSO is not supported') - oauth_client = OAuthClient(host=cfg.host, - client_id=client_id, - redirect_url='http://localhost:8020', - client_secret=cfg.client_secret) # Load cached credentials from disk if they exist. # Note that these are local to the Python SDK and not reused by other SDKs. 
- token_cache = TokenCache(oauth_client) + oidc_endpoints = cfg.oidc_endpoints + redirect_url = 'http://localhost:8020' + token_cache = TokenCache(host=cfg.host, + oidc_endpoints=oidc_endpoints, + client_id=client_id, + client_secret=client_secret, + redirect_url=redirect_url) credentials = token_cache.load() if credentials: # Force a refresh in case the loaded credentials are expired. credentials.token() else: + oauth_client = OAuthClient(oidc_endpoints=oidc_endpoints, + client_id=client_id, + redirect_url=redirect_url, + client_secret=client_secret) consent = oauth_client.initiate_consent() if not consent: return None @@ -233,8 +239,7 @@ def _ensure_host_present(cfg: 'Config', token_source_for: Callable[[str], TokenS cfg.host = f"https://{resp.json()['properties']['workspaceUrl']}" -@oauth_credentials_strategy('azure-client-secret', - ['is_azure', 'azure_client_id', 'azure_client_secret', 'azure_tenant_id']) +@oauth_credentials_strategy('azure-client-secret', ['is_azure', 'azure_client_id', 'azure_client_secret']) def azure_service_principal(cfg: 'Config') -> CredentialsProvider: """ Adds refreshed Azure Active Directory (AAD) Service Principal OAuth tokens to every request, while automatically resolving different Azure environment endpoints. 
""" @@ -248,6 +253,7 @@ def token_source_for(resource: str) -> TokenSource: use_params=True) _ensure_host_present(cfg, token_source_for) + cfg.load_azure_tenant_id() logger.info("Configured AAD token for Service Principal (%s)", cfg.azure_client_id) inner = token_source_for(cfg.effective_azure_login_app_id) cloud = token_source_for(cfg.arm_environment.service_management_endpoint) @@ -411,10 +417,7 @@ def _parse_expiry(expiry: str) -> datetime: def refresh(self) -> Token: try: - is_windows = sys.platform.startswith('win') - # windows requires shell=True to be able to execute 'az login' or other commands - # cannot use shell=True all the time, as it breaks macOS - out = subprocess.run(self._cmd, capture_output=True, check=True, shell=is_windows) + out = _run_subprocess(self._cmd, capture_output=True, check=True) it = json.loads(out.stdout.decode()) expires_on = self._parse_expiry(it[self._expiry_field]) return Token(access_token=it[self._access_token_field], @@ -429,19 +432,58 @@ def refresh(self) -> Token: raise IOError(f'cannot get access token: {message}') from e +def _run_subprocess(popenargs, + input=None, + capture_output=True, + timeout=None, + check=False, + **kwargs) -> subprocess.CompletedProcess: + """Runs subprocess with given arguments. 
+ This handles OS-specific modifications that need to be made to the invocation of subprocess.run.""" + kwargs['shell'] = sys.platform.startswith('win') + # windows requires shell=True to be able to execute 'az login' or other commands + # cannot use shell=True all the time, as it breaks macOS + logging.debug(f'Running command: {" ".join(popenargs)}') + return subprocess.run(popenargs, + input=input, + capture_output=capture_output, + timeout=timeout, + check=check, + **kwargs) + + class AzureCliTokenSource(CliTokenSource): """ Obtain the token granted by `az login` CLI command """ - def __init__(self, resource: str, subscription: str = ""): + def __init__(self, resource: str, subscription: Optional[str] = None, tenant: Optional[str] = None): cmd = ["az", "account", "get-access-token", "--resource", resource, "--output", "json"] - if subscription != "": + if subscription is not None: cmd.append("--subscription") cmd.append(subscription) + if tenant and not self.__is_cli_using_managed_identity(): + cmd.extend(["--tenant", tenant]) super().__init__(cmd=cmd, token_type_field='tokenType', access_token_field='accessToken', expiry_field='expiresOn') + @staticmethod + def __is_cli_using_managed_identity() -> bool: + """Checks whether the current CLI session is authenticated using managed identity.""" + try: + cmd = ["az", "account", "show", "--output", "json"] + out = _run_subprocess(cmd, capture_output=True, check=True) + account = json.loads(out.stdout.decode()) + user = account.get("user") + if user is None: + return False + return user.get("type") == "servicePrincipal" and user.get("name") in [ + 'systemAssignedIdentity', 'userAssignedIdentity' + ] + except subprocess.CalledProcessError as e: + logger.debug("Failed to get account information from Azure CLI", exc_info=e) + return False + def is_human_user(self) -> bool: """The UPN claim is the username of the user, but not the Service Principal. 
@@ -464,8 +506,10 @@ def is_human_user(self) -> bool: @staticmethod def for_resource(cfg: 'Config', resource: str) -> 'AzureCliTokenSource': subscription = AzureCliTokenSource.get_subscription(cfg) - if subscription != "": - token_source = AzureCliTokenSource(resource, subscription) + if subscription is not None: + token_source = AzureCliTokenSource(resource, + subscription=subscription, + tenant=cfg.azure_tenant_id) try: # This will fail if the user has access to the workspace, but not to the subscription # itself. @@ -475,25 +519,26 @@ def for_resource(cfg: 'Config', resource: str) -> 'AzureCliTokenSource': except OSError: logger.warning("Failed to get token for subscription. Using resource only token.") - token_source = AzureCliTokenSource(resource) + token_source = AzureCliTokenSource(resource, subscription=None, tenant=cfg.azure_tenant_id) token_source.token() return token_source @staticmethod - def get_subscription(cfg: 'Config') -> str: + def get_subscription(cfg: 'Config') -> Optional[str]: resource = cfg.azure_workspace_resource_id if resource is None or resource == "": - return "" + return None components = resource.split('/') if len(components) < 3: logger.warning("Invalid azure workspace resource ID") - return "" + return None return components[2] @credentials_strategy('azure-cli', ['is_azure']) def azure_cli(cfg: 'Config') -> Optional[CredentialsProvider]: """ Adds refreshed OAuth token granted by `az login` command to every request. """ + cfg.load_azure_tenant_id() token_source = None mgmt_token_source = None try: @@ -517,11 +562,6 @@ def azure_cli(cfg: 'Config') -> Optional[CredentialsProvider]: _ensure_host_present(cfg, lambda resource: AzureCliTokenSource.for_resource(cfg, resource)) logger.info("Using Azure CLI authentication with AAD tokens") - if not cfg.is_account_client and AzureCliTokenSource.get_subscription(cfg) == "": - logger.warning( - "azure_workspace_resource_id field not provided. 
" - "It is recommended to specify this field in the Databricks configuration to avoid authentication errors." - ) def inner() -> Dict[str, str]: token = token_source.token() @@ -607,7 +647,10 @@ def inner() -> Dict[str, str]: token = token_source.token() return {'Authorization': f'{token.token_type} {token.access_token}'} - return OAuthCredentialsProvider(inner, token_source.token) + def token() -> Token: + return token_source.token() + + return OAuthCredentialsProvider(inner, token) class MetadataServiceTokenSource(Refreshable): @@ -661,6 +704,90 @@ def inner() -> Dict[str, str]: return inner +# This Code is derived from Mlflow DatabricksModelServingConfigProvider +# https://github.com/mlflow/mlflow/blob/1219e3ef1aac7d337a618a352cd859b336cf5c81/mlflow/legacy_databricks_cli/configure/provider.py#L332 +class ModelServingAuthProvider(): + _MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH = "/var/credentials-secret/model-dependencies-oauth-token" + + def __init__(self): + self.expiry_time = -1 + self.current_token = None + self.refresh_duration = 300 # 300 Seconds + + def should_fetch_model_serving_environment_oauth(self) -> bool: + """ + Check whether this is the model serving environment + Additionally check if the oauth token file path exists + """ + + is_in_model_serving_env = (os.environ.get("IS_IN_DB_MODEL_SERVING_ENV") + or os.environ.get("IS_IN_DATABRICKS_MODEL_SERVING_ENV") or "false") + return (is_in_model_serving_env == "true" + and os.path.isfile(self._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH)) + + def get_model_dependency_oauth_token(self, should_retry=True) -> str: + # Use Cached value if it is valid + if self.current_token is not None and self.expiry_time > time.time(): + return self.current_token + + try: + with open(self._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH) as f: + oauth_dict = json.load(f) + self.current_token = oauth_dict["OAUTH_TOKEN"][0]["oauthTokenValue"] + self.expiry_time = time.time() + self.refresh_duration + except Exception as e: + # sleep and 
retry in case of any race conditions with OAuth refreshing + if should_retry: + logger.warning("Unable to read oauth token on first attempt in Model Serving Environment", + exc_info=e) + time.sleep(0.5) + return self.get_model_dependency_oauth_token(should_retry=False) + else: + raise RuntimeError( + "Unable to read OAuth credentials from the file mounted in Databricks Model Serving" + ) from e + return self.current_token + + def get_databricks_host_token(self) -> Optional[Tuple[str, str]]: + if not self.should_fetch_model_serving_environment_oauth(): + return None + + # read from DB_MODEL_SERVING_HOST_ENV_VAR if available otherwise MODEL_SERVING_HOST_ENV_VAR + host = os.environ.get("DATABRICKS_MODEL_SERVING_HOST_URL") or os.environ.get( + "DB_MODEL_SERVING_HOST_URL") + token = self.get_model_dependency_oauth_token() + + return (host, token) + + +@credentials_strategy('model-serving', []) +def model_serving_auth(cfg: 'Config') -> Optional[CredentialsProvider]: + try: + model_serving_auth_provider = ModelServingAuthProvider() + if not model_serving_auth_provider.should_fetch_model_serving_environment_oauth(): + logger.debug("model-serving: Not in Databricks Model Serving, skipping") + return None + host, token = model_serving_auth_provider.get_databricks_host_token() + if token is None: + raise ValueError( + "Got malformed auth (empty token) when fetching auth implicitly available in Model Serving Environment. 
Please contact Databricks support" + ) + if cfg.host is None: + cfg.host = host + except Exception as e: + logger.warning("Unable to get auth from Databricks Model Serving Environment", exc_info=e) + return None + + logger.info("Using Databricks Model Serving Authentication") + + def inner() -> Dict[str, str]: + # Call here again to get the refreshed token + _, token = model_serving_auth_provider.get_databricks_host_token() + return {"Authorization": f"Bearer {token}"} + + return inner + + class DefaultCredentials: """ Select the first applicable credential provider from the chain """ @@ -669,7 +796,7 @@ def __init__(self) -> None: self._auth_providers = [ pat_auth, basic_auth, metadata_service, oauth_service_principal, azure_service_principal, github_oidc_azure, azure_cli, external_browser, databricks_cli, runtime_native_auth, - google_credentials, google_id + google_credentials, google_id, model_serving_auth ] def auth_type(self) -> str: diff --git a/databricks/sdk/data_plane.py b/databricks/sdk/data_plane.py new file mode 100644 index 000000000..6f6ddf80c --- /dev/null +++ b/databricks/sdk/data_plane.py @@ -0,0 +1,65 @@ +import threading +from dataclasses import dataclass +from typing import Callable, List + +from databricks.sdk.oauth import Token +from databricks.sdk.service.oauth2 import DataPlaneInfo + + +@dataclass +class DataPlaneDetails: + """ + Contains details required to query a DataPlane endpoint. 
+ """ + endpoint_url: str + """URL used to query the endpoint through the DataPlane.""" + token: Token + """Token to query the DataPlane endpoint.""" + + +class DataPlaneService: + """Helper class to fetch and manage DataPlane details.""" + + def __init__(self): + self._data_plane_info = {} + self._tokens = {} + self._lock = threading.Lock() + + def get_data_plane_details(self, method: str, params: List[str], info_getter: Callable[[], DataPlaneInfo], + refresh: Callable[[str], Token]): + """Get and cache information required to query a Data Plane endpoint using the provided methods. + + Returns a cached DataPlaneDetails if the details have already been fetched previously and are still valid. + If not, it uses the provided functions to fetch the details. + + :param method: method name. Used to construct a unique key for the cache. + :param params: path params used in the "get" operation which uniquely determine the object. Used to construct a unique key for the cache. + :param info_getter: function which returns the DataPlaneInfo. It will only be called if the information is not already present in the cache. + :param refresh: function to refresh the token. It will only be called if the token is missing or expired. 
+ """ + all_elements = params.copy() + all_elements.insert(0, method) + map_key = "/".join(all_elements) + info = self._data_plane_info.get(map_key) + if not info: + self._lock.acquire() + try: + info = self._data_plane_info.get(map_key) + if not info: + info = info_getter() + self._data_plane_info[map_key] = info + finally: + self._lock.release() + + token = self._tokens.get(map_key) + if not token or not token.valid: + self._lock.acquire() + token = self._tokens.get(map_key) + try: + if not token or not token.valid: + token = refresh(info.authorization_details) + self._tokens[map_key] = token + finally: + self._lock.release() + + return DataPlaneDetails(endpoint_url=info.endpoint_url, token=token) diff --git a/databricks/sdk/errors/__init__.py b/databricks/sdk/errors/__init__.py index 749c95116..8ad5ac708 100644 --- a/databricks/sdk/errors/__init__.py +++ b/databricks/sdk/errors/__init__.py @@ -1,5 +1,6 @@ from .base import DatabricksError, ErrorDetail -from .mapper import error_mapper +from .customizer import _ErrorCustomizer +from .parser import _Parser from .platform import * from .private_link import PrivateLinkValidationError from .sdk import * diff --git a/databricks/sdk/errors/base.py b/databricks/sdk/errors/base.py index 89be376b6..973c3644e 100644 --- a/databricks/sdk/errors/base.py +++ b/databricks/sdk/errors/base.py @@ -1,4 +1,5 @@ import re +import warnings from dataclasses import dataclass from typing import Dict, List, Optional @@ -41,9 +42,38 @@ def __init__(self, retry_after_secs: int = None, details: List[Dict[str, any]] = None, **kwargs): + """ + + :param message: + :param error_code: + :param detail: [Deprecated] + :param status: [Deprecated] + :param scimType: [Deprecated] + :param error: [Deprecated] + :param retry_after_secs: + :param details: + :param kwargs: + """ + # SCIM-specific parameters are deprecated + if detail: + warnings.warn( + "The 'detail' parameter of DatabricksError is deprecated and will be removed in a future version." 
+ ) + if scimType: + warnings.warn( + "The 'scimType' parameter of DatabricksError is deprecated and will be removed in a future version." + ) + if status: + warnings.warn( + "The 'status' parameter of DatabricksError is deprecated and will be removed in a future version." + ) + + # API 1.2-specific parameters are deprecated if error: - # API 1.2 has different response format, let's adapt - message = error + warnings.warn( + "The 'error' parameter of DatabricksError is deprecated and will be removed in a future version." + ) + if detail: # Handle SCIM error message details # @see https://tools.ietf.org/html/rfc7644#section-3.7.3 diff --git a/databricks/sdk/errors/customizer.py b/databricks/sdk/errors/customizer.py new file mode 100644 index 000000000..5c895becc --- /dev/null +++ b/databricks/sdk/errors/customizer.py @@ -0,0 +1,50 @@ +import abc +import logging + +import requests + + +class _ErrorCustomizer(abc.ABC): + """A customizer for errors from the Databricks REST API.""" + + @abc.abstractmethod + def customize_error(self, response: requests.Response, kwargs: dict): + """Customize the error constructor parameters.""" + + +class _RetryAfterCustomizer(_ErrorCustomizer): + """An error customizer that sets the retry_after_secs parameter based on the Retry-After header.""" + + _DEFAULT_RETRY_AFTER_SECONDS = 1 + """The default number of seconds to wait before retrying a request if the Retry-After header is missing or is not + a valid integer.""" + + @classmethod + def _parse_retry_after(cls, response: requests.Response) -> int: + retry_after = response.headers.get("Retry-After") + if retry_after is None: + logging.debug( + f'No Retry-After header received in response with status code 429 or 503. Defaulting to {cls._DEFAULT_RETRY_AFTER_SECONDS}' + ) + # 429 requests should include a `Retry-After` header, but if it's missing, + # we default to 1 second. 
+ return cls._DEFAULT_RETRY_AFTER_SECONDS + # If the request is throttled, try parse the `Retry-After` header and sleep + # for the specified number of seconds. Note that this header can contain either + # an integer or a RFC1123 datetime string. + # See https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After + # + # For simplicity, we only try to parse it as an integer, as this is what Databricks + # platform returns. Otherwise, we fall back and don't sleep. + try: + return int(retry_after) + except ValueError: + logging.debug( + f'Invalid Retry-After header received: {retry_after}. Defaulting to {cls._DEFAULT_RETRY_AFTER_SECONDS}' + ) + # defaulting to 1 sleep second to make self._is_retryable() simpler + return cls._DEFAULT_RETRY_AFTER_SECONDS + + def customize_error(self, response: requests.Response, kwargs: dict): + if response.status_code in (429, 503): + kwargs['retry_after_secs'] = self._parse_retry_after(response) diff --git a/databricks/sdk/errors/deserializer.py b/databricks/sdk/errors/deserializer.py new file mode 100644 index 000000000..4da01ee68 --- /dev/null +++ b/databricks/sdk/errors/deserializer.py @@ -0,0 +1,106 @@ +import abc +import json +import logging +import re +from typing import Optional + +import requests + + +class _ErrorDeserializer(abc.ABC): + """A parser for errors from the Databricks REST API.""" + + @abc.abstractmethod + def deserialize_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]: + """Parses an error from the Databricks REST API. 
If the error cannot be parsed, returns None.""" + + +class _EmptyDeserializer(_ErrorDeserializer): + """A parser that handles empty responses.""" + + def deserialize_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]: + if len(response_body) == 0: + return {'message': response.reason} + return None + + +class _StandardErrorDeserializer(_ErrorDeserializer): + """ + Parses errors from the Databricks REST API using the standard error format. + """ + + def deserialize_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]: + try: + payload_str = response_body.decode('utf-8') + resp = json.loads(payload_str) + except UnicodeDecodeError as e: + logging.debug('_StandardErrorParser: unable to decode response using utf-8', exc_info=e) + return None + except json.JSONDecodeError as e: + logging.debug('_StandardErrorParser: unable to deserialize response as json', exc_info=e) + return None + if not isinstance(resp, dict): + logging.debug('_StandardErrorParser: response is valid JSON but not a dictionary') + return None + + error_args = { + 'message': resp.get('message', 'request failed'), + 'error_code': resp.get('error_code'), + 'details': resp.get('details'), + } + + # Handle API 1.2-style errors + if 'error' in resp: + error_args['message'] = resp['error'] + + # Handle SCIM Errors + detail = resp.get('detail') + status = resp.get('status') + scim_type = resp.get('scimType') + if detail: + # Handle SCIM error message details + # @see https://tools.ietf.org/html/rfc7644#section-3.7.3 + if detail == "null": + detail = "SCIM API Internal Error" + error_args['message'] = f"{scim_type} {detail}".strip(" ") + error_args['error_code'] = f"SCIM_{status}" + return error_args + + +class _StringErrorDeserializer(_ErrorDeserializer): + """ + Parses errors from the Databricks REST API in the format "ERROR_CODE: MESSAGE". 
+ """ + + __STRING_ERROR_REGEX = re.compile(r'([A-Z_]+): (.*)') + + def deserialize_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]: + payload_str = response_body.decode('utf-8') + match = self.__STRING_ERROR_REGEX.match(payload_str) + if not match: + logging.debug('_StringErrorParser: unable to parse response as string') + return None + error_code, message = match.groups() + return {'error_code': error_code, 'message': message, 'status': response.status_code, } + + +class _HtmlErrorDeserializer(_ErrorDeserializer): + """ + Parses errors from the Databricks REST API in HTML format. + """ + + __HTML_ERROR_REGEXES = [re.compile(r'
(.*)
'), re.compile(r'(.*)'), ] + + def deserialize_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]: + payload_str = response_body.decode('utf-8') + for regex in self.__HTML_ERROR_REGEXES: + match = regex.search(payload_str) + if match: + message = match.group(1) if match.group(1) else response.reason + return { + 'status': response.status_code, + 'message': message, + 'error_code': response.reason.upper().replace(' ', '_') + } + logging.debug('_HtmlErrorParser: no
 tag found in error response')
+        return None
diff --git a/databricks/sdk/errors/mapper.py b/databricks/sdk/errors/mapper.py
index 0b809eb7e..282b09c76 100644
--- a/databricks/sdk/errors/mapper.py
+++ b/databricks/sdk/errors/mapper.py
@@ -4,11 +4,9 @@
 from databricks.sdk.errors.base import DatabricksError
 
 from .overrides import _ALL_OVERRIDES
-from .private_link import (_get_private_link_validation_error,
-                           _is_private_link_redirect)
 
 
-def error_mapper(response: requests.Response, raw: dict) -> DatabricksError:
+def _error_mapper(response: requests.Response, raw: dict) -> DatabricksError:
     for override in _ALL_OVERRIDES:
         if override.matches(response, raw):
             return override.custom_error(**raw)
@@ -23,8 +21,6 @@ def error_mapper(response: requests.Response, raw: dict) -> DatabricksError:
         # where there's a default exception class per HTTP status code, and we do
         # rely on Databricks platform exception mapper to do the right thing.
         return platform.STATUS_CODE_MAPPING[status_code](**raw)
-    if _is_private_link_redirect(response):
-        return _get_private_link_validation_error(response.url)
 
     # backwards-compatible error creation for cases like using older versions of
     # the SDK on way never releases of the platform.
diff --git a/databricks/sdk/errors/overrides.py b/databricks/sdk/errors/overrides.py
index 492b2caad..840bdcfcb 100644
--- a/databricks/sdk/errors/overrides.py
+++ b/databricks/sdk/errors/overrides.py
@@ -22,4 +22,12 @@
                    message_matcher=re.compile(r'Job .* does not exist'),
                    custom_error=ResourceDoesNotExist,
                    ),
+    _ErrorOverride(debug_name="Job Runs InvalidParameterValue=>ResourceDoesNotExist",
+                   path_regex=re.compile(r'^/api/2\.\d/jobs/runs/get'),
+                   verb="GET",
+                   status_code_matcher=re.compile(r'^400$'),
+                   error_code_matcher=re.compile(r'INVALID_PARAMETER_VALUE'),
+                   message_matcher=re.compile(r'(Run .* does not exist|Run: .* in job: .* doesn\'t exist)'),
+                   custom_error=ResourceDoesNotExist,
+                   ),
 ]
diff --git a/databricks/sdk/errors/parser.py b/databricks/sdk/errors/parser.py
new file mode 100644
index 000000000..3408964fe
--- /dev/null
+++ b/databricks/sdk/errors/parser.py
@@ -0,0 +1,83 @@
+import logging
+from typing import List, Optional
+
+import requests
+
+from ..logger import RoundTrip
+from .base import DatabricksError
+from .customizer import _ErrorCustomizer, _RetryAfterCustomizer
+from .deserializer import (_EmptyDeserializer, _ErrorDeserializer,
+                           _HtmlErrorDeserializer, _StandardErrorDeserializer,
+                           _StringErrorDeserializer)
+from .mapper import _error_mapper
+from .private_link import (_get_private_link_validation_error,
+                           _is_private_link_redirect)
+
+# A list of _ErrorDeserializers that are tried in order to parse an API error from a response body. Most errors should
+# be parsable by the _StandardErrorDeserializer, but additional parsers can be added here for specific error formats.
+# The order of the parsers is not important, as the set of errors that can be parsed by each parser should be disjoint.
+_error_deserializers = [
+    _EmptyDeserializer(),
+    _StandardErrorDeserializer(),
+    _StringErrorDeserializer(),
+    _HtmlErrorDeserializer(),
+]
+
+# A list of _ErrorCustomizers that are applied to the error arguments after they are parsed. Customizers can modify the
+# error arguments in any way, including adding or removing fields. Customizers are applied in order, so later
+# customizers can override the changes made by earlier customizers.
+_error_customizers = [_RetryAfterCustomizer(), ]
+
+
+def _unknown_error(response: requests.Response) -> str:
+    """A standard error message that can be shown when an API response cannot be parsed.
+
+    This error message includes a link to the issue tracker for the SDK for users to report the issue to us.
+    """
+    request_log = RoundTrip(response, debug_headers=True, debug_truncate_bytes=10 * 1024).generate()
+    return (
+        'This is likely a bug in the Databricks SDK for Python or the underlying '
+        'API. Please report this issue with the following debugging information to the SDK issue tracker at '
+        f'https://github.com/databricks/databricks-sdk-py/issues. Request log:```{request_log}```')
+
+
+class _Parser:
+    """
+    A parser for errors from the Databricks REST API. It attempts to deserialize an error using a sequence of
+    deserializers, and then customizes the deserialized error using a sequence of customizers. If the error cannot be
+    deserialized, it returns a generic error with debugging information and instructions to report the issue to the SDK
+    issue tracker.
+    """
+
+    def __init__(self,
+                 extra_error_parsers: List[_ErrorDeserializer] = [],
+                 extra_error_customizers: List[_ErrorCustomizer] = []):
+        self._error_parsers = _error_deserializers + (extra_error_parsers
+                                                      if extra_error_parsers is not None else [])
+        self._error_customizers = _error_customizers + (extra_error_customizers
+                                                        if extra_error_customizers is not None else [])
+
+    def get_api_error(self, response: requests.Response) -> Optional[DatabricksError]:
+        """
+        Handles responses from the REST API and returns a DatabricksError if the response indicates an error.
+        :param response: The response from the REST API.
+        :return: A DatabricksError if the response indicates an error, otherwise None.
+        """
+        if not response.ok:
+            content = response.content
+            for parser in self._error_parsers:
+                try:
+                    error_args = parser.deserialize_error(response, content)
+                    if error_args:
+                        for customizer in self._error_customizers:
+                            customizer.customize_error(response, error_args)
+                        return _error_mapper(response, error_args)
+                except Exception as e:
+                    logging.debug(f'Error parsing response with {parser}, continuing', exc_info=e)
+            return _error_mapper(response,
+                                 {'message': 'unable to parse response. ' + _unknown_error(response)})
+
+        # Private link failures happen via a redirect to the login page. From a requests-perspective, the request
+        # is successful, but the response is not what we expect. We need to handle this case separately.
+        if _is_private_link_redirect(response):
+            return _get_private_link_validation_error(response.url)
diff --git a/databricks/sdk/errors/platform.py b/databricks/sdk/errors/platform.py
index df25fad4b..0d923a75c 100755
--- a/databricks/sdk/errors/platform.py
+++ b/databricks/sdk/errors/platform.py
@@ -47,6 +47,10 @@ class DeadlineExceeded(DatabricksError):
     """the deadline expired before the operation could complete"""
 
 
+class InvalidState(BadRequest):
+    """unexpected state"""
+
+
 class InvalidParameterValue(BadRequest):
     """supplied value for a parameter was invalid"""
 
@@ -99,6 +103,7 @@ class DataLoss(InternalError):
 }
 
 ERROR_CODE_MAPPING = {
+    'INVALID_STATE': InvalidState,
     'INVALID_PARAMETER_VALUE': InvalidParameterValue,
     'RESOURCE_DOES_NOT_EXIST': ResourceDoesNotExist,
     'ABORTED': Aborted,
diff --git a/databricks/sdk/errors/private_link.py b/databricks/sdk/errors/private_link.py
index e8cc5eadf..946b41b50 100644
--- a/databricks/sdk/errors/private_link.py
+++ b/databricks/sdk/errors/private_link.py
@@ -51,7 +51,7 @@ def _is_private_link_redirect(resp: requests.Response) -> bool:
     return parsed.path == '/login.html' and 'error=private-link-validation-error' in parsed.query
 
 
-def _get_private_link_validation_error(url: str) -> _PrivateLinkInfo:
+def _get_private_link_validation_error(url: str) -> PrivateLinkValidationError:
     parsed = parse.urlparse(url)
     env = get_environment_for_hostname(parsed.hostname)
     return PrivateLinkValidationError(message=_private_link_info_map[env.cloud].error_message(),
diff --git a/databricks/sdk/logger/__init__.py b/databricks/sdk/logger/__init__.py
new file mode 100644
index 000000000..f843f05f6
--- /dev/null
+++ b/databricks/sdk/logger/__init__.py
@@ -0,0 +1 @@
+from .round_trip_logger import RoundTrip
diff --git a/databricks/sdk/logger/round_trip_logger.py b/databricks/sdk/logger/round_trip_logger.py
new file mode 100644
index 000000000..1c0a47f08
--- /dev/null
+++ b/databricks/sdk/logger/round_trip_logger.py
@@ -0,0 +1,119 @@
+import json
+import urllib.parse
+from typing import Dict, List
+
+import requests
+
+
+class RoundTrip:
+    """
+    A utility class for converting HTTP requests and responses to strings.
+
+    :param response: The response object to stringify.
+    :param debug_headers: Whether to include headers in the generated string.
+    :param debug_truncate_bytes: The maximum number of bytes to include in the generated string.
+    :param raw: Whether the response is a stream or not. If True, the response will not be logged directly.
+    """
+
+    def __init__(self,
+                 response: requests.Response,
+                 debug_headers: bool,
+                 debug_truncate_bytes: int,
+                 raw=False):
+        self._debug_headers = debug_headers
+        self._debug_truncate_bytes = max(debug_truncate_bytes, 96)
+        self._raw = raw
+        self._response = response
+
+    def generate(self) -> str:
+        """
+        Generate a string representation of the request and response. The string will include the request method, URL,
+        headers, and body, as well as the response status code, reason, headers, and body. Outgoing information
+        will be prefixed with `>`, and incoming information will be prefixed with `<`.
+        :return: A string representation of the request.
+        """
+        request = self._response.request
+        url = urllib.parse.urlparse(request.url)
+        query = ''
+        if url.query:
+            query = f'?{urllib.parse.unquote(url.query)}'
+        sb = [f'{request.method} {urllib.parse.unquote(url.path)}{query}']
+        if self._debug_headers:
+            for k, v in request.headers.items():
+                sb.append(f'> * {k}: {self._only_n_bytes(v, self._debug_truncate_bytes)}')
+        if request.body:
+            sb.append("> [raw stream]" if self._raw else self._redacted_dump("> ", request.body))
+        sb.append(f'< {self._response.status_code} {self._response.reason}')
+        if self._raw and self._response.headers.get('Content-Type', None) != 'application/json':
+            # Raw streams with `Transfer-Encoding: chunked` do not have `Content-Type` header
+            sb.append("< [raw stream]")
+        elif self._response.content:
+            decoded = self._response.content.decode('utf-8', errors='replace')
+            sb.append(self._redacted_dump("< ", decoded))
+        return '\n'.join(sb)
+
+    @staticmethod
+    def _mask(m: Dict[str, any]):
+        for k in m:
+            if k in {'bytes_value', 'string_value', 'token_value', 'value', 'content'}:
+                m[k] = "**REDACTED**"
+
+    @staticmethod
+    def _map_keys(m: Dict[str, any]) -> List[str]:
+        keys = list(m.keys())
+        keys.sort()
+        return keys
+
+    @staticmethod
+    def _only_n_bytes(j: str, num_bytes: int = 96) -> str:
+        diff = len(j.encode('utf-8')) - num_bytes
+        if diff > 0:
+            return f"{j[:num_bytes]}... ({diff} more bytes)"
+        return j
+
+    def _recursive_marshal_dict(self, m, budget) -> dict:
+        out = {}
+        self._mask(m)
+        for k in sorted(m.keys()):
+            raw = self._recursive_marshal(m[k], budget)
+            out[k] = raw
+            budget -= len(str(raw))
+        return out
+
+    def _recursive_marshal_list(self, s, budget) -> list:
+        out = []
+        for i in range(len(s)):
+            if i > 0 >= budget:
+                out.append("... (%d additional elements)" % (len(s) - len(out)))
+                break
+            raw = self._recursive_marshal(s[i], budget)
+            out.append(raw)
+            budget -= len(str(raw))
+        return out
+
+    def _recursive_marshal(self, v: any, budget: int) -> any:
+        if isinstance(v, dict):
+            return self._recursive_marshal_dict(v, budget)
+        elif isinstance(v, list):
+            return self._recursive_marshal_list(v, budget)
+        elif isinstance(v, str):
+            return self._only_n_bytes(v, self._debug_truncate_bytes)
+        else:
+            return v
+
+    def _redacted_dump(self, prefix: str, body: str) -> str:
+        if len(body) == 0:
+            return ""
+        try:
+            # Unmarshal body into primitive types.
+            tmp = json.loads(body)
+            max_bytes = 96
+            if self._debug_truncate_bytes > max_bytes:
+                max_bytes = self._debug_truncate_bytes
+            # Re-marshal body taking redaction and character limit into account.
+            raw = self._recursive_marshal(tmp, max_bytes)
+            return "\n".join([f'{prefix}{line}' for line in json.dumps(raw, indent=2).split("\n")])
+        except json.JSONDecodeError:
+            to_log = self._only_n_bytes(body, self._debug_truncate_bytes)
+            log_lines = [prefix + x.strip('\r') for x in to_log.split("\n")]
+            return '\n'.join(log_lines)
diff --git a/databricks/sdk/mixins/files.py b/databricks/sdk/mixins/files.py
index 3295aef7a..1e109a1a7 100644
--- a/databricks/sdk/mixins/files.py
+++ b/databricks/sdk/mixins/files.py
@@ -3,6 +3,7 @@
 import base64
 import os
 import pathlib
+import platform
 import shutil
 import sys
 from abc import ABC, abstractmethod
@@ -266,8 +267,9 @@ def __repr__(self) -> str:
 
 class _Path(ABC):
 
-    def __init__(self, path: str):
-        self._path = pathlib.Path(str(path).replace('dbfs:', '').replace('file:', ''))
+    @abstractmethod
+    def __init__(self):
+        ...
 
     @property
     def is_local(self) -> bool:
@@ -327,6 +329,12 @@ def as_string(self) -> str:
 
 class _LocalPath(_Path):
 
+    def __init__(self, path: str):
+        if platform.system() == "Windows":
+            self._path = pathlib.Path(str(path).replace('file:///', '').replace('file:', ''))
+        else:
+            self._path = pathlib.Path(str(path).replace('file:', ''))
+
     def _is_local(self) -> bool:
         return True
 
@@ -393,7 +401,7 @@ def __repr__(self) -> str:
 class _VolumesPath(_Path):
 
     def __init__(self, api: files.FilesAPI, src: Union[str, pathlib.Path]):
-        super().__init__(src)
+        self._path = pathlib.PurePosixPath(str(src).replace('dbfs:', '').replace('file:', ''))
         self._api = api
 
     def _is_local(self) -> bool:
@@ -462,7 +470,7 @@ def __repr__(self) -> str:
 class _DbfsPath(_Path):
 
     def __init__(self, api: files.DbfsAPI, src: str):
-        super().__init__(src)
+        self._path = pathlib.PurePosixPath(str(src).replace('dbfs:', '').replace('file:', ''))
         self._api = api
 
     def _is_local(self) -> bool:
diff --git a/databricks/sdk/mixins/open_ai_client.py b/databricks/sdk/mixins/open_ai_client.py
new file mode 100644
index 000000000..f7a8af02d
--- /dev/null
+++ b/databricks/sdk/mixins/open_ai_client.py
@@ -0,0 +1,52 @@
+from databricks.sdk.service.serving import ServingEndpointsAPI
+
+
+class ServingEndpointsExt(ServingEndpointsAPI):
+
+    # Using the HTTP Client to pass in the databricks authorization
+    # This method will be called on every invocation, so when using with model serving will always get the refreshed token
+    def _get_authorized_http_client(self):
+        import httpx
+
+        class BearerAuth(httpx.Auth):
+
+            def __init__(self, get_headers_func):
+                self.get_headers_func = get_headers_func
+
+            def auth_flow(self, request: httpx.Request) -> httpx.Request:
+                auth_headers = self.get_headers_func()
+                request.headers["Authorization"] = auth_headers["Authorization"]
+                yield request
+
+        databricks_token_auth = BearerAuth(self._api._cfg.authenticate)
+
+        # Create an HTTP client with Bearer Token authentication
+        http_client = httpx.Client(auth=databricks_token_auth)
+        return http_client
+
+    def get_open_ai_client(self):
+        try:
+            from openai import OpenAI
+        except Exception:
+            raise ImportError(
+                "Open AI is not installed. Please install the Databricks SDK with the following command `pip install databricks-sdk[openai]`"
+            )
+
+        return OpenAI(
+            base_url=self._api._cfg.host + "/serving-endpoints",
+            api_key="no-token", # Passing in a placeholder to pass validations, this will not be used
+            http_client=self._get_authorized_http_client())
+
+    def get_langchain_chat_open_ai_client(self, model):
+        try:
+            from langchain_openai import ChatOpenAI
+        except Exception:
+            raise ImportError(
+                "Langchain Open AI is not installed. Please install the Databricks SDK with the following command `pip install databricks-sdk[openai]` and ensure you are using python>3.7"
+            )
+
+        return ChatOpenAI(
+            model=model,
+            openai_api_base=self._api._cfg.host + "/serving-endpoints",
+            api_key="no-token", # Passing in a placeholder to pass validations, this will not be used
+            http_client=self._get_authorized_http_client())
diff --git a/databricks/sdk/oauth.py b/databricks/sdk/oauth.py
index e9a3afb90..6cac45afc 100644
--- a/databricks/sdk/oauth.py
+++ b/databricks/sdk/oauth.py
@@ -17,6 +17,8 @@
 import requests
 import requests.auth
 
+from ._base_client import _BaseClient, _fix_host_if_needed
+
 # Error code for PKCE flow in Azure Active Directory, that gets additional retry.
 # See https://stackoverflow.com/a/75466778/277035 for more info
 NO_ORIGIN_FOR_SPA_CLIENT_ERROR = 'AADSTS9002327'
@@ -46,8 +48,24 @@ def __call__(self, r):
 
 @dataclass
 class OidcEndpoints:
+    """
+    The endpoints used for OAuth-based authentication in Databricks.
+    """
+
     authorization_endpoint: str # ../v1/authorize
+    """The authorization endpoint for the OAuth flow. The user-agent should be directed to this endpoint in order for
+    the user to login and authorize the client for user-to-machine (U2M) flows."""
+
     token_endpoint: str # ../v1/token
+    """The token endpoint for the OAuth flow."""
+
+    @staticmethod
+    def from_dict(d: dict) -> 'OidcEndpoints':
+        return OidcEndpoints(authorization_endpoint=d.get('authorization_endpoint'),
+                             token_endpoint=d.get('token_endpoint'))
+
+    def as_dict(self) -> dict:
+        return {'authorization_endpoint': self.authorization_endpoint, 'token_endpoint': self.token_endpoint}
 
 
 @dataclass
@@ -220,18 +238,76 @@ def do_GET(self):
         self.wfile.write(b'You can close this tab.')
 
 
+def get_account_endpoints(host: str, account_id: str, client: _BaseClient = _BaseClient()) -> OidcEndpoints:
+    """
+    Get the OIDC endpoints for a given account.
+    :param host: The Databricks account host.
+    :param account_id: The account ID.
+    :return: The account's OIDC endpoints.
+    """
+    host = _fix_host_if_needed(host)
+    oidc = f'{host}/oidc/accounts/{account_id}/.well-known/oauth-authorization-server'
+    resp = client.do('GET', oidc)
+    return OidcEndpoints.from_dict(resp)
+
+
+def get_workspace_endpoints(host: str, client: _BaseClient = _BaseClient()) -> OidcEndpoints:
+    """
+    Get the OIDC endpoints for a given workspace.
+    :param host: The Databricks workspace host.
+    :return: The workspace's OIDC endpoints.
+    """
+    host = _fix_host_if_needed(host)
+    oidc = f'{host}/oidc/.well-known/oauth-authorization-server'
+    resp = client.do('GET', oidc)
+    return OidcEndpoints.from_dict(resp)
+
+
+def get_azure_entra_id_workspace_endpoints(host: str) -> Optional[OidcEndpoints]:
+    """
+    Get the Azure Entra ID endpoints for a given workspace. Can only be used when authenticating to Azure Databricks
+    using an application registered in Azure Entra ID.
+    :param host: The Databricks workspace host.
+    :return: The OIDC endpoints for the workspace's Azure Entra ID tenant.
+    """
+    # In Azure, this workspace endpoint redirects to the Entra ID authorization endpoint
+    host = _fix_host_if_needed(host)
+    res = requests.get(f'{host}/oidc/oauth2/v2.0/authorize', allow_redirects=False)
+    real_auth_url = res.headers.get('location')
+    if not real_auth_url:
+        return None
+    return OidcEndpoints(authorization_endpoint=real_auth_url,
+                         token_endpoint=real_auth_url.replace('/authorize', '/token'))
+
+
 class SessionCredentials(Refreshable):
 
-    def __init__(self, client: 'OAuthClient', token: Token):
-        self._client = client
+    def __init__(self,
+                 token: Token,
+                 token_endpoint: str,
+                 client_id: str,
+                 client_secret: str = None,
+                 redirect_url: str = None):
+        self._token_endpoint = token_endpoint
+        self._client_id = client_id
+        self._client_secret = client_secret
+        self._redirect_url = redirect_url
         super().__init__(token)
 
     def as_dict(self) -> dict:
         return {'token': self._token.as_dict()}
 
     @staticmethod
-    def from_dict(client: 'OAuthClient', raw: dict) -> 'SessionCredentials':
-        return SessionCredentials(client=client, token=Token.from_dict(raw['token']))
+    def from_dict(raw: dict,
+                  token_endpoint: str,
+                  client_id: str,
+                  client_secret: str = None,
+                  redirect_url: str = None) -> 'SessionCredentials':
+        return SessionCredentials(token=Token.from_dict(raw['token']),
+                                  token_endpoint=token_endpoint,
+                                  client_id=client_id,
+                                  client_secret=client_secret,
+                                  redirect_url=redirect_url)
 
     def auth_type(self):
         """Implementing CredentialsProvider protocol"""
@@ -252,13 +328,13 @@ def refresh(self) -> Token:
             raise ValueError('oauth2: token expired and refresh token is not set')
         params = {'grant_type': 'refresh_token', 'refresh_token': refresh_token}
         headers = {}
-        if 'microsoft' in self._client.token_url:
+        if 'microsoft' in self._token_endpoint:
             # Tokens issued for the 'Single-Page Application' client-type may
             # only be redeemed via cross-origin requests
-            headers = {'Origin': self._client.redirect_url}
-        return retrieve_token(client_id=self._client.client_id,
-                              client_secret=self._client.client_secret,
-                              token_url=self._client.token_url,
+            headers = {'Origin': self._redirect_url}
+        return retrieve_token(client_id=self._client_id,
+                              client_secret=self._client_secret,
+                              token_url=self._token_endpoint,
                               params=params,
                               use_params=True,
                               headers=headers)
@@ -266,27 +342,53 @@ def refresh(self) -> Token:
 
 class Consent:
 
-    def __init__(self, client: 'OAuthClient', state: str, verifier: str, auth_url: str = None) -> None:
-        self.auth_url = auth_url
-
+    def __init__(self,
+                 state: str,
+                 verifier: str,
+                 authorization_url: str,
+                 redirect_url: str,
+                 token_endpoint: str,
+                 client_id: str,
+                 client_secret: str = None) -> None:
         self._verifier = verifier
         self._state = state
-        self._client = client
+        self._authorization_url = authorization_url
+        self._redirect_url = redirect_url
+        self._token_endpoint = token_endpoint
+        self._client_id = client_id
+        self._client_secret = client_secret
 
     def as_dict(self) -> dict:
-        return {'state': self._state, 'verifier': self._verifier}
+        return {
+            'state': self._state,
+            'verifier': self._verifier,
+            'authorization_url': self._authorization_url,
+            'redirect_url': self._redirect_url,
+            'token_endpoint': self._token_endpoint,
+            'client_id': self._client_id,
+        }
+
+    @property
+    def authorization_url(self) -> str:
+        return self._authorization_url
 
     @staticmethod
-    def from_dict(client: 'OAuthClient', raw: dict) -> 'Consent':
-        return Consent(client, raw['state'], raw['verifier'])
+    def from_dict(raw: dict, client_secret: str = None) -> 'Consent':
+        return Consent(raw['state'],
+                       raw['verifier'],
+                       authorization_url=raw['authorization_url'],
+                       redirect_url=raw['redirect_url'],
+                       token_endpoint=raw['token_endpoint'],
+                       client_id=raw['client_id'],
+                       client_secret=client_secret)
 
     def launch_external_browser(self) -> SessionCredentials:
-        redirect_url = urllib.parse.urlparse(self._client.redirect_url)
+        redirect_url = urllib.parse.urlparse(self._redirect_url)
         if redirect_url.hostname not in ('localhost', '127.0.0.1'):
             raise ValueError(f'cannot listen on {redirect_url.hostname}')
         feedback = []
-        logger.info(f'Opening {self.auth_url} in a browser')
-        webbrowser.open_new(self.auth_url)
+        logger.info(f'Opening {self._authorization_url} in a browser')
+        webbrowser.open_new(self._authorization_url)
         port = redirect_url.port
         handler_factory = functools.partial(_OAuthCallback, feedback)
         with HTTPServer(("localhost", port), handler_factory) as httpd:
@@ -308,7 +410,7 @@ def exchange(self, code: str, state: str) -> SessionCredentials:
         if self._state != state:
             raise ValueError('state mismatch')
         params = {
-            'redirect_uri': self._client.redirect_url,
+            'redirect_uri': self._redirect_url,
             'grant_type': 'authorization_code',
             'code_verifier': self._verifier,
             'code': code
@@ -316,19 +418,20 @@ def exchange(self, code: str, state: str) -> SessionCredentials:
         headers = {}
         while True:
             try:
-                token = retrieve_token(client_id=self._client.client_id,
-                                       client_secret=self._client.client_secret,
-                                       token_url=self._client.token_url,
+                token = retrieve_token(client_id=self._client_id,
+                                       client_secret=self._client_secret,
+                                       token_url=self._token_endpoint,
                                        params=params,
                                        headers=headers,
                                        use_params=True)
-                return SessionCredentials(self._client, token)
+                return SessionCredentials(token, self._token_endpoint, self._client_id, self._client_secret,
+                                          self._redirect_url)
             except ValueError as e:
                 if NO_ORIGIN_FOR_SPA_CLIENT_ERROR in str(e):
                     # Retry in cases of 'Single-Page Application' client-type with
                     # 'Origin' header equal to client's redirect URL.
-                    headers['Origin'] = self._client.redirect_url
-                    msg = f'Retrying OAuth token exchange with {self._client.redirect_url} origin'
+                    headers['Origin'] = self._redirect_url
+                    msg = f'Retrying OAuth token exchange with {self._redirect_url} origin'
                     logger.debug(msg)
                     continue
                 raise e
@@ -354,13 +457,28 @@ class OAuthClient:
     """
 
     def __init__(self,
-                 host: str,
-                 client_id: str,
+                 oidc_endpoints: OidcEndpoints,
                  redirect_url: str,
-                 *,
+                 client_id: str,
                  scopes: List[str] = None,
                  client_secret: str = None):
-        # TODO: is it a circular dependency?..
+
+        if not scopes:
+            scopes = ['all-apis']
+
+        self.redirect_url = redirect_url
+        self._client_id = client_id
+        self._client_secret = client_secret
+        self._oidc_endpoints = oidc_endpoints
+        self._scopes = scopes
+
+    @staticmethod
+    def from_host(host: str,
+                  client_id: str,
+                  redirect_url: str,
+                  *,
+                  scopes: List[str] = None,
+                  client_secret: str = None) -> 'OAuthClient':
         from .core import Config
         from .credentials_provider import credentials_strategy
 
@@ -374,18 +492,7 @@ def noop_credentials(_: any):
         oidc = config.oidc_endpoints
         if not oidc:
             raise ValueError(f'{host} does not support OAuth')
-
-        self.host = host
-        self.redirect_url = redirect_url
-        self.client_id = client_id
-        self.client_secret = client_secret
-        self.token_url = oidc.token_endpoint
-        self.is_aws = config.is_aws
-        self.is_azure = config.is_azure
-        self.is_gcp = config.is_gcp
-
-        self._auth_url = oidc.authorization_endpoint
-        self._scopes = scopes
+        return OAuthClient(oidc, redirect_url, client_id, scopes, client_secret)
 
     def initiate_consent(self) -> Consent:
         state = secrets.token_urlsafe(16)
@@ -397,18 +504,24 @@ def initiate_consent(self) -> Consent:
 
         params = {
             'response_type': 'code',
-            'client_id': self.client_id,
+            'client_id': self._client_id,
             'redirect_uri': self.redirect_url,
             'scope': ' '.join(self._scopes),
             'state': state,
             'code_challenge': challenge,
             'code_challenge_method': 'S256'
         }
-        url = f'{self._auth_url}?{urllib.parse.urlencode(params)}'
-        return Consent(self, state, verifier, auth_url=url)
+        auth_url = f'{self._oidc_endpoints.authorization_endpoint}?{urllib.parse.urlencode(params)}'
+        return Consent(state,
+                       verifier,
+                       authorization_url=auth_url,
+                       redirect_url=self.redirect_url,
+                       token_endpoint=self._oidc_endpoints.token_endpoint,
+                       client_id=self._client_id,
+                       client_secret=self._client_secret)
 
     def __repr__(self) -> str:
-        return f''
+        return f''
 
 
 @dataclass
@@ -448,17 +561,28 @@ def refresh(self) -> Token:
                               use_header=self.use_header)
 
 
-class TokenCache():
+class TokenCache:
     BASE_PATH = "~/.config/databricks-sdk-py/oauth"
 
-    def __init__(self, client: OAuthClient) -> None:
-        self.client = client
+    def __init__(self,
+                 host: str,
+                 oidc_endpoints: OidcEndpoints,
+                 client_id: str,
+                 redirect_url: str = None,
+                 client_secret: str = None,
+                 scopes: List[str] = None) -> None:
+        self._host = host
+        self._client_id = client_id
+        self._oidc_endpoints = oidc_endpoints
+        self._redirect_url = redirect_url
+        self._client_secret = client_secret
+        self._scopes = scopes or []
 
     @property
     def filename(self) -> str:
         # Include host, client_id, and scopes in the cache filename to make it unique.
         hash = hashlib.sha256()
-        for chunk in [self.client.host, self.client.client_id, ",".join(self.client._scopes), ]:
+        for chunk in [self._host, self._client_id, ",".join(self._scopes), ]:
             hash.update(chunk.encode('utf-8'))
         return os.path.expanduser(os.path.join(self.__class__.BASE_PATH, hash.hexdigest() + ".json"))
 
@@ -472,7 +596,11 @@ def load(self) -> Optional[SessionCredentials]:
         try:
             with open(self.filename, 'r') as f:
                 raw = json.load(f)
-                return SessionCredentials.from_dict(self.client, raw)
+                return SessionCredentials.from_dict(raw,
+                                                    token_endpoint=self._oidc_endpoints.token_endpoint,
+                                                    client_id=self._client_id,
+                                                    client_secret=self._client_secret,
+                                                    redirect_url=self._redirect_url)
         except Exception:
             return None
 
diff --git a/databricks/sdk/service/apps.py b/databricks/sdk/service/apps.py
new file mode 100755
index 000000000..52796d0e8
--- /dev/null
+++ b/databricks/sdk/service/apps.py
@@ -0,0 +1,1230 @@
+# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+from __future__ import annotations
+
+import logging
+import random
+import time
+from dataclasses import dataclass
+from datetime import timedelta
+from enum import Enum
+from typing import Callable, Dict, Iterator, List, Optional
+
+from ..errors import OperationFailed
+from ._internal import Wait, _enum, _from_dict, _repeated_dict
+
+_LOG = logging.getLogger('databricks.sdk')
+
+# all definitions in this file are in alphabetical order
+
+
@dataclass
class App:
    name: str
    """The name of the app. The name must contain only lowercase alphanumeric characters and hyphens.
    It must be unique within the workspace."""

    active_deployment: Optional[AppDeployment] = None
    """The active deployment of the app. A deployment is considered active when it has been deployed to
    the app compute."""

    app_status: Optional[ApplicationStatus] = None

    compute_status: Optional[ComputeStatus] = None

    create_time: Optional[str] = None
    """The creation time of the app. Formatted timestamp in ISO 6801."""

    creator: Optional[str] = None
    """The email of the user that created the app."""

    default_source_code_path: Optional[str] = None
    """The default workspace file system path of the source code from which app deployment are created.
    This field tracks the workspace source code path of the last active deployment."""

    description: Optional[str] = None
    """The description of the app."""

    pending_deployment: Optional[AppDeployment] = None
    """The pending deployment of the app. A deployment is considered pending when it is being prepared
    for deployment to the app compute."""

    resources: Optional[List[AppResource]] = None
    """Resources for the app."""

    service_principal_id: Optional[int] = None

    service_principal_name: Optional[str] = None

    update_time: Optional[str] = None
    """The update time of the app. Formatted timestamp in ISO 6801."""

    updater: Optional[str] = None
    """The email of the user that last updated the app."""

    url: Optional[str] = None
    """The URL of the app once it is deployed."""

    def as_dict(self) -> dict:
        """Serialize to a JSON-compatible dict, omitting unset fields."""
        out: dict = {}
        if self.active_deployment:
            out['active_deployment'] = self.active_deployment.as_dict()
        if self.app_status:
            out['app_status'] = self.app_status.as_dict()
        if self.compute_status:
            out['compute_status'] = self.compute_status.as_dict()
        if self.create_time is not None:
            out['create_time'] = self.create_time
        if self.creator is not None:
            out['creator'] = self.creator
        if self.default_source_code_path is not None:
            out['default_source_code_path'] = self.default_source_code_path
        if self.description is not None:
            out['description'] = self.description
        if self.name is not None:
            out['name'] = self.name
        if self.pending_deployment:
            out['pending_deployment'] = self.pending_deployment.as_dict()
        if self.resources:
            out['resources'] = [r.as_dict() for r in self.resources]
        if self.service_principal_id is not None:
            out['service_principal_id'] = self.service_principal_id
        if self.service_principal_name is not None:
            out['service_principal_name'] = self.service_principal_name
        if self.update_time is not None:
            out['update_time'] = self.update_time
        if self.updater is not None:
            out['updater'] = self.updater
        if self.url is not None:
            out['url'] = self.url
        return out

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> App:
        """Build an App from its dict representation."""
        return cls(active_deployment=_from_dict(d, 'active_deployment', AppDeployment),
                   app_status=_from_dict(d, 'app_status', ApplicationStatus),
                   compute_status=_from_dict(d, 'compute_status', ComputeStatus),
                   create_time=d.get('create_time'),
                   creator=d.get('creator'),
                   default_source_code_path=d.get('default_source_code_path'),
                   description=d.get('description'),
                   name=d.get('name'),
                   pending_deployment=_from_dict(d, 'pending_deployment', AppDeployment),
                   resources=_repeated_dict(d, 'resources', AppResource),
                   service_principal_id=d.get('service_principal_id'),
                   service_principal_name=d.get('service_principal_name'),
                   update_time=d.get('update_time'),
                   updater=d.get('updater'),
                   url=d.get('url'))
+
+
@dataclass
class AppAccessControlRequest:
    group_name: Optional[str] = None
    """name of the group"""

    permission_level: Optional[AppPermissionLevel] = None
    """Permission level"""

    service_principal_name: Optional[str] = None
    """application ID of a service principal"""

    user_name: Optional[str] = None
    """name of the user"""

    def as_dict(self) -> dict:
        """Serialize to a JSON-compatible dict, omitting unset fields."""
        out: dict = {}
        if self.group_name is not None:
            out['group_name'] = self.group_name
        if self.permission_level is not None:
            out['permission_level'] = self.permission_level.value
        if self.service_principal_name is not None:
            out['service_principal_name'] = self.service_principal_name
        if self.user_name is not None:
            out['user_name'] = self.user_name
        return out

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> AppAccessControlRequest:
        """Build an AppAccessControlRequest from its dict representation."""
        return cls(group_name=d.get('group_name'),
                   permission_level=_enum(d, 'permission_level', AppPermissionLevel),
                   service_principal_name=d.get('service_principal_name'),
                   user_name=d.get('user_name'))
+
+
@dataclass
class AppAccessControlResponse:
    all_permissions: Optional[List[AppPermission]] = None
    """All permissions."""

    display_name: Optional[str] = None
    """Display name of the user or service principal."""

    group_name: Optional[str] = None
    """name of the group"""

    service_principal_name: Optional[str] = None
    """Name of the service principal."""

    user_name: Optional[str] = None
    """name of the user"""

    def as_dict(self) -> dict:
        """Serialize to a JSON-compatible dict, omitting unset fields."""
        out: dict = {}
        if self.all_permissions:
            out['all_permissions'] = [p.as_dict() for p in self.all_permissions]
        if self.display_name is not None:
            out['display_name'] = self.display_name
        if self.group_name is not None:
            out['group_name'] = self.group_name
        if self.service_principal_name is not None:
            out['service_principal_name'] = self.service_principal_name
        if self.user_name is not None:
            out['user_name'] = self.user_name
        return out

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> AppAccessControlResponse:
        """Build an AppAccessControlResponse from its dict representation."""
        return cls(all_permissions=_repeated_dict(d, 'all_permissions', AppPermission),
                   display_name=d.get('display_name'),
                   group_name=d.get('group_name'),
                   service_principal_name=d.get('service_principal_name'),
                   user_name=d.get('user_name'))
+
+
@dataclass
class AppDeployment:
    create_time: Optional[str] = None
    """The creation time of the deployment. Formatted timestamp in ISO 6801."""

    creator: Optional[str] = None
    """The email of the user creates the deployment."""

    deployment_artifacts: Optional[AppDeploymentArtifacts] = None
    """The deployment artifacts for an app."""

    deployment_id: Optional[str] = None
    """The unique id of the deployment."""

    mode: Optional[AppDeploymentMode] = None
    """The mode of which the deployment will manage the source code."""

    source_code_path: Optional[str] = None
    """The workspace file system path of the source code used to create the app deployment. This is
    different from `deployment_artifacts.source_code_path`, which is the path used by the deployed
    app. The former refers to the original source code location of the app in the workspace during
    deployment creation, whereas the latter provides a system generated stable snapshotted source
    code path used by the deployment."""

    status: Optional[AppDeploymentStatus] = None
    """Status and status message of the deployment"""

    update_time: Optional[str] = None
    """The update time of the deployment. Formatted timestamp in ISO 6801."""

    def as_dict(self) -> dict:
        """Serialize to a JSON-compatible dict, omitting unset fields."""
        out: dict = {}
        if self.create_time is not None:
            out['create_time'] = self.create_time
        if self.creator is not None:
            out['creator'] = self.creator
        if self.deployment_artifacts:
            out['deployment_artifacts'] = self.deployment_artifacts.as_dict()
        if self.deployment_id is not None:
            out['deployment_id'] = self.deployment_id
        if self.mode is not None:
            out['mode'] = self.mode.value
        if self.source_code_path is not None:
            out['source_code_path'] = self.source_code_path
        if self.status:
            out['status'] = self.status.as_dict()
        if self.update_time is not None:
            out['update_time'] = self.update_time
        return out

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> AppDeployment:
        """Build an AppDeployment from its dict representation."""
        return cls(create_time=d.get('create_time'),
                   creator=d.get('creator'),
                   deployment_artifacts=_from_dict(d, 'deployment_artifacts', AppDeploymentArtifacts),
                   deployment_id=d.get('deployment_id'),
                   mode=_enum(d, 'mode', AppDeploymentMode),
                   source_code_path=d.get('source_code_path'),
                   status=_from_dict(d, 'status', AppDeploymentStatus),
                   update_time=d.get('update_time'))
+
+
@dataclass
class AppDeploymentArtifacts:
    source_code_path: Optional[str] = None
    """The snapshotted workspace file system path of the source code loaded by the deployed app."""

    def as_dict(self) -> dict:
        """Serialize to a JSON-compatible dict, omitting unset fields."""
        out: dict = {}
        if self.source_code_path is not None:
            out['source_code_path'] = self.source_code_path
        return out

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> AppDeploymentArtifacts:
        """Build an AppDeploymentArtifacts from its dict representation."""
        return cls(source_code_path=d.get('source_code_path'))
+
+
class AppDeploymentMode(Enum):
    """The mode of which the deployment will manage the source code."""

    AUTO_SYNC = 'AUTO_SYNC'
    SNAPSHOT = 'SNAPSHOT'
+
+
class AppDeploymentState(Enum):
    """State of a deployment (see AppDeploymentStatus.state)."""

    CANCELLED = 'CANCELLED'
    FAILED = 'FAILED'
    IN_PROGRESS = 'IN_PROGRESS'
    SUCCEEDED = 'SUCCEEDED'
+
+
@dataclass
class AppDeploymentStatus:
    message: Optional[str] = None
    """Message corresponding with the deployment state."""

    state: Optional[AppDeploymentState] = None
    """State of the deployment."""

    def as_dict(self) -> dict:
        """Serialize to a JSON-compatible dict, omitting unset fields."""
        out: dict = {}
        if self.message is not None:
            out['message'] = self.message
        if self.state is not None:
            out['state'] = self.state.value
        return out

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> AppDeploymentStatus:
        """Build an AppDeploymentStatus from its dict representation."""
        return cls(message=d.get('message'), state=_enum(d, 'state', AppDeploymentState))
+
+
@dataclass
class AppPermission:
    inherited: Optional[bool] = None

    inherited_from_object: Optional[List[str]] = None

    permission_level: Optional[AppPermissionLevel] = None
    """Permission level"""

    def as_dict(self) -> dict:
        """Serialize to a JSON-compatible dict, omitting unset fields."""
        out: dict = {}
        if self.inherited is not None:
            out['inherited'] = self.inherited
        if self.inherited_from_object:
            out['inherited_from_object'] = list(self.inherited_from_object)
        if self.permission_level is not None:
            out['permission_level'] = self.permission_level.value
        return out

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> AppPermission:
        """Build an AppPermission from its dict representation."""
        return cls(inherited=d.get('inherited'),
                   inherited_from_object=d.get('inherited_from_object'),
                   permission_level=_enum(d, 'permission_level', AppPermissionLevel))
+
+
class AppPermissionLevel(Enum):
    """Permission level of an app access-control entry."""

    CAN_MANAGE = 'CAN_MANAGE'
    CAN_USE = 'CAN_USE'
+
+
@dataclass
class AppPermissions:
    access_control_list: Optional[List[AppAccessControlResponse]] = None

    object_id: Optional[str] = None

    object_type: Optional[str] = None

    def as_dict(self) -> dict:
        """Serialize to a JSON-compatible dict, omitting unset fields."""
        out: dict = {}
        if self.access_control_list:
            out['access_control_list'] = [acl.as_dict() for acl in self.access_control_list]
        if self.object_id is not None:
            out['object_id'] = self.object_id
        if self.object_type is not None:
            out['object_type'] = self.object_type
        return out

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> AppPermissions:
        """Build an AppPermissions from its dict representation."""
        return cls(access_control_list=_repeated_dict(d, 'access_control_list', AppAccessControlResponse),
                   object_id=d.get('object_id'),
                   object_type=d.get('object_type'))
+
+
@dataclass
class AppPermissionsDescription:
    description: Optional[str] = None

    permission_level: Optional[AppPermissionLevel] = None
    """Permission level"""

    def as_dict(self) -> dict:
        """Serialize to a JSON-compatible dict, omitting unset fields."""
        out: dict = {}
        if self.description is not None:
            out['description'] = self.description
        if self.permission_level is not None:
            out['permission_level'] = self.permission_level.value
        return out

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> AppPermissionsDescription:
        """Build an AppPermissionsDescription from its dict representation."""
        return cls(description=d.get('description'),
                   permission_level=_enum(d, 'permission_level', AppPermissionLevel))
+
+
@dataclass
class AppPermissionsRequest:
    access_control_list: Optional[List[AppAccessControlRequest]] = None

    app_name: Optional[str] = None
    """The app for which to get or manage permissions."""

    def as_dict(self) -> dict:
        """Serialize to a JSON-compatible dict, omitting unset fields."""
        out: dict = {}
        if self.access_control_list:
            out['access_control_list'] = [acl.as_dict() for acl in self.access_control_list]
        if self.app_name is not None:
            out['app_name'] = self.app_name
        return out

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> AppPermissionsRequest:
        """Build an AppPermissionsRequest from its dict representation."""
        return cls(access_control_list=_repeated_dict(d, 'access_control_list', AppAccessControlRequest),
                   app_name=d.get('app_name'))
+
+
@dataclass
class AppResource:
    name: str
    """Name of the App Resource."""

    description: Optional[str] = None
    """Description of the App Resource."""

    job: Optional[AppResourceJob] = None

    secret: Optional[AppResourceSecret] = None

    serving_endpoint: Optional[AppResourceServingEndpoint] = None

    sql_warehouse: Optional[AppResourceSqlWarehouse] = None

    def as_dict(self) -> dict:
        """Serialize to a JSON-compatible dict, omitting unset fields."""
        out: dict = {}
        if self.description is not None:
            out['description'] = self.description
        if self.job:
            out['job'] = self.job.as_dict()
        if self.name is not None:
            out['name'] = self.name
        if self.secret:
            out['secret'] = self.secret.as_dict()
        if self.serving_endpoint:
            out['serving_endpoint'] = self.serving_endpoint.as_dict()
        if self.sql_warehouse:
            out['sql_warehouse'] = self.sql_warehouse.as_dict()
        return out

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> AppResource:
        """Build an AppResource from its dict representation."""
        return cls(description=d.get('description'),
                   job=_from_dict(d, 'job', AppResourceJob),
                   name=d.get('name'),
                   secret=_from_dict(d, 'secret', AppResourceSecret),
                   serving_endpoint=_from_dict(d, 'serving_endpoint', AppResourceServingEndpoint),
                   sql_warehouse=_from_dict(d, 'sql_warehouse', AppResourceSqlWarehouse))
+
+
@dataclass
class AppResourceJob:
    id: str
    """Id of the job to grant permission on."""

    permission: AppResourceJobJobPermission
    """Permissions to grant on the Job. Supported permissions are: "CAN_MANAGE", "IS_OWNER",
    "CAN_MANAGE_RUN", "CAN_VIEW"."""

    def as_dict(self) -> dict:
        """Serialize to a JSON-compatible dict, omitting unset fields."""
        out: dict = {}
        if self.id is not None:
            out['id'] = self.id
        if self.permission is not None:
            out['permission'] = self.permission.value
        return out

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> AppResourceJob:
        """Build an AppResourceJob from its dict representation."""
        return cls(id=d.get('id'), permission=_enum(d, 'permission', AppResourceJobJobPermission))
+
+
class AppResourceJobJobPermission(Enum):
    """Permission that can be granted on a Job resource (see AppResourceJob.permission)."""

    CAN_MANAGE = 'CAN_MANAGE'
    CAN_MANAGE_RUN = 'CAN_MANAGE_RUN'
    CAN_VIEW = 'CAN_VIEW'
    IS_OWNER = 'IS_OWNER'
+
+
@dataclass
class AppResourceSecret:
    scope: str
    """Scope of the secret to grant permission on."""

    key: str
    """Key of the secret to grant permission on."""

    permission: AppResourceSecretSecretPermission
    """Permission to grant on the secret scope. For secrets, only one permission is allowed. Permission
    must be one of: "READ", "WRITE", "MANAGE"."""

    def as_dict(self) -> dict:
        """Serialize to a JSON-compatible dict, omitting unset fields."""
        out: dict = {}
        if self.key is not None:
            out['key'] = self.key
        if self.permission is not None:
            out['permission'] = self.permission.value
        if self.scope is not None:
            out['scope'] = self.scope
        return out

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> AppResourceSecret:
        """Build an AppResourceSecret from its dict representation."""
        return cls(key=d.get('key'),
                   permission=_enum(d, 'permission', AppResourceSecretSecretPermission),
                   scope=d.get('scope'))
+
+
class AppResourceSecretSecretPermission(Enum):
    """Permission to grant on the secret scope. Supported permissions are: "READ", "WRITE", "MANAGE"."""

    # Only one of these may be granted per secret (see AppResourceSecret.permission).
    MANAGE = 'MANAGE'
    READ = 'READ'
    WRITE = 'WRITE'
+
+
@dataclass
class AppResourceServingEndpoint:
    name: str
    """Name of the serving endpoint to grant permission on."""

    permission: AppResourceServingEndpointServingEndpointPermission
    """Permission to grant on the serving endpoint. Supported permissions are: "CAN_MANAGE",
    "CAN_QUERY", "CAN_VIEW"."""

    def as_dict(self) -> dict:
        """Serialize to a JSON-compatible dict, omitting unset fields."""
        out: dict = {}
        if self.name is not None:
            out['name'] = self.name
        if self.permission is not None:
            out['permission'] = self.permission.value
        return out

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> AppResourceServingEndpoint:
        """Build an AppResourceServingEndpoint from its dict representation."""
        return cls(name=d.get('name'),
                   permission=_enum(d, 'permission', AppResourceServingEndpointServingEndpointPermission))
+
+
class AppResourceServingEndpointServingEndpointPermission(Enum):
    """Permission that can be granted on a serving endpoint (see AppResourceServingEndpoint.permission)."""

    CAN_MANAGE = 'CAN_MANAGE'
    CAN_QUERY = 'CAN_QUERY'
    CAN_VIEW = 'CAN_VIEW'
+
+
@dataclass
class AppResourceSqlWarehouse:
    id: str
    """Id of the SQL warehouse to grant permission on."""

    permission: AppResourceSqlWarehouseSqlWarehousePermission
    """Permission to grant on the SQL warehouse. Supported permissions are: "CAN_MANAGE", "CAN_USE",
    "IS_OWNER"."""

    def as_dict(self) -> dict:
        """Serialize to a JSON-compatible dict, omitting unset fields."""
        out: dict = {}
        if self.id is not None:
            out['id'] = self.id
        if self.permission is not None:
            out['permission'] = self.permission.value
        return out

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> AppResourceSqlWarehouse:
        """Build an AppResourceSqlWarehouse from its dict representation."""
        return cls(id=d.get('id'),
                   permission=_enum(d, 'permission', AppResourceSqlWarehouseSqlWarehousePermission))
+
+
class AppResourceSqlWarehouseSqlWarehousePermission(Enum):
    """Permission that can be granted on a SQL warehouse (see AppResourceSqlWarehouse.permission)."""

    CAN_MANAGE = 'CAN_MANAGE'
    CAN_USE = 'CAN_USE'
    IS_OWNER = 'IS_OWNER'
+
+
class ApplicationState(Enum):
    """State of the application (see ApplicationStatus.state)."""

    CRASHED = 'CRASHED'
    DEPLOYING = 'DEPLOYING'
    RUNNING = 'RUNNING'
    UNAVAILABLE = 'UNAVAILABLE'
+
+
@dataclass
class ApplicationStatus:
    message: Optional[str] = None
    """Application status message"""

    state: Optional[ApplicationState] = None
    """State of the application."""

    def as_dict(self) -> dict:
        """Serialize to a JSON-compatible dict, omitting unset fields."""
        out: dict = {}
        if self.message is not None:
            out['message'] = self.message
        if self.state is not None:
            out['state'] = self.state.value
        return out

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> ApplicationStatus:
        """Build an ApplicationStatus from its dict representation."""
        return cls(message=d.get('message'), state=_enum(d, 'state', ApplicationState))
+
+
class ComputeState(Enum):
    """State of the app compute (see ComputeStatus.state)."""

    ACTIVE = 'ACTIVE'
    DELETING = 'DELETING'
    ERROR = 'ERROR'
    STARTING = 'STARTING'
    STOPPED = 'STOPPED'
    STOPPING = 'STOPPING'
    UPDATING = 'UPDATING'
+
+
@dataclass
class ComputeStatus:
    message: Optional[str] = None
    """Compute status message"""

    state: Optional[ComputeState] = None
    """State of the app compute."""

    def as_dict(self) -> dict:
        """Serialize to a JSON-compatible dict, omitting unset fields."""
        out: dict = {}
        if self.message is not None:
            out['message'] = self.message
        if self.state is not None:
            out['state'] = self.state.value
        return out

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> ComputeStatus:
        """Build a ComputeStatus from its dict representation."""
        return cls(message=d.get('message'), state=_enum(d, 'state', ComputeState))
+
+
@dataclass
class CreateAppDeploymentRequest:
    app_name: Optional[str] = None
    """The name of the app."""

    deployment_id: Optional[str] = None
    """The unique id of the deployment."""

    mode: Optional[AppDeploymentMode] = None
    """The mode of which the deployment will manage the source code."""

    source_code_path: Optional[str] = None
    """The workspace file system path of the source code used to create the app deployment. This is
    different from `deployment_artifacts.source_code_path`, which is the path used by the deployed
    app. The former refers to the original source code location of the app in the workspace during
    deployment creation, whereas the latter provides a system generated stable snapshotted source
    code path used by the deployment."""

    def as_dict(self) -> dict:
        """Serialize to a JSON-compatible dict, omitting unset fields."""
        out: dict = {}
        if self.app_name is not None:
            out['app_name'] = self.app_name
        if self.deployment_id is not None:
            out['deployment_id'] = self.deployment_id
        if self.mode is not None:
            out['mode'] = self.mode.value
        if self.source_code_path is not None:
            out['source_code_path'] = self.source_code_path
        return out

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> CreateAppDeploymentRequest:
        """Build a CreateAppDeploymentRequest from its dict representation."""
        return cls(app_name=d.get('app_name'),
                   deployment_id=d.get('deployment_id'),
                   mode=_enum(d, 'mode', AppDeploymentMode),
                   source_code_path=d.get('source_code_path'))
+
+
@dataclass
class CreateAppRequest:
    name: str
    """The name of the app. The name must contain only lowercase alphanumeric characters and hyphens.
    It must be unique within the workspace."""

    description: Optional[str] = None
    """The description of the app."""

    resources: Optional[List[AppResource]] = None
    """Resources for the app."""

    def as_dict(self) -> dict:
        """Serialize to a JSON-compatible dict, omitting unset fields."""
        out: dict = {}
        if self.description is not None:
            out['description'] = self.description
        if self.name is not None:
            out['name'] = self.name
        if self.resources:
            out['resources'] = [r.as_dict() for r in self.resources]
        return out

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> CreateAppRequest:
        """Build a CreateAppRequest from its dict representation."""
        return cls(description=d.get('description'),
                   name=d.get('name'),
                   resources=_repeated_dict(d, 'resources', AppResource))
+
+
@dataclass
class GetAppPermissionLevelsResponse:
    permission_levels: Optional[List[AppPermissionsDescription]] = None
    """Specific permission levels"""

    def as_dict(self) -> dict:
        """Serialize to a JSON-compatible dict, omitting unset fields."""
        out: dict = {}
        if self.permission_levels:
            out['permission_levels'] = [lvl.as_dict() for lvl in self.permission_levels]
        return out

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> GetAppPermissionLevelsResponse:
        """Build a GetAppPermissionLevelsResponse from its dict representation."""
        return cls(permission_levels=_repeated_dict(d, 'permission_levels', AppPermissionsDescription))
+
+
@dataclass
class ListAppDeploymentsResponse:
    app_deployments: Optional[List[AppDeployment]] = None
    """Deployment history of the app."""

    next_page_token: Optional[str] = None
    """Pagination token to request the next page of apps."""

    def as_dict(self) -> dict:
        """Serialize to a JSON-compatible dict, omitting unset fields."""
        out: dict = {}
        if self.app_deployments:
            out['app_deployments'] = [dep.as_dict() for dep in self.app_deployments]
        if self.next_page_token is not None:
            out['next_page_token'] = self.next_page_token
        return out

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> ListAppDeploymentsResponse:
        """Build a ListAppDeploymentsResponse from its dict representation."""
        return cls(app_deployments=_repeated_dict(d, 'app_deployments', AppDeployment),
                   next_page_token=d.get('next_page_token'))
+
+
@dataclass
class ListAppsResponse:
    apps: Optional[List[App]] = None

    next_page_token: Optional[str] = None
    """Pagination token to request the next page of apps."""

    def as_dict(self) -> dict:
        """Serialize to a JSON-compatible dict, omitting unset fields."""
        out: dict = {}
        if self.apps:
            out['apps'] = [app.as_dict() for app in self.apps]
        if self.next_page_token is not None:
            out['next_page_token'] = self.next_page_token
        return out

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> ListAppsResponse:
        """Build a ListAppsResponse from its dict representation."""
        return cls(apps=_repeated_dict(d, 'apps', App), next_page_token=d.get('next_page_token'))
+
+
@dataclass
class StartAppRequest:
    """Request body to start the named app."""

    name: Optional[str] = None
    """The name of the app."""
+
+
@dataclass
class StopAppRequest:
    """Request body to stop the named app."""

    name: Optional[str] = None
    """The name of the app."""
+
+
@dataclass
class UpdateAppRequest:
    name: str
    """The name of the app. The name must contain only lowercase alphanumeric characters and hyphens.
    It must be unique within the workspace."""

    description: Optional[str] = None
    """The description of the app."""

    resources: Optional[List[AppResource]] = None
    """Resources for the app."""

    def as_dict(self) -> dict:
        """Serialize to a JSON-compatible dict, omitting unset fields."""
        out: dict = {}
        if self.description is not None:
            out['description'] = self.description
        if self.name is not None:
            out['name'] = self.name
        if self.resources:
            out['resources'] = [r.as_dict() for r in self.resources]
        return out

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> UpdateAppRequest:
        """Build an UpdateAppRequest from its dict representation."""
        return cls(description=d.get('description'),
                   name=d.get('name'),
                   resources=_repeated_dict(d, 'resources', AppResource))
+
+
class AppsAPI:
    """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend
    Databricks services, and enable users to interact through single sign-on."""

    def __init__(self, api_client):
        # All HTTP calls are issued through the shared ApiClient.
        self._api = api_client
+
+    def wait_get_app_active(self,
+                            name: str,
+                            timeout=timedelta(minutes=20),
+                            callback: Optional[Callable[[App], None]] = None) -> App:
+        deadline = time.time() + timeout.total_seconds()
+        target_states = (ComputeState.ACTIVE, )
+        failure_states = (ComputeState.ERROR, ComputeState.STOPPED, )
+        status_message = 'polling...'
+        attempt = 1
+        while time.time() < deadline:
+            poll = self.get(name=name)
+            status = poll.compute_status.state
+            status_message = f'current status: {status}'
+            if poll.compute_status:
+                status_message = poll.compute_status.message
+            if status in target_states:
+                return poll
+            if callback:
+                callback(poll)
+            if status in failure_states:
+                msg = f'failed to reach ACTIVE, got {status}: {status_message}'
+                raise OperationFailed(msg)
+            prefix = f"name={name}"
+            sleep = attempt
+            if sleep > 10:
+                # sleep 10s max per attempt
+                sleep = 10
+            _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
+            time.sleep(sleep + random.random())
+            attempt += 1
+        raise TimeoutError(f'timed out after {timeout}: {status_message}')
+
+    def wait_get_app_stopped(self,
+                             name: str,
+                             timeout=timedelta(minutes=20),
+                             callback: Optional[Callable[[App], None]] = None) -> App:
+        deadline = time.time() + timeout.total_seconds()
+        target_states = (ComputeState.STOPPED, )
+        failure_states = (ComputeState.ERROR, )
+        status_message = 'polling...'
+        attempt = 1
+        while time.time() < deadline:
+            poll = self.get(name=name)
+            status = poll.compute_status.state
+            status_message = f'current status: {status}'
+            if poll.compute_status:
+                status_message = poll.compute_status.message
+            if status in target_states:
+                return poll
+            if callback:
+                callback(poll)
+            if status in failure_states:
+                msg = f'failed to reach STOPPED, got {status}: {status_message}'
+                raise OperationFailed(msg)
+            prefix = f"name={name}"
+            sleep = attempt
+            if sleep > 10:
+                # sleep 10s max per attempt
+                sleep = 10
+            _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
+            time.sleep(sleep + random.random())
+            attempt += 1
+        raise TimeoutError(f'timed out after {timeout}: {status_message}')
+
+    def wait_get_deployment_app_succeeded(
+            self,
+            app_name: str,
+            deployment_id: str,
+            timeout=timedelta(minutes=20),
+            callback: Optional[Callable[[AppDeployment], None]] = None) -> AppDeployment:
+        deadline = time.time() + timeout.total_seconds()
+        target_states = (AppDeploymentState.SUCCEEDED, )
+        failure_states = (AppDeploymentState.FAILED, )
+        status_message = 'polling...'
+        attempt = 1
+        while time.time() < deadline:
+            poll = self.get_deployment(app_name=app_name, deployment_id=deployment_id)
+            status = poll.status.state
+            status_message = f'current status: {status}'
+            if poll.status:
+                status_message = poll.status.message
+            if status in target_states:
+                return poll
+            if callback:
+                callback(poll)
+            if status in failure_states:
+                msg = f'failed to reach SUCCEEDED, got {status}: {status_message}'
+                raise OperationFailed(msg)
+            prefix = f"app_name={app_name}, deployment_id={deployment_id}"
+            sleep = attempt
+            if sleep > 10:
+                # sleep 10s max per attempt
+                sleep = 10
+            _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
+            time.sleep(sleep + random.random())
+            attempt += 1
+        raise TimeoutError(f'timed out after {timeout}: {status_message}')
+
+    def create(self,
+               name: str,
+               *,
+               description: Optional[str] = None,
+               resources: Optional[List[AppResource]] = None) -> Wait[App]:
+        """Create an app.
+        
+        Creates a new app.
+        
+        :param name: str
+          The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It
+          must be unique within the workspace.
+        :param description: str (optional)
+          The description of the app.
+        :param resources: List[:class:`AppResource`] (optional)
+          Resources for the app.
+        
+        :returns:
+          Long-running operation waiter for :class:`App`.
+          See :method:wait_get_app_active for more details.
+        """
+        body = {}
+        if description is not None: body['description'] = description
+        if name is not None: body['name'] = name
+        if resources is not None: body['resources'] = [v.as_dict() for v in resources]
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        op_response = self._api.do('POST', '/api/2.0/apps', body=body, headers=headers)
+        return Wait(self.wait_get_app_active, response=App.from_dict(op_response), name=op_response['name'])
+
+    def create_and_wait(self,
+                        name: str,
+                        *,
+                        description: Optional[str] = None,
+                        resources: Optional[List[AppResource]] = None,
+                        timeout=timedelta(minutes=20)) -> App:
+        return self.create(description=description, name=name, resources=resources).result(timeout=timeout)
+
+    def delete(self, name: str) -> App:
+        """Delete an app.
+        
+        Deletes an app.
+        
+        :param name: str
+          The name of the app.
+        
+        :returns: :class:`App`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('DELETE', f'/api/2.0/apps/{name}', headers=headers)
+        return App.from_dict(res)
+
+    def deploy(self,
+               app_name: str,
+               *,
+               deployment_id: Optional[str] = None,
+               mode: Optional[AppDeploymentMode] = None,
+               source_code_path: Optional[str] = None) -> Wait[AppDeployment]:
+        """Create an app deployment.
+        
+        Creates an app deployment for the app with the supplied name.
+        
+        :param app_name: str
+          The name of the app.
+        :param deployment_id: str (optional)
+          The unique id of the deployment.
+        :param mode: :class:`AppDeploymentMode` (optional)
+          The mode of which the deployment will manage the source code.
+        :param source_code_path: str (optional)
+          The workspace file system path of the source code used to create the app deployment. This is
+          different from `deployment_artifacts.source_code_path`, which is the path used by the deployed app.
+          The former refers to the original source code location of the app in the workspace during deployment
+          creation, whereas the latter provides a system generated stable snapshotted source code path used by
+          the deployment.
+        
+        :returns:
+          Long-running operation waiter for :class:`AppDeployment`.
+          See :method:wait_get_deployment_app_succeeded for more details.
+        """
+        body = {}
+        if deployment_id is not None: body['deployment_id'] = deployment_id
+        if mode is not None: body['mode'] = mode.value
+        if source_code_path is not None: body['source_code_path'] = source_code_path
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        op_response = self._api.do('POST',
+                                   f'/api/2.0/apps/{app_name}/deployments',
+                                   body=body,
+                                   headers=headers)
+        return Wait(self.wait_get_deployment_app_succeeded,
+                    response=AppDeployment.from_dict(op_response),
+                    app_name=app_name,
+                    deployment_id=op_response['deployment_id'])
+
+    def deploy_and_wait(
+        self,
+        app_name: str,
+        *,
+        deployment_id: Optional[str] = None,
+        mode: Optional[AppDeploymentMode] = None,
+        source_code_path: Optional[str] = None,
+        timeout=timedelta(minutes=20)) -> AppDeployment:
+        return self.deploy(app_name=app_name,
+                           deployment_id=deployment_id,
+                           mode=mode,
+                           source_code_path=source_code_path).result(timeout=timeout)
+
+    def get(self, name: str) -> App:
+        """Get an app.
+        
+        Retrieves information for the app with the supplied name.
+        
+        :param name: str
+          The name of the app.
+        
+        :returns: :class:`App`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET', f'/api/2.0/apps/{name}', headers=headers)
+        return App.from_dict(res)
+
+    def get_deployment(self, app_name: str, deployment_id: str) -> AppDeployment:
+        """Get an app deployment.
+        
+        Retrieves information for the app deployment with the supplied name and deployment id.
+        
+        :param app_name: str
+          The name of the app.
+        :param deployment_id: str
+          The unique id of the deployment.
+        
+        :returns: :class:`AppDeployment`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET', f'/api/2.0/apps/{app_name}/deployments/{deployment_id}', headers=headers)
+        return AppDeployment.from_dict(res)
+
+    def get_permission_levels(self, app_name: str) -> GetAppPermissionLevelsResponse:
+        """Get app permission levels.
+        
+        Gets the permission levels that a user can have on an object.
+        
+        :param app_name: str
+          The app for which to get or manage permissions.
+        
+        :returns: :class:`GetAppPermissionLevelsResponse`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET', f'/api/2.0/permissions/apps/{app_name}/permissionLevels', headers=headers)
+        return GetAppPermissionLevelsResponse.from_dict(res)
+
+    def get_permissions(self, app_name: str) -> AppPermissions:
+        """Get app permissions.
+        
+        Gets the permissions of an app. Apps can inherit permissions from their root object.
+        
+        :param app_name: str
+          The app for which to get or manage permissions.
+        
+        :returns: :class:`AppPermissions`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET', f'/api/2.0/permissions/apps/{app_name}', headers=headers)
+        return AppPermissions.from_dict(res)
+
+    def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[App]:
+        """List apps.
+        
+        Lists all apps in the workspace.
+        
+        :param page_size: int (optional)
+          Upper bound for items returned.
+        :param page_token: str (optional)
+          Pagination token to go to the next page of apps. Requests first page if absent.
+        
+        :returns: Iterator over :class:`App`
+        """
+
+        query = {}
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        headers = {'Accept': 'application/json', }
+
+        while True:
+            json = self._api.do('GET', '/api/2.0/apps', query=query, headers=headers)
+            if 'apps' in json:
+                for v in json['apps']:
+                    yield App.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+
+    def list_deployments(self,
+                         app_name: str,
+                         *,
+                         page_size: Optional[int] = None,
+                         page_token: Optional[str] = None) -> Iterator[AppDeployment]:
+        """List app deployments.
+        
+        Lists all app deployments for the app with the supplied name.
+        
+        :param app_name: str
+          The name of the app.
+        :param page_size: int (optional)
+          Upper bound for items returned.
+        :param page_token: str (optional)
+          Pagination token to go to the next page of apps. Requests first page if absent.
+        
+        :returns: Iterator over :class:`AppDeployment`
+        """
+
+        query = {}
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        headers = {'Accept': 'application/json', }
+
+        while True:
+            json = self._api.do('GET', f'/api/2.0/apps/{app_name}/deployments', query=query, headers=headers)
+            if 'app_deployments' in json:
+                for v in json['app_deployments']:
+                    yield AppDeployment.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+
+    def set_permissions(
+            self,
+            app_name: str,
+            *,
+            access_control_list: Optional[List[AppAccessControlRequest]] = None) -> AppPermissions:
+        """Set app permissions.
+        
+        Sets permissions on an app. Apps can inherit permissions from their root object.
+        
+        :param app_name: str
+          The app for which to get or manage permissions.
+        :param access_control_list: List[:class:`AppAccessControlRequest`] (optional)
+        
+        :returns: :class:`AppPermissions`
+        """
+        body = {}
+        if access_control_list is not None:
+            body['access_control_list'] = [v.as_dict() for v in access_control_list]
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('PUT', f'/api/2.0/permissions/apps/{app_name}', body=body, headers=headers)
+        return AppPermissions.from_dict(res)
+
+    def start(self, name: str) -> Wait[App]:
+        """Start an app.
+        
+        Start the last active deployment of the app in the workspace.
+        
+        :param name: str
+          The name of the app.
+        
+        :returns:
+          Long-running operation waiter for :class:`App`.
+          See :method:wait_get_app_active for more details.
+        """
+
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        op_response = self._api.do('POST', f'/api/2.0/apps/{name}/start', headers=headers)
+        return Wait(self.wait_get_app_active, response=App.from_dict(op_response), name=op_response['name'])
+
+    def start_and_wait(self, name: str, timeout=timedelta(minutes=20)) -> App:
+        return self.start(name=name).result(timeout=timeout)
+
+    def stop(self, name: str) -> Wait[App]:
+        """Stop an app.
+        
+        Stops the active deployment of the app in the workspace.
+        
+        :param name: str
+          The name of the app.
+        
+        :returns:
+          Long-running operation waiter for :class:`App`.
+          See :method:wait_get_app_stopped for more details.
+        """
+
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        op_response = self._api.do('POST', f'/api/2.0/apps/{name}/stop', headers=headers)
+        return Wait(self.wait_get_app_stopped, response=App.from_dict(op_response), name=op_response['name'])
+
+    def stop_and_wait(self, name: str, timeout=timedelta(minutes=20)) -> App:
+        return self.stop(name=name).result(timeout=timeout)
+
+    def update(self,
+               name: str,
+               *,
+               description: Optional[str] = None,
+               resources: Optional[List[AppResource]] = None) -> App:
+        """Update an app.
+        
+        Updates the app with the supplied name.
+        
+        :param name: str
+          The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It
+          must be unique within the workspace.
+        :param description: str (optional)
+          The description of the app.
+        :param resources: List[:class:`AppResource`] (optional)
+          Resources for the app.
+        
+        :returns: :class:`App`
+        """
+        body = {}
+        if description is not None: body['description'] = description
+        if resources is not None: body['resources'] = [v.as_dict() for v in resources]
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('PATCH', f'/api/2.0/apps/{name}', body=body, headers=headers)
+        return App.from_dict(res)
+
+    def update_permissions(
+            self,
+            app_name: str,
+            *,
+            access_control_list: Optional[List[AppAccessControlRequest]] = None) -> AppPermissions:
+        """Update app permissions.
+        
+        Updates the permissions on an app. Apps can inherit permissions from their root object.
+        
+        :param app_name: str
+          The app for which to get or manage permissions.
+        :param access_control_list: List[:class:`AppAccessControlRequest`] (optional)
+        
+        :returns: :class:`AppPermissions`
+        """
+        body = {}
+        if access_control_list is not None:
+            body['access_control_list'] = [v.as_dict() for v in access_control_list]
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('PATCH', f'/api/2.0/permissions/apps/{app_name}', body=body, headers=headers)
+        return AppPermissions.from_dict(res)
diff --git a/databricks/sdk/service/billing.py b/databricks/sdk/service/billing.py
index 1d4a773c6..cfb7ba0b4 100755
--- a/databricks/sdk/service/billing.py
+++ b/databricks/sdk/service/billing.py
@@ -15,204 +15,412 @@
 
 
 @dataclass
-class Budget:
-    """Budget configuration to be created."""
+class ActionConfiguration:
+    action_configuration_id: Optional[str] = None
+    """Databricks action configuration ID."""
 
-    name: str
-    """Human-readable name of the budget."""
+    action_type: Optional[ActionConfigurationType] = None
+    """The type of the action."""
 
-    period: str
-    """Period length in years, months, weeks and/or days. Examples: `1 month`, `30 days`, `1 year, 2
-    months, 1 week, 2 days`"""
+    target: Optional[str] = None
+    """Target for the action. For example, an email address."""
 
-    start_date: str
-    """Start date of the budget period calculation."""
+    def as_dict(self) -> dict:
+        """Serializes the ActionConfiguration into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.action_configuration_id is not None:
+            body['action_configuration_id'] = self.action_configuration_id
+        if self.action_type is not None: body['action_type'] = self.action_type.value
+        if self.target is not None: body['target'] = self.target
+        return body
 
-    target_amount: str
-    """Target amount of the budget per period in USD."""
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ActionConfiguration:
+        """Deserializes the ActionConfiguration from a dictionary."""
+        return cls(action_configuration_id=d.get('action_configuration_id', None),
+                   action_type=_enum(d, 'action_type', ActionConfigurationType),
+                   target=d.get('target', None))
 
-    filter: str
-    """SQL-like filter expression with workspaceId, SKU and tag. Usage in your account that matches
-    this expression will be counted in this budget.
-    
-    Supported properties on left-hand side of comparison: * `workspaceId` - the ID of the workspace
-    * `sku` - SKU of the cluster, e.g. `STANDARD_ALL_PURPOSE_COMPUTE` * `tag.tagName`, `tag.'tag
-    name'` - tag of the cluster
-    
-    Supported comparison operators: * `=` - equal * `!=` - not equal
-    
-    Supported logical operators: `AND`, `OR`.
-    
-    Examples: * `workspaceId=123 OR (sku='STANDARD_ALL_PURPOSE_COMPUTE' AND tag.'my tag'='my
-    value')` * `workspaceId!=456` * `sku='STANDARD_ALL_PURPOSE_COMPUTE' OR
-    sku='PREMIUM_ALL_PURPOSE_COMPUTE'` * `tag.name1='value1' AND tag.name2='value2'`"""
 
-    alerts: Optional[List[BudgetAlert]] = None
+class ActionConfigurationType(Enum):
+    """The type of action taken when a budget alert triggers; currently only email notification."""
+
+    EMAIL_NOTIFICATION = 'EMAIL_NOTIFICATION'
+
+
+@dataclass
+class AlertConfiguration:
+    action_configurations: Optional[List[ActionConfiguration]] = None
+    """Configured actions for this alert. These define what happens when an alert enters a triggered
+    state."""
+
+    alert_configuration_id: Optional[str] = None
+    """Databricks alert configuration ID."""
+
+    quantity_threshold: Optional[str] = None
+    """The threshold for the budget alert to determine if it is in a triggered state. The number is
+    evaluated based on `quantity_type`."""
+
+    quantity_type: Optional[AlertConfigurationQuantityType] = None
+    """The way to calculate cost for this budget alert. This is what `quantity_threshold` is measured
+    in."""
+
+    time_period: Optional[AlertConfigurationTimePeriod] = None
+    """The time window of usage data for the budget."""
 
-    end_date: Optional[str] = None
-    """Optional end date of the budget."""
+    trigger_type: Optional[AlertConfigurationTriggerType] = None
+    """The evaluation method to determine when this budget alert is in a triggered state."""
 
     def as_dict(self) -> dict:
-        """Serializes the Budget into a dictionary suitable for use as a JSON request body."""
+        """Serializes the AlertConfiguration into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.alerts: body['alerts'] = [v.as_dict() for v in self.alerts]
-        if self.end_date is not None: body['end_date'] = self.end_date
-        if self.filter is not None: body['filter'] = self.filter
-        if self.name is not None: body['name'] = self.name
-        if self.period is not None: body['period'] = self.period
-        if self.start_date is not None: body['start_date'] = self.start_date
-        if self.target_amount is not None: body['target_amount'] = self.target_amount
+        if self.action_configurations:
+            body['action_configurations'] = [v.as_dict() for v in self.action_configurations]
+        if self.alert_configuration_id is not None:
+            body['alert_configuration_id'] = self.alert_configuration_id
+        if self.quantity_threshold is not None: body['quantity_threshold'] = self.quantity_threshold
+        if self.quantity_type is not None: body['quantity_type'] = self.quantity_type.value
+        if self.time_period is not None: body['time_period'] = self.time_period.value
+        if self.trigger_type is not None: body['trigger_type'] = self.trigger_type.value
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> Budget:
-        """Deserializes the Budget from a dictionary."""
-        return cls(alerts=_repeated_dict(d, 'alerts', BudgetAlert),
-                   end_date=d.get('end_date', None),
-                   filter=d.get('filter', None),
-                   name=d.get('name', None),
-                   period=d.get('period', None),
-                   start_date=d.get('start_date', None),
-                   target_amount=d.get('target_amount', None))
+    def from_dict(cls, d: Dict[str, any]) -> AlertConfiguration:
+        """Deserializes the AlertConfiguration from a dictionary."""
+        return cls(action_configurations=_repeated_dict(d, 'action_configurations', ActionConfiguration),
+                   alert_configuration_id=d.get('alert_configuration_id', None),
+                   quantity_threshold=d.get('quantity_threshold', None),
+                   quantity_type=_enum(d, 'quantity_type', AlertConfigurationQuantityType),
+                   time_period=_enum(d, 'time_period', AlertConfigurationTimePeriod),
+                   trigger_type=_enum(d, 'trigger_type', AlertConfigurationTriggerType))
+
+
+class AlertConfigurationQuantityType(Enum):
+    """The unit in which `quantity_threshold` is measured when evaluating a budget alert."""
+
+    LIST_PRICE_DOLLARS_USD = 'LIST_PRICE_DOLLARS_USD'
+
+
+class AlertConfigurationTimePeriod(Enum):
+    """The time window of usage data considered for a budget alert."""
+
+    MONTH = 'MONTH'
+
+
+class AlertConfigurationTriggerType(Enum):
+    """The evaluation method that determines when a budget alert enters a triggered state."""
+
+    CUMULATIVE_SPENDING_EXCEEDED = 'CUMULATIVE_SPENDING_EXCEEDED'
 
 
 @dataclass
-class BudgetAlert:
-    email_notifications: Optional[List[str]] = None
-    """List of email addresses to be notified when budget percentage is exceeded in the given period."""
+class BudgetConfiguration:
+    account_id: Optional[str] = None
+    """Databricks account ID."""
+
+    alert_configurations: Optional[List[AlertConfiguration]] = None
+    """Alerts to configure when this budget is in a triggered state. Budgets must have exactly one
+    alert configuration."""
+
+    budget_configuration_id: Optional[str] = None
+    """Databricks budget configuration ID."""
+
+    create_time: Optional[int] = None
+    """Creation time of this budget configuration."""
+
+    display_name: Optional[str] = None
+    """Human-readable name of budget configuration. Max Length: 128"""
+
+    filter: Optional[BudgetConfigurationFilter] = None
+    """Configured filters for this budget. These are applied to your account's usage to limit the scope
+    of what is considered for this budget. Leave empty to include all usage for this account. All
+    provided filters must be matched for usage to be included."""
 
-    min_percentage: Optional[int] = None
-    """Percentage of the target amount used in the currect period that will trigger a notification."""
+    update_time: Optional[int] = None
+    """Update time of this budget configuration."""
 
     def as_dict(self) -> dict:
-        """Serializes the BudgetAlert into a dictionary suitable for use as a JSON request body."""
+        """Serializes the BudgetConfiguration into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.email_notifications: body['email_notifications'] = [v for v in self.email_notifications]
-        if self.min_percentage is not None: body['min_percentage'] = self.min_percentage
+        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.alert_configurations:
+            body['alert_configurations'] = [v.as_dict() for v in self.alert_configurations]
+        if self.budget_configuration_id is not None:
+            body['budget_configuration_id'] = self.budget_configuration_id
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.filter: body['filter'] = self.filter.as_dict()
+        if self.update_time is not None: body['update_time'] = self.update_time
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> BudgetAlert:
-        """Deserializes the BudgetAlert from a dictionary."""
-        return cls(email_notifications=d.get('email_notifications', None),
-                   min_percentage=d.get('min_percentage', None))
+    def from_dict(cls, d: Dict[str, any]) -> BudgetConfiguration:
+        """Deserializes the BudgetConfiguration from a dictionary."""
+        return cls(account_id=d.get('account_id', None),
+                   alert_configurations=_repeated_dict(d, 'alert_configurations', AlertConfiguration),
+                   budget_configuration_id=d.get('budget_configuration_id', None),
+                   create_time=d.get('create_time', None),
+                   display_name=d.get('display_name', None),
+                   filter=_from_dict(d, 'filter', BudgetConfigurationFilter),
+                   update_time=d.get('update_time', None))
 
 
 @dataclass
-class BudgetList:
-    """List of budgets."""
+class BudgetConfigurationFilter:
+    tags: Optional[List[BudgetConfigurationFilterTagClause]] = None
+    """A list of tag keys and values that will limit the budget to usage that includes those specific
+    custom tags. Tags are case-sensitive and should be entered exactly as they appear in your usage
+    data."""
 
-    budgets: Optional[List[BudgetWithStatus]] = None
+    workspace_id: Optional[BudgetConfigurationFilterWorkspaceIdClause] = None
+    """If provided, usage must match with the provided Databricks workspace IDs."""
 
     def as_dict(self) -> dict:
-        """Serializes the BudgetList into a dictionary suitable for use as a JSON request body."""
+        """Serializes the BudgetConfigurationFilter into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.budgets: body['budgets'] = [v.as_dict() for v in self.budgets]
+        if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
+        if self.workspace_id: body['workspace_id'] = self.workspace_id.as_dict()
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> BudgetList:
-        """Deserializes the BudgetList from a dictionary."""
-        return cls(budgets=_repeated_dict(d, 'budgets', BudgetWithStatus))
+    def from_dict(cls, d: Dict[str, any]) -> BudgetConfigurationFilter:
+        """Deserializes the BudgetConfigurationFilter from a dictionary."""
+        return cls(tags=_repeated_dict(d, 'tags', BudgetConfigurationFilterTagClause),
+                   workspace_id=_from_dict(d, 'workspace_id', BudgetConfigurationFilterWorkspaceIdClause))
 
 
 @dataclass
-class BudgetWithStatus:
-    """Budget configuration with daily status."""
+class BudgetConfigurationFilterClause:
+    operator: Optional[BudgetConfigurationFilterOperator] = None
 
-    alerts: Optional[List[BudgetAlert]] = None
+    values: Optional[List[str]] = None
 
-    budget_id: Optional[str] = None
+    def as_dict(self) -> dict:
+        """Serializes the BudgetConfigurationFilterClause into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.operator is not None: body['operator'] = self.operator.value
+        if self.values: body['values'] = [v for v in self.values]
+        return body
 
-    creation_time: Optional[str] = None
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> BudgetConfigurationFilterClause:
+        """Deserializes the BudgetConfigurationFilterClause from a dictionary."""
+        return cls(operator=_enum(d, 'operator', BudgetConfigurationFilterOperator),
+                   values=d.get('values', None))
 
-    end_date: Optional[str] = None
-    """Optional end date of the budget."""
 
-    filter: Optional[str] = None
-    """SQL-like filter expression with workspaceId, SKU and tag. Usage in your account that matches
-    this expression will be counted in this budget.
-    
-    Supported properties on left-hand side of comparison: * `workspaceId` - the ID of the workspace
-    * `sku` - SKU of the cluster, e.g. `STANDARD_ALL_PURPOSE_COMPUTE` * `tag.tagName`, `tag.'tag
-    name'` - tag of the cluster
-    
-    Supported comparison operators: * `=` - equal * `!=` - not equal
-    
-    Supported logical operators: `AND`, `OR`.
-    
-    Examples: * `workspaceId=123 OR (sku='STANDARD_ALL_PURPOSE_COMPUTE' AND tag.'my tag'='my
-    value')` * `workspaceId!=456` * `sku='STANDARD_ALL_PURPOSE_COMPUTE' OR
-    sku='PREMIUM_ALL_PURPOSE_COMPUTE'` * `tag.name1='value1' AND tag.name2='value2'`"""
+class BudgetConfigurationFilterOperator(Enum):
 
-    name: Optional[str] = None
-    """Human-readable name of the budget."""
+    IN = 'IN'
 
-    period: Optional[str] = None
-    """Period length in years, months, weeks and/or days. Examples: `1 month`, `30 days`, `1 year, 2
-    months, 1 week, 2 days`"""
 
-    start_date: Optional[str] = None
-    """Start date of the budget period calculation."""
+@dataclass
+class BudgetConfigurationFilterTagClause:
+    key: Optional[str] = None
 
-    status_daily: Optional[List[BudgetWithStatusStatusDailyItem]] = None
-    """Amount used in the budget for each day (noncumulative)."""
+    value: Optional[BudgetConfigurationFilterClause] = None
 
-    target_amount: Optional[str] = None
-    """Target amount of the budget per period in USD."""
+    def as_dict(self) -> dict:
+        """Serializes the BudgetConfigurationFilterTagClause into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.value: body['value'] = self.value.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> BudgetConfigurationFilterTagClause:
+        """Deserializes the BudgetConfigurationFilterTagClause from a dictionary."""
+        return cls(key=d.get('key', None), value=_from_dict(d, 'value', BudgetConfigurationFilterClause))
+
+
+@dataclass
+class BudgetConfigurationFilterWorkspaceIdClause:
+    operator: Optional[BudgetConfigurationFilterOperator] = None
 
-    update_time: Optional[str] = None
+    values: Optional[List[int]] = None
 
     def as_dict(self) -> dict:
-        """Serializes the BudgetWithStatus into a dictionary suitable for use as a JSON request body."""
+        """Serializes the BudgetConfigurationFilterWorkspaceIdClause into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.alerts: body['alerts'] = [v.as_dict() for v in self.alerts]
-        if self.budget_id is not None: body['budget_id'] = self.budget_id
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.end_date is not None: body['end_date'] = self.end_date
-        if self.filter is not None: body['filter'] = self.filter
-        if self.name is not None: body['name'] = self.name
-        if self.period is not None: body['period'] = self.period
-        if self.start_date is not None: body['start_date'] = self.start_date
-        if self.status_daily: body['status_daily'] = [v.as_dict() for v in self.status_daily]
-        if self.target_amount is not None: body['target_amount'] = self.target_amount
-        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.operator is not None: body['operator'] = self.operator.value
+        if self.values: body['values'] = [v for v in self.values]
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> BudgetWithStatus:
-        """Deserializes the BudgetWithStatus from a dictionary."""
-        return cls(alerts=_repeated_dict(d, 'alerts', BudgetAlert),
-                   budget_id=d.get('budget_id', None),
-                   creation_time=d.get('creation_time', None),
-                   end_date=d.get('end_date', None),
-                   filter=d.get('filter', None),
-                   name=d.get('name', None),
-                   period=d.get('period', None),
-                   start_date=d.get('start_date', None),
-                   status_daily=_repeated_dict(d, 'status_daily', BudgetWithStatusStatusDailyItem),
-                   target_amount=d.get('target_amount', None),
-                   update_time=d.get('update_time', None))
+    def from_dict(cls, d: Dict[str, any]) -> BudgetConfigurationFilterWorkspaceIdClause:
+        """Deserializes the BudgetConfigurationFilterWorkspaceIdClause from a dictionary."""
+        return cls(operator=_enum(d, 'operator', BudgetConfigurationFilterOperator),
+                   values=d.get('values', None))
+
+
+@dataclass
+class CreateBillingUsageDashboardRequest:
+    dashboard_type: Optional[UsageDashboardType] = None
+    """Workspace level usage dashboard shows usage data for the specified workspace ID. Global level
+    usage dashboard shows usage data for all workspaces in the account."""
+
+    workspace_id: Optional[int] = None
+    """The workspace ID of the workspace in which the usage dashboard is created."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CreateBillingUsageDashboardRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.dashboard_type is not None: body['dashboard_type'] = self.dashboard_type.value
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CreateBillingUsageDashboardRequest:
+        """Deserializes the CreateBillingUsageDashboardRequest from a dictionary."""
+        return cls(dashboard_type=_enum(d, 'dashboard_type', UsageDashboardType),
+                   workspace_id=d.get('workspace_id', None))
+
+
+@dataclass
+class CreateBillingUsageDashboardResponse:
+    dashboard_id: Optional[str] = None
+    """The unique id of the usage dashboard."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CreateBillingUsageDashboardResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CreateBillingUsageDashboardResponse:
+        """Deserializes the CreateBillingUsageDashboardResponse from a dictionary."""
+        return cls(dashboard_id=d.get('dashboard_id', None))
+
+
+@dataclass
+class CreateBudgetConfigurationBudget:
+    account_id: Optional[str] = None
+    """Databricks account ID."""
+
+    alert_configurations: Optional[List[CreateBudgetConfigurationBudgetAlertConfigurations]] = None
+    """Alerts to configure when this budget is in a triggered state. Budgets must have exactly one
+    alert configuration."""
+
+    display_name: Optional[str] = None
+    """Human-readable name of budget configuration. Max Length: 128"""
+
+    filter: Optional[BudgetConfigurationFilter] = None
+    """Configured filters for this budget. These are applied to your account's usage to limit the scope
+    of what is considered for this budget. Leave empty to include all usage for this account. All
+    provided filters must be matched for usage to be included."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CreateBudgetConfigurationBudget into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.alert_configurations:
+            body['alert_configurations'] = [v.as_dict() for v in self.alert_configurations]
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.filter: body['filter'] = self.filter.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationBudget:
+        """Deserializes the CreateBudgetConfigurationBudget from a dictionary."""
+        return cls(account_id=d.get('account_id', None),
+                   alert_configurations=_repeated_dict(d, 'alert_configurations',
+                                                       CreateBudgetConfigurationBudgetAlertConfigurations),
+                   display_name=d.get('display_name', None),
+                   filter=_from_dict(d, 'filter', BudgetConfigurationFilter))
+
+
+@dataclass
+class CreateBudgetConfigurationBudgetActionConfigurations:
+    action_type: Optional[ActionConfigurationType] = None
+    """The type of the action."""
+
+    target: Optional[str] = None
+    """Target for the action. For example, an email address."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CreateBudgetConfigurationBudgetActionConfigurations into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.action_type is not None: body['action_type'] = self.action_type.value
+        if self.target is not None: body['target'] = self.target
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationBudgetActionConfigurations:
+        """Deserializes the CreateBudgetConfigurationBudgetActionConfigurations from a dictionary."""
+        return cls(action_type=_enum(d, 'action_type', ActionConfigurationType), target=d.get('target', None))
 
 
 @dataclass
-class BudgetWithStatusStatusDailyItem:
-    amount: Optional[str] = None
-    """Amount used in this day in USD."""
+class CreateBudgetConfigurationBudgetAlertConfigurations:
+    action_configurations: Optional[List[CreateBudgetConfigurationBudgetActionConfigurations]] = None
+    """Configured actions for this alert. These define what happens when an alert enters a triggered
+    state."""
+
+    quantity_threshold: Optional[str] = None
+    """The threshold for the budget alert to determine if it is in a triggered state. The number is
+    evaluated based on `quantity_type`."""
 
-    date: Optional[str] = None
+    quantity_type: Optional[AlertConfigurationQuantityType] = None
+    """The way to calculate cost for this budget alert. This is what `quantity_threshold` is measured
+    in."""
+
+    time_period: Optional[AlertConfigurationTimePeriod] = None
+    """The time window of usage data for the budget."""
+
+    trigger_type: Optional[AlertConfigurationTriggerType] = None
+    """The evaluation method to determine when this budget alert is in a triggered state."""
 
     def as_dict(self) -> dict:
-        """Serializes the BudgetWithStatusStatusDailyItem into a dictionary suitable for use as a JSON request body."""
+        """Serializes the CreateBudgetConfigurationBudgetAlertConfigurations into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.amount is not None: body['amount'] = self.amount
-        if self.date is not None: body['date'] = self.date
+        if self.action_configurations:
+            body['action_configurations'] = [v.as_dict() for v in self.action_configurations]
+        if self.quantity_threshold is not None: body['quantity_threshold'] = self.quantity_threshold
+        if self.quantity_type is not None: body['quantity_type'] = self.quantity_type.value
+        if self.time_period is not None: body['time_period'] = self.time_period.value
+        if self.trigger_type is not None: body['trigger_type'] = self.trigger_type.value
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> BudgetWithStatusStatusDailyItem:
-        """Deserializes the BudgetWithStatusStatusDailyItem from a dictionary."""
-        return cls(amount=d.get('amount', None), date=d.get('date', None))
+    def from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationBudgetAlertConfigurations:
+        """Deserializes the CreateBudgetConfigurationBudgetAlertConfigurations from a dictionary."""
+        return cls(action_configurations=_repeated_dict(d, 'action_configurations',
+                                                        CreateBudgetConfigurationBudgetActionConfigurations),
+                   quantity_threshold=d.get('quantity_threshold', None),
+                   quantity_type=_enum(d, 'quantity_type', AlertConfigurationQuantityType),
+                   time_period=_enum(d, 'time_period', AlertConfigurationTimePeriod),
+                   trigger_type=_enum(d, 'trigger_type', AlertConfigurationTriggerType))
+
+
+@dataclass
+class CreateBudgetConfigurationRequest:
+    budget: CreateBudgetConfigurationBudget
+    """Properties of the new budget configuration."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CreateBudgetConfigurationRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.budget: body['budget'] = self.budget.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationRequest:
+        """Deserializes the CreateBudgetConfigurationRequest from a dictionary."""
+        return cls(budget=_from_dict(d, 'budget', CreateBudgetConfigurationBudget))
+
+
+@dataclass
+class CreateBudgetConfigurationResponse:
+    budget: Optional[BudgetConfiguration] = None
+    """The created budget configuration."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CreateBudgetConfigurationResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.budget: body['budget'] = self.budget.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationResponse:
+        """Deserializes the CreateBudgetConfigurationResponse from a dictionary."""
+        return cls(budget=_from_dict(d, 'budget', BudgetConfiguration))
 
 
 @dataclass
@@ -316,16 +524,16 @@ def from_dict(cls, d: Dict[str, any]) -> CreateLogDeliveryConfigurationParams:
 
 
 @dataclass
-class DeleteResponse:
+class DeleteBudgetConfigurationResponse:
 
     def as_dict(self) -> dict:
-        """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body."""
+        """Serializes the DeleteBudgetConfigurationResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
-        """Deserializes the DeleteResponse from a dictionary."""
+    def from_dict(cls, d: Dict[str, any]) -> DeleteBudgetConfigurationResponse:
+        """Deserializes the DeleteBudgetConfigurationResponse from a dictionary."""
         return cls()
 
 
@@ -361,6 +569,65 @@ def from_dict(cls, d: Dict[str, any]) -> DownloadResponse:
         return cls(contents=d.get('contents', None))
 
 
+@dataclass
+class GetBillingUsageDashboardResponse:
+    dashboard_id: Optional[str] = None
+    """The unique id of the usage dashboard."""
+
+    dashboard_url: Optional[str] = None
+    """The URL of the usage dashboard."""
+
+    def as_dict(self) -> dict:
+        """Serializes the GetBillingUsageDashboardResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.dashboard_url is not None: body['dashboard_url'] = self.dashboard_url
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GetBillingUsageDashboardResponse:
+        """Deserializes the GetBillingUsageDashboardResponse from a dictionary."""
+        return cls(dashboard_id=d.get('dashboard_id', None), dashboard_url=d.get('dashboard_url', None))
+
+
+@dataclass
+class GetBudgetConfigurationResponse:
+    budget: Optional[BudgetConfiguration] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the GetBudgetConfigurationResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.budget: body['budget'] = self.budget.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GetBudgetConfigurationResponse:
+        """Deserializes the GetBudgetConfigurationResponse from a dictionary."""
+        return cls(budget=_from_dict(d, 'budget', BudgetConfiguration))
+
+
+@dataclass
+class ListBudgetConfigurationsResponse:
+    budgets: Optional[List[BudgetConfiguration]] = None
+
+    next_page_token: Optional[str] = None
+    """Token which can be sent as `page_token` to retrieve the next page of results. If this field is
+    omitted, there are no subsequent budgets."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListBudgetConfigurationsResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.budgets: body['budgets'] = [v.as_dict() for v in self.budgets]
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListBudgetConfigurationsResponse:
+        """Deserializes the ListBudgetConfigurationsResponse from a dictionary."""
+        return cls(budgets=_repeated_dict(d, 'budgets', BudgetConfiguration),
+                   next_page_token=d.get('next_page_token', None))
+
+
 class LogDeliveryConfigStatus(Enum):
     """Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled).
     Defaults to `ENABLED`. You can [enable or disable the
@@ -586,81 +853,116 @@ def from_dict(cls, d: Dict[str, any]) -> PatchStatusResponse:
 
 
 @dataclass
-class UpdateLogDeliveryConfigurationStatusRequest:
-    status: LogDeliveryConfigStatus
-    """Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled).
-    Defaults to `ENABLED`. You can [enable or disable the
-    configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration
-    is not supported, so disable a log delivery configuration that is no longer needed."""
+class UpdateBudgetConfigurationBudget:
+    account_id: Optional[str] = None
+    """Databricks account ID."""
 
-    log_delivery_configuration_id: Optional[str] = None
-    """Databricks log delivery configuration ID"""
+    alert_configurations: Optional[List[AlertConfiguration]] = None
+    """Alerts to configure when this budget is in a triggered state. Budgets must have exactly one
+    alert configuration."""
+
+    budget_configuration_id: Optional[str] = None
+    """Databricks budget configuration ID."""
+
+    display_name: Optional[str] = None
+    """Human-readable name of budget configuration. Max Length: 128"""
+
+    filter: Optional[BudgetConfigurationFilter] = None
+    """Configured filters for this budget. These are applied to your account's usage to limit the scope
+    of what is considered for this budget. Leave empty to include all usage for this account. All
+    provided filters must be matched for usage to be included."""
 
     def as_dict(self) -> dict:
-        """Serializes the UpdateLogDeliveryConfigurationStatusRequest into a dictionary suitable for use as a JSON request body."""
+        """Serializes the UpdateBudgetConfigurationBudget into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.log_delivery_configuration_id is not None:
-            body['log_delivery_configuration_id'] = self.log_delivery_configuration_id
-        if self.status is not None: body['status'] = self.status.value
+        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.alert_configurations:
+            body['alert_configurations'] = [v.as_dict() for v in self.alert_configurations]
+        if self.budget_configuration_id is not None:
+            body['budget_configuration_id'] = self.budget_configuration_id
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.filter: body['filter'] = self.filter.as_dict()
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> UpdateLogDeliveryConfigurationStatusRequest:
-        """Deserializes the UpdateLogDeliveryConfigurationStatusRequest from a dictionary."""
-        return cls(log_delivery_configuration_id=d.get('log_delivery_configuration_id', None),
-                   status=_enum(d, 'status', LogDeliveryConfigStatus))
+    def from_dict(cls, d: Dict[str, any]) -> UpdateBudgetConfigurationBudget:
+        """Deserializes the UpdateBudgetConfigurationBudget from a dictionary."""
+        return cls(account_id=d.get('account_id', None),
+                   alert_configurations=_repeated_dict(d, 'alert_configurations', AlertConfiguration),
+                   budget_configuration_id=d.get('budget_configuration_id', None),
+                   display_name=d.get('display_name', None),
+                   filter=_from_dict(d, 'filter', BudgetConfigurationFilter))
 
 
 @dataclass
-class UpdateResponse:
+class UpdateBudgetConfigurationRequest:
+    budget: UpdateBudgetConfigurationBudget
+    """The updated budget. This will overwrite the budget specified by the budget ID."""
+
+    budget_id: Optional[str] = None
+    """The Databricks budget configuration ID."""
 
     def as_dict(self) -> dict:
-        """Serializes the UpdateResponse into a dictionary suitable for use as a JSON request body."""
+        """Serializes the UpdateBudgetConfigurationRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.budget: body['budget'] = self.budget.as_dict()
+        if self.budget_id is not None: body['budget_id'] = self.budget_id
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> UpdateResponse:
-        """Deserializes the UpdateResponse from a dictionary."""
-        return cls()
+    def from_dict(cls, d: Dict[str, any]) -> UpdateBudgetConfigurationRequest:
+        """Deserializes the UpdateBudgetConfigurationRequest from a dictionary."""
+        return cls(budget=_from_dict(d, 'budget', UpdateBudgetConfigurationBudget),
+                   budget_id=d.get('budget_id', None))
 
 
 @dataclass
-class WrappedBudget:
-    budget: Budget
-    """Budget configuration to be created."""
-
-    budget_id: Optional[str] = None
-    """Budget ID"""
+class UpdateBudgetConfigurationResponse:
+    budget: Optional[BudgetConfiguration] = None
+    """The updated budget."""
 
     def as_dict(self) -> dict:
-        """Serializes the WrappedBudget into a dictionary suitable for use as a JSON request body."""
+        """Serializes the UpdateBudgetConfigurationResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.budget: body['budget'] = self.budget.as_dict()
-        if self.budget_id is not None: body['budget_id'] = self.budget_id
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> WrappedBudget:
-        """Deserializes the WrappedBudget from a dictionary."""
-        return cls(budget=_from_dict(d, 'budget', Budget), budget_id=d.get('budget_id', None))
+    def from_dict(cls, d: Dict[str, any]) -> UpdateBudgetConfigurationResponse:
+        """Deserializes the UpdateBudgetConfigurationResponse from a dictionary."""
+        return cls(budget=_from_dict(d, 'budget', BudgetConfiguration))
 
 
 @dataclass
-class WrappedBudgetWithStatus:
-    budget: BudgetWithStatus
-    """Budget configuration with daily status."""
+class UpdateLogDeliveryConfigurationStatusRequest:
+    status: LogDeliveryConfigStatus
+    """Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled).
+    Defaults to `ENABLED`. You can [enable or disable the
+    configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration
+    is not supported, so disable a log delivery configuration that is no longer needed."""
+
+    log_delivery_configuration_id: Optional[str] = None
+    """Databricks log delivery configuration ID"""
 
     def as_dict(self) -> dict:
-        """Serializes the WrappedBudgetWithStatus into a dictionary suitable for use as a JSON request body."""
+        """Serializes the UpdateLogDeliveryConfigurationStatusRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.budget: body['budget'] = self.budget.as_dict()
+        if self.log_delivery_configuration_id is not None:
+            body['log_delivery_configuration_id'] = self.log_delivery_configuration_id
+        if self.status is not None: body['status'] = self.status.value
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> WrappedBudgetWithStatus:
-        """Deserializes the WrappedBudgetWithStatus from a dictionary."""
-        return cls(budget=_from_dict(d, 'budget', BudgetWithStatus))
+    def from_dict(cls, d: Dict[str, any]) -> UpdateLogDeliveryConfigurationStatusRequest:
+        """Deserializes the UpdateLogDeliveryConfigurationStatusRequest from a dictionary."""
+        return cls(log_delivery_configuration_id=d.get('log_delivery_configuration_id', None),
+                   status=_enum(d, 'status', LogDeliveryConfigStatus))
+
+
+class UsageDashboardType(Enum):
+
+    USAGE_DASHBOARD_TYPE_GLOBAL = 'USAGE_DASHBOARD_TYPE_GLOBAL'
+    USAGE_DASHBOARD_TYPE_WORKSPACE = 'USAGE_DASHBOARD_TYPE_WORKSPACE'
 
 
 @dataclass
@@ -767,39 +1069,42 @@ def download(self,
 
 
 class BudgetsAPI:
-    """These APIs manage budget configuration including notifications for exceeding a budget for a period. They
-    can also retrieve the status of each budget."""
+    """These APIs manage budget configurations for this account. Budgets enable you to monitor usage across your
+    account. You can set up budgets to either track account-wide spending, or apply filters to track the
+    spending of specific teams, projects, or workspaces."""
 
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self, budget: Budget) -> WrappedBudgetWithStatus:
-        """Create a new budget.
+    def create(self, budget: CreateBudgetConfigurationBudget) -> CreateBudgetConfigurationResponse:
+        """Create new budget.
         
-        Creates a new budget in the specified account.
+        Create a new budget configuration for an account. For full details, see
+        https://docs.databricks.com/en/admin/account-settings/budgets.html.
         
-        :param budget: :class:`Budget`
-          Budget configuration to be created.
+        :param budget: :class:`CreateBudgetConfigurationBudget`
+          Properties of the new budget configuration.
         
-        :returns: :class:`WrappedBudgetWithStatus`
+        :returns: :class:`CreateBudgetConfigurationResponse`
         """
         body = {}
         if budget is not None: body['budget'] = budget.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('POST',
-                           f'/api/2.0/accounts/{self._api.account_id}/budget',
+                           f'/api/2.1/accounts/{self._api.account_id}/budgets',
                            body=body,
                            headers=headers)
-        return WrappedBudgetWithStatus.from_dict(res)
+        return CreateBudgetConfigurationResponse.from_dict(res)
 
     def delete(self, budget_id: str):
         """Delete budget.
         
-        Deletes the budget specified by its UUID.
+        Deletes a budget configuration for an account. Both account and budget configuration are specified by
+        ID. This cannot be undone.
         
         :param budget_id: str
-          Budget ID
+          The Databricks budget configuration ID.
         
         
         """
@@ -807,63 +1112,78 @@ def delete(self, budget_id: str):
         headers = {'Accept': 'application/json', }
 
         self._api.do('DELETE',
-                     f'/api/2.0/accounts/{self._api.account_id}/budget/{budget_id}',
+                     f'/api/2.1/accounts/{self._api.account_id}/budgets/{budget_id}',
                      headers=headers)
 
-    def get(self, budget_id: str) -> WrappedBudgetWithStatus:
-        """Get budget and its status.
+    def get(self, budget_id: str) -> GetBudgetConfigurationResponse:
+        """Get budget.
         
-        Gets the budget specified by its UUID, including noncumulative status for each day that the budget is
-        configured to include.
+        Gets a budget configuration for an account. Both account and budget configuration are specified by ID.
         
         :param budget_id: str
-          Budget ID
+          The Databricks budget configuration ID.
         
-        :returns: :class:`WrappedBudgetWithStatus`
+        :returns: :class:`GetBudgetConfigurationResponse`
         """
 
         headers = {'Accept': 'application/json', }
 
         res = self._api.do('GET',
-                           f'/api/2.0/accounts/{self._api.account_id}/budget/{budget_id}',
+                           f'/api/2.1/accounts/{self._api.account_id}/budgets/{budget_id}',
                            headers=headers)
-        return WrappedBudgetWithStatus.from_dict(res)
+        return GetBudgetConfigurationResponse.from_dict(res)
 
-    def list(self) -> Iterator[BudgetWithStatus]:
+    def list(self, *, page_token: Optional[str] = None) -> Iterator[BudgetConfiguration]:
         """Get all budgets.
         
-        Gets all budgets associated with this account, including noncumulative status for each day that the
-        budget is configured to include.
+        Gets all budgets associated with this account.
+        
+        :param page_token: str (optional)
+          A page token received from a previous get all budget configurations call. This token can be used to
+          retrieve the subsequent page. Requests first page if absent.
         
-        :returns: Iterator over :class:`BudgetWithStatus`
+        :returns: Iterator over :class:`BudgetConfiguration`
         """
 
+        query = {}
+        if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
-        json = self._api.do('GET', f'/api/2.0/accounts/{self._api.account_id}/budget', headers=headers)
-        parsed = BudgetList.from_dict(json).budgets
-        return parsed if parsed is not None else []
-
-    def update(self, budget_id: str, budget: Budget):
+        while True:
+            json = self._api.do('GET',
+                                f'/api/2.1/accounts/{self._api.account_id}/budgets',
+                                query=query,
+                                headers=headers)
+            if 'budgets' in json:
+                for v in json['budgets']:
+                    yield BudgetConfiguration.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+
+    def update(self, budget_id: str,
+               budget: UpdateBudgetConfigurationBudget) -> UpdateBudgetConfigurationResponse:
         """Modify budget.
         
-        Modifies a budget in this account. Budget properties are completely overwritten.
+        Updates a budget configuration for an account. Both account and budget configuration are specified by
+        ID.
         
         :param budget_id: str
-          Budget ID
-        :param budget: :class:`Budget`
-          Budget configuration to be created.
-        
+          The Databricks budget configuration ID.
+        :param budget: :class:`UpdateBudgetConfigurationBudget`
+          The updated budget. This will overwrite the budget specified by the budget ID.
         
+        :returns: :class:`UpdateBudgetConfigurationResponse`
         """
         body = {}
         if budget is not None: body['budget'] = budget.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-        self._api.do('PATCH',
-                     f'/api/2.0/accounts/{self._api.account_id}/budget/{budget_id}',
-                     body=body,
-                     headers=headers)
+        res = self._api.do('PUT',
+                           f'/api/2.1/accounts/{self._api.account_id}/budgets/{budget_id}',
+                           body=body,
+                           headers=headers)
+        return UpdateBudgetConfigurationResponse.from_dict(res)
 
 
 class LogDeliveryAPI:
@@ -1037,3 +1357,67 @@ def patch_status(self, log_delivery_configuration_id: str, status: LogDeliveryCo
                      f'/api/2.0/accounts/{self._api.account_id}/log-delivery/{log_delivery_configuration_id}',
                      body=body,
                      headers=headers)
+
+
+class UsageDashboardsAPI:
+    """These APIs manage usage dashboards for this account. Usage dashboards enable you to gain insights into
+    your usage with pre-built dashboards: visualize breakdowns, analyze tag attributions, and identify cost
+    drivers."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def create(self,
+               *,
+               dashboard_type: Optional[UsageDashboardType] = None,
+               workspace_id: Optional[int] = None) -> CreateBillingUsageDashboardResponse:
+        """Create new usage dashboard.
+        
+        Create a usage dashboard specified by workspaceId, accountId, and dashboard type.
+        
+        :param dashboard_type: :class:`UsageDashboardType` (optional)
+          Workspace level usage dashboard shows usage data for the specified workspace ID. Global level usage
+          dashboard shows usage data for all workspaces in the account.
+        :param workspace_id: int (optional)
+          The workspace ID of the workspace in which the usage dashboard is created.
+        
+        :returns: :class:`CreateBillingUsageDashboardResponse`
+        """
+        body = {}
+        if dashboard_type is not None: body['dashboard_type'] = dashboard_type.value
+        if workspace_id is not None: body['workspace_id'] = workspace_id
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST',
+                           f'/api/2.0/accounts/{self._api.account_id}/dashboard',
+                           body=body,
+                           headers=headers)
+        return CreateBillingUsageDashboardResponse.from_dict(res)
+
+    def get(self,
+            *,
+            dashboard_type: Optional[UsageDashboardType] = None,
+            workspace_id: Optional[int] = None) -> GetBillingUsageDashboardResponse:
+        """Get usage dashboard.
+        
+        Get a usage dashboard specified by workspaceId, accountId, and dashboard type.
+        
+        :param dashboard_type: :class:`UsageDashboardType` (optional)
+          Workspace level usage dashboard shows usage data for the specified workspace ID. Global level usage
+          dashboard shows usage data for all workspaces in the account.
+        :param workspace_id: int (optional)
+          The workspace ID of the workspace in which the usage dashboard is created.
+        
+        :returns: :class:`GetBillingUsageDashboardResponse`
+        """
+
+        query = {}
+        if dashboard_type is not None: query['dashboard_type'] = dashboard_type.value
+        if workspace_id is not None: query['workspace_id'] = workspace_id
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET',
+                           f'/api/2.0/accounts/{self._api.account_id}/dashboard',
+                           query=query,
+                           headers=headers)
+        return GetBillingUsageDashboardResponse.from_dict(res)
diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py
index e6456bc01..b149dbbaa 100755
--- a/databricks/sdk/service/catalog.py
+++ b/databricks/sdk/service/catalog.py
@@ -274,6 +274,42 @@ def from_dict(cls, d: Dict[str, any]) -> AssignResponse:
         return cls()
 
 
+@dataclass
+class AwsCredentials:
+    """AWS temporary credentials for API authentication. Read more at
+    https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html."""
+
+    access_key_id: Optional[str] = None
+    """The access key ID that identifies the temporary credentials."""
+
+    access_point: Optional[str] = None
+    """The Amazon Resource Name (ARN) of the S3 access point for temporary credentials related to
+    the external location."""
+
+    secret_access_key: Optional[str] = None
+    """The secret access key that can be used to sign AWS API requests."""
+
+    session_token: Optional[str] = None
+    """The token that users must pass to AWS API to use the temporary credentials."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AwsCredentials into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.access_key_id is not None: body['access_key_id'] = self.access_key_id
+        if self.access_point is not None: body['access_point'] = self.access_point
+        if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key
+        if self.session_token is not None: body['session_token'] = self.session_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AwsCredentials:
+        """Deserializes the AwsCredentials from a dictionary."""
+        return cls(access_key_id=d.get('access_key_id', None),
+                   access_point=d.get('access_point', None),
+                   secret_access_key=d.get('secret_access_key', None),
+                   session_token=d.get('session_token', None))
+
+
 @dataclass
 class AwsIamRoleRequest:
     role_arn: str
@@ -405,6 +441,26 @@ def from_dict(cls, d: Dict[str, any]) -> AzureServicePrincipal:
                    directory_id=d.get('directory_id', None))
 
 
+@dataclass
+class AzureUserDelegationSas:
+    """Azure temporary credentials for API authentication. Read more at
+    https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas"""
+
+    sas_token: Optional[str] = None
+    """The signed URI (SAS Token) used to access blob services for a given path"""
+
+    def as_dict(self) -> dict:
+        """Serializes the AzureUserDelegationSas into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.sas_token is not None: body['sas_token'] = self.sas_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AzureUserDelegationSas:
+        """Deserializes the AzureUserDelegationSas from a dictionary."""
+        return cls(sas_token=d.get('sas_token', None))
+
+
 @dataclass
 class CancelRefreshResponse:
 
@@ -849,7 +905,11 @@ class ConnectionInfoSecurableKind(Enum):
     """Kind of connection securable."""
 
     CONNECTION_BIGQUERY = 'CONNECTION_BIGQUERY'
+    CONNECTION_BUILTIN_HIVE_METASTORE = 'CONNECTION_BUILTIN_HIVE_METASTORE'
     CONNECTION_DATABRICKS = 'CONNECTION_DATABRICKS'
+    CONNECTION_EXTERNAL_HIVE_METASTORE = 'CONNECTION_EXTERNAL_HIVE_METASTORE'
+    CONNECTION_GLUE = 'CONNECTION_GLUE'
+    CONNECTION_HTTP_BEARER = 'CONNECTION_HTTP_BEARER'
     CONNECTION_MYSQL = 'CONNECTION_MYSQL'
     CONNECTION_ONLINE_CATALOG = 'CONNECTION_ONLINE_CATALOG'
     CONNECTION_POSTGRESQL = 'CONNECTION_POSTGRESQL'
@@ -864,6 +924,9 @@ class ConnectionType(Enum):
 
     BIGQUERY = 'BIGQUERY'
     DATABRICKS = 'DATABRICKS'
+    GLUE = 'GLUE'
+    HIVE_METASTORE = 'HIVE_METASTORE'
+    HTTP = 'HTTP'
     MYSQL = 'MYSQL'
     POSTGRESQL = 'POSTGRESQL'
     REDSHIFT = 'REDSHIFT'
@@ -1023,6 +1086,11 @@ class CreateExternalLocation:
     encryption_details: Optional[EncryptionDetails] = None
     """Encryption options that apply to clients connecting to cloud storage."""
 
+    fallback: Optional[bool] = None
+    """Indicates whether fallback mode is enabled for this external location. When fallback mode is
+    enabled, the access to the location falls back to cluster credentials if UC credentials are not
+    sufficient."""
+
     read_only: Optional[bool] = None
     """Indicates whether the external location is read-only."""
 
@@ -1036,6 +1104,7 @@ def as_dict(self) -> dict:
         if self.comment is not None: body['comment'] = self.comment
         if self.credential_name is not None: body['credential_name'] = self.credential_name
         if self.encryption_details: body['encryption_details'] = self.encryption_details.as_dict()
+        if self.fallback is not None: body['fallback'] = self.fallback
         if self.name is not None: body['name'] = self.name
         if self.read_only is not None: body['read_only'] = self.read_only
         if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
@@ -1049,6 +1118,7 @@ def from_dict(cls, d: Dict[str, any]) -> CreateExternalLocation:
                    comment=d.get('comment', None),
                    credential_name=d.get('credential_name', None),
                    encryption_details=_from_dict(d, 'encryption_details', EncryptionDetails),
+                   fallback=d.get('fallback', None),
                    name=d.get('name', None),
                    read_only=d.get('read_only', None),
                    skip_validation=d.get('skip_validation', None),
@@ -1074,9 +1144,6 @@ class CreateFunction:
     full_data_type: str
     """Pretty printed function data type."""
 
-    return_params: FunctionParameterInfos
-    """Table function return parameters."""
-
     routine_body: CreateFunctionRoutineBody
     """Function language. When **EXTERNAL** is used, the language of the routine function should be
     specified in the __external_language__ field, and the __return_params__ of the function cannot
@@ -1086,9 +1153,6 @@ class CreateFunction:
     routine_definition: str
     """Function body."""
 
-    routine_dependencies: DependencyList
-    """Function dependencies."""
-
     parameter_style: CreateFunctionParameterStyle
     """Function parameter style. **S** is the value for SQL."""
 
@@ -1119,6 +1183,12 @@ class CreateFunction:
     properties: Optional[str] = None
     """JSON-serialized key-value pair map, encoded (escaped) as a string."""
 
+    return_params: Optional[FunctionParameterInfos] = None
+    """Table function return parameters."""
+
+    routine_dependencies: Optional[DependencyList] = None
+    """Function dependencies."""
+
     sql_path: Optional[str] = None
     """List of schemes whose objects can be referenced without qualification."""
 
@@ -1256,7 +1326,8 @@ class CreateMetastoreAssignment:
     """The unique ID of the metastore."""
 
     default_catalog_name: str
-    """The name of the default catalog in the metastore."""
+    """The name of the default catalog in the metastore. This field is deprecated. Please use "Default
+    Namespace API" to configure the default catalog for a Databricks workspace."""
 
     workspace_id: Optional[int] = None
     """A workspace ID."""
@@ -1607,6 +1678,7 @@ def from_dict(cls, d: Dict[str, any]) -> CreateVolumeRequestContent:
 class CredentialType(Enum):
     """The type of credential."""
 
+    BEARER_TOKEN = 'BEARER_TOKEN'
     USERNAME_PASSWORD = 'USERNAME_PASSWORD'
 
 
@@ -1974,6 +2046,11 @@ class ExternalLocationInfo:
     encryption_details: Optional[EncryptionDetails] = None
     """Encryption options that apply to clients connecting to cloud storage."""
 
+    fallback: Optional[bool] = None
+    """Indicates whether fallback mode is enabled for this external location. When fallback mode is
+    enabled, the access to the location falls back to cluster credentials if UC credentials are not
+    sufficient."""
+
     isolation_mode: Optional[IsolationMode] = None
     """Whether the current securable is accessible from all workspaces or a specific set of workspaces."""
 
@@ -2009,6 +2086,7 @@ def as_dict(self) -> dict:
         if self.credential_id is not None: body['credential_id'] = self.credential_id
         if self.credential_name is not None: body['credential_name'] = self.credential_name
         if self.encryption_details: body['encryption_details'] = self.encryption_details.as_dict()
+        if self.fallback is not None: body['fallback'] = self.fallback
         if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value
         if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
         if self.name is not None: body['name'] = self.name
@@ -2030,6 +2108,7 @@ def from_dict(cls, d: Dict[str, any]) -> ExternalLocationInfo:
                    credential_id=d.get('credential_id', None),
                    credential_name=d.get('credential_name', None),
                    encryption_details=_from_dict(d, 'encryption_details', EncryptionDetails),
+                   fallback=d.get('fallback', None),
                    isolation_mode=_enum(d, 'isolation_mode', IsolationMode),
                    metastore_id=d.get('metastore_id', None),
                    name=d.get('name', None),
@@ -2418,6 +2497,104 @@ class FunctionParameterType(Enum):
     PARAM = 'PARAM'
 
 
+@dataclass
+class GcpOauthToken:
+    """GCP temporary credentials for API authentication. Read more at
+    https://developers.google.com/identity/protocols/oauth2/service-account"""
+
+    oauth_token: Optional[str] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the GcpOauthToken into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.oauth_token is not None: body['oauth_token'] = self.oauth_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GcpOauthToken:
+        """Deserializes the GcpOauthToken from a dictionary."""
+        return cls(oauth_token=d.get('oauth_token', None))
+
+
+@dataclass
+class GenerateTemporaryTableCredentialRequest:
+    operation: Optional[TableOperation] = None
+    """The operation performed against the table data, either READ or READ_WRITE. If READ_WRITE is
+    specified, the credentials returned will have write permissions, otherwise, it will be read
+    only."""
+
+    table_id: Optional[str] = None
+    """UUID of the table to read or write."""
+
+    def as_dict(self) -> dict:
+        """Serializes the GenerateTemporaryTableCredentialRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.operation is not None: body['operation'] = self.operation.value
+        if self.table_id is not None: body['table_id'] = self.table_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryTableCredentialRequest:
+        """Deserializes the GenerateTemporaryTableCredentialRequest from a dictionary."""
+        return cls(operation=_enum(d, 'operation', TableOperation), table_id=d.get('table_id', None))
+
+
+@dataclass
+class GenerateTemporaryTableCredentialResponse:
+    aws_temp_credentials: Optional[AwsCredentials] = None
+    """AWS temporary credentials for API authentication. Read more at
+    https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html."""
+
+    azure_user_delegation_sas: Optional[AzureUserDelegationSas] = None
+    """Azure temporary credentials for API authentication. Read more at
+    https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas"""
+
+    expiration_time: Optional[int] = None
+    """Server time when the credential will expire, in epoch milliseconds. The API client is advised to
+    cache the credential given this expiration time."""
+
+    gcp_oauth_token: Optional[GcpOauthToken] = None
+    """GCP temporary credentials for API authentication. Read more at
+    https://developers.google.com/identity/protocols/oauth2/service-account"""
+
+    r2_temp_credentials: Optional[R2Credentials] = None
+    """R2 temporary credentials for API authentication. Read more at
+    https://developers.cloudflare.com/r2/api/s3/tokens/."""
+
+    url: Optional[str] = None
+    """The URL of the storage path accessible by the temporary credential."""
+
+    def as_dict(self) -> dict:
+        """Serializes the GenerateTemporaryTableCredentialResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.aws_temp_credentials: body['aws_temp_credentials'] = self.aws_temp_credentials.as_dict()
+        if self.azure_user_delegation_sas:
+            body['azure_user_delegation_sas'] = self.azure_user_delegation_sas.as_dict()
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
+        if self.gcp_oauth_token: body['gcp_oauth_token'] = self.gcp_oauth_token.as_dict()
+        if self.r2_temp_credentials: body['r2_temp_credentials'] = self.r2_temp_credentials.as_dict()
+        if self.url is not None: body['url'] = self.url
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryTableCredentialResponse:
+        """Deserializes the GenerateTemporaryTableCredentialResponse from a dictionary."""
+        return cls(aws_temp_credentials=_from_dict(d, 'aws_temp_credentials', AwsCredentials),
+                   azure_user_delegation_sas=_from_dict(d, 'azure_user_delegation_sas',
+                                                        AzureUserDelegationSas),
+                   expiration_time=d.get('expiration_time', None),
+                   gcp_oauth_token=_from_dict(d, 'gcp_oauth_token', GcpOauthToken),
+                   r2_temp_credentials=_from_dict(d, 'r2_temp_credentials', R2Credentials),
+                   url=d.get('url', None))
+
+
+class GetBindingsSecurableType(Enum):
+
+    CATALOG = 'catalog'
+    EXTERNAL_LOCATION = 'external_location'
+    STORAGE_CREDENTIAL = 'storage_credential'
+
+
 @dataclass
 class GetMetastoreSummaryResponse:
     cloud: Optional[str] = None
@@ -2442,6 +2619,9 @@ class GetMetastoreSummaryResponse:
     delta_sharing_scope: Optional[GetMetastoreSummaryResponseDeltaSharingScope] = None
     """The scope of Delta Sharing enabled for the metastore."""
 
+    external_access_enabled: Optional[bool] = None
+    """Whether to allow non-DBR clients to directly access entities under the metastore."""
+
     global_metastore_id: Optional[str] = None
     """Globally unique metastore ID across clouds and regions, of the form `cloud:region:metastore_id`."""
 
@@ -2489,6 +2669,8 @@ def as_dict(self) -> dict:
             body[
                 'delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds
         if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope.value
+        if self.external_access_enabled is not None:
+            body['external_access_enabled'] = self.external_access_enabled
         if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id
         if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
         if self.name is not None: body['name'] = self.name
@@ -2517,6 +2699,7 @@ def from_dict(cls, d: Dict[str, any]) -> GetMetastoreSummaryResponse:
                        'delta_sharing_recipient_token_lifetime_in_seconds', None),
                    delta_sharing_scope=_enum(d, 'delta_sharing_scope',
                                              GetMetastoreSummaryResponseDeltaSharingScope),
+                   external_access_enabled=d.get('external_access_enabled', None),
                    global_metastore_id=d.get('global_metastore_id', None),
                    metastore_id=d.get('metastore_id', None),
                    name=d.get('name', None),
@@ -2537,6 +2720,23 @@ class GetMetastoreSummaryResponseDeltaSharingScope(Enum):
     INTERNAL_AND_EXTERNAL = 'INTERNAL_AND_EXTERNAL'
 
 
+@dataclass
+class GetQuotaResponse:
+    quota_info: Optional[QuotaInfo] = None
+    """The returned QuotaInfo."""
+
+    def as_dict(self) -> dict:
+        """Serializes the GetQuotaResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.quota_info: body['quota_info'] = self.quota_info.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GetQuotaResponse:
+        """Deserializes the GetQuotaResponse from a dictionary."""
+        return cls(quota_info=_from_dict(d, 'quota_info', QuotaInfo))
+
+
 class IsolationMode(Enum):
     """Whether the current securable is accessible from all workspaces or a specific set of workspaces."""
 
@@ -2712,6 +2912,29 @@ def from_dict(cls, d: Dict[str, any]) -> ListModelVersionsResponse:
                    next_page_token=d.get('next_page_token', None))
 
 
+@dataclass
+class ListQuotasResponse:
+    next_page_token: Optional[str] = None
+    """Opaque token to retrieve the next page of results. Absent if there are no more pages.
+    __page_token__ should be set to this value for the next request."""
+
+    quotas: Optional[List[QuotaInfo]] = None
+    """An array of returned QuotaInfos."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListQuotasResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.quotas: body['quotas'] = [v.as_dict() for v in self.quotas]
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListQuotasResponse:
+        """Deserializes the ListQuotasResponse from a dictionary."""
+        return cls(next_page_token=d.get('next_page_token', None),
+                   quotas=_repeated_dict(d, 'quotas', QuotaInfo))
+
+
 @dataclass
 class ListRegisteredModelsResponse:
     next_page_token: Optional[str] = None
@@ -2782,19 +3005,25 @@ def from_dict(cls, d: Dict[str, any]) -> ListStorageCredentialsResponse:
 
 @dataclass
 class ListSystemSchemasResponse:
+    next_page_token: Optional[str] = None
+    """Opaque token to retrieve the next page of results. Absent if there are no more pages.
+    __page_token__ should be set to this value for the next request (for the next page of results)."""
+
     schemas: Optional[List[SystemSchemaInfo]] = None
     """An array of system schema information objects."""
 
     def as_dict(self) -> dict:
         """Serializes the ListSystemSchemasResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         if self.schemas: body['schemas'] = [v.as_dict() for v in self.schemas]
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListSystemSchemasResponse:
         """Deserializes the ListSystemSchemasResponse from a dictionary."""
-        return cls(schemas=_repeated_dict(d, 'schemas', SystemSchemaInfo))
+        return cls(next_page_token=d.get('next_page_token', None),
+                   schemas=_repeated_dict(d, 'schemas', SystemSchemaInfo))
 
 
 @dataclass
@@ -2923,6 +3152,9 @@ class MetastoreInfo:
     delta_sharing_scope: Optional[MetastoreInfoDeltaSharingScope] = None
     """The scope of Delta Sharing enabled for the metastore."""
 
+    external_access_enabled: Optional[bool] = None
+    """Whether to allow non-DBR clients to directly access entities under the metastore."""
+
     global_metastore_id: Optional[str] = None
     """Globally unique metastore ID across clouds and regions, of the form `cloud:region:metastore_id`."""
 
@@ -2970,6 +3202,8 @@ def as_dict(self) -> dict:
             body[
                 'delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds
         if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope.value
+        if self.external_access_enabled is not None:
+            body['external_access_enabled'] = self.external_access_enabled
         if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id
         if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
         if self.name is not None: body['name'] = self.name
@@ -2997,6 +3231,7 @@ def from_dict(cls, d: Dict[str, any]) -> MetastoreInfo:
                    delta_sharing_recipient_token_lifetime_in_seconds=d.get(
                        'delta_sharing_recipient_token_lifetime_in_seconds', None),
                    delta_sharing_scope=_enum(d, 'delta_sharing_scope', MetastoreInfoDeltaSharingScope),
+                   external_access_enabled=d.get('external_access_enabled', None),
                    global_metastore_id=d.get('global_metastore_id', None),
                    metastore_id=d.get('metastore_id', None),
                    name=d.get('name', None),
@@ -3019,6 +3254,9 @@ class MetastoreInfoDeltaSharingScope(Enum):
 
 @dataclass
 class ModelVersionInfo:
+    aliases: Optional[List[RegisteredModelAlias]] = None
+    """List of aliases associated with the model version"""
+
     browse_only: Optional[bool] = None
     """Indicates whether the principal is limited to retrieving metadata for the associated object
     through the BROWSE privilege when include_browse is enabled in the request."""
@@ -3079,6 +3317,7 @@ class ModelVersionInfo:
     def as_dict(self) -> dict:
         """Serializes the ModelVersionInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.aliases: body['aliases'] = [v.as_dict() for v in self.aliases]
         if self.browse_only is not None: body['browse_only'] = self.browse_only
         if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
         if self.comment is not None: body['comment'] = self.comment
@@ -3103,7 +3342,8 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ModelVersionInfo:
         """Deserializes the ModelVersionInfo from a dictionary."""
-        return cls(browse_only=d.get('browse_only', None),
+        return cls(aliases=_repeated_dict(d, 'aliases', RegisteredModelAlias),
+                   browse_only=d.get('browse_only', None),
                    catalog_name=d.get('catalog_name', None),
                    comment=d.get('comment', None),
                    created_at=d.get('created_at', None),
@@ -3625,11 +3865,16 @@ class OnlineTable:
     """Specification of the online table."""
 
     status: Optional[OnlineTableStatus] = None
-    """Online Table status"""
+    """Online Table data synchronization status"""
 
     table_serving_url: Optional[str] = None
     """Data serving REST API URL for this table"""
 
+    unity_catalog_provisioning_state: Optional[ProvisioningInfoState] = None
+    """The provisioning state of the online table entity in Unity Catalog. This is distinct from the
+    state of the data synchronization pipeline (i.e. the table may be in "ACTIVE" but the pipeline
+    may be in "PROVISIONING" as it runs asynchronously)."""
+
     def as_dict(self) -> dict:
         """Serializes the OnlineTable into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -3637,6 +3882,8 @@ def as_dict(self) -> dict:
         if self.spec: body['spec'] = self.spec.as_dict()
         if self.status: body['status'] = self.status.as_dict()
         if self.table_serving_url is not None: body['table_serving_url'] = self.table_serving_url
+        if self.unity_catalog_provisioning_state is not None:
+            body['unity_catalog_provisioning_state'] = self.unity_catalog_provisioning_state.value
         return body
 
     @classmethod
@@ -3645,7 +3892,9 @@ def from_dict(cls, d: Dict[str, any]) -> OnlineTable:
         return cls(name=d.get('name', None),
                    spec=_from_dict(d, 'spec', OnlineTableSpec),
                    status=_from_dict(d, 'status', OnlineTableStatus),
-                   table_serving_url=d.get('table_serving_url', None))
+                   table_serving_url=d.get('table_serving_url', None),
+                   unity_catalog_provisioning_state=_enum(d, 'unity_catalog_provisioning_state',
+                                                          ProvisioningInfoState))
 
 
 @dataclass
@@ -3742,7 +3991,6 @@ class OnlineTableState(Enum):
     ONLINE_CONTINUOUS_UPDATE = 'ONLINE_CONTINUOUS_UPDATE'
     ONLINE_NO_PENDING_UPDATE = 'ONLINE_NO_PENDING_UPDATE'
     ONLINE_PIPELINE_FAILED = 'ONLINE_PIPELINE_FAILED'
-    ONLINE_TABLE_STATE_UNSPECIFIED = 'ONLINE_TABLE_STATE_UNSPECIFIED'
     ONLINE_TRIGGERED_UPDATE = 'ONLINE_TRIGGERED_UPDATE'
     ONLINE_UPDATING_PIPELINE_RESOURCES = 'ONLINE_UPDATING_PIPELINE_RESOURCES'
     PROVISIONING = 'PROVISIONING'
@@ -3935,6 +4183,7 @@ class Privilege(Enum):
     CREATE_VIEW = 'CREATE_VIEW'
     CREATE_VOLUME = 'CREATE_VOLUME'
     EXECUTE = 'EXECUTE'
+    MANAGE = 'MANAGE'
     MANAGE_ALLOWLIST = 'MANAGE_ALLOWLIST'
     MODIFY = 'MODIFY'
     READ_FILES = 'READ_FILES'
@@ -4004,7 +4253,7 @@ class ProvisioningInfoState(Enum):
     DELETING = 'DELETING'
     FAILED = 'FAILED'
     PROVISIONING = 'PROVISIONING'
-    STATE_UNSPECIFIED = 'STATE_UNSPECIFIED'
+    UPDATING = 'UPDATING'
 
 
 @dataclass
@@ -4030,6 +4279,122 @@ def from_dict(cls, d: Dict[str, any]) -> ProvisioningStatus:
             initial_pipeline_sync_progress=_from_dict(d, 'initial_pipeline_sync_progress', PipelineProgress))
 
 
+@dataclass
+class QuotaInfo:
+    last_refreshed_at: Optional[int] = None
+    """The timestamp that indicates when the quota count was last updated."""
+
+    parent_full_name: Optional[str] = None
+    """Name of the parent resource. Returns metastore ID if the parent is a metastore."""
+
+    parent_securable_type: Optional[SecurableType] = None
+    """The quota parent securable type."""
+
+    quota_count: Optional[int] = None
+    """The current usage of the resource quota."""
+
+    quota_limit: Optional[int] = None
+    """The current limit of the resource quota."""
+
+    quota_name: Optional[str] = None
+    """The name of the quota."""
+
+    def as_dict(self) -> dict:
+        """Serializes the QuotaInfo into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.last_refreshed_at is not None: body['last_refreshed_at'] = self.last_refreshed_at
+        if self.parent_full_name is not None: body['parent_full_name'] = self.parent_full_name
+        if self.parent_securable_type is not None:
+            body['parent_securable_type'] = self.parent_securable_type.value
+        if self.quota_count is not None: body['quota_count'] = self.quota_count
+        if self.quota_limit is not None: body['quota_limit'] = self.quota_limit
+        if self.quota_name is not None: body['quota_name'] = self.quota_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> QuotaInfo:
+        """Deserializes the QuotaInfo from a dictionary."""
+        return cls(last_refreshed_at=d.get('last_refreshed_at', None),
+                   parent_full_name=d.get('parent_full_name', None),
+                   parent_securable_type=_enum(d, 'parent_securable_type', SecurableType),
+                   quota_count=d.get('quota_count', None),
+                   quota_limit=d.get('quota_limit', None),
+                   quota_name=d.get('quota_name', None))
+
+
+@dataclass
+class R2Credentials:
+    """R2 temporary credentials for API authentication. Read more at
+    https://developers.cloudflare.com/r2/api/s3/tokens/."""
+
+    access_key_id: Optional[str] = None
+    """The access key ID that identifies the temporary credentials."""
+
+    secret_access_key: Optional[str] = None
+    """The secret access key associated with the access key."""
+
+    session_token: Optional[str] = None
+    """The generated JWT that users must pass to use the temporary credentials."""
+
+    def as_dict(self) -> dict:
+        """Serializes the R2Credentials into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.access_key_id is not None: body['access_key_id'] = self.access_key_id
+        if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key
+        if self.session_token is not None: body['session_token'] = self.session_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> R2Credentials:
+        """Deserializes the R2Credentials from a dictionary."""
+        return cls(access_key_id=d.get('access_key_id', None),
+                   secret_access_key=d.get('secret_access_key', None),
+                   session_token=d.get('session_token', None))
+
+
+@dataclass
+class RegenerateDashboardRequest:
+    table_name: Optional[str] = None
+    """Full name of the table."""
+
+    warehouse_id: Optional[str] = None
+    """Optional argument to specify the warehouse for dashboard regeneration. If not specified, the
+    first running warehouse will be used."""
+
+    def as_dict(self) -> dict:
+        """Serializes the RegenerateDashboardRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.table_name is not None: body['table_name'] = self.table_name
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> RegenerateDashboardRequest:
+        """Deserializes the RegenerateDashboardRequest from a dictionary."""
+        return cls(table_name=d.get('table_name', None), warehouse_id=d.get('warehouse_id', None))
+
+
+@dataclass
+class RegenerateDashboardResponse:
+    dashboard_id: Optional[str] = None
+    """Id of the regenerated monitoring dashboard."""
+
+    parent_folder: Optional[str] = None
+    """The directory where the regenerated dashboard is stored."""
+
+    def as_dict(self) -> dict:
+        """Serializes the RegenerateDashboardResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.parent_folder is not None: body['parent_folder'] = self.parent_folder
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> RegenerateDashboardResponse:
+        """Deserializes the RegenerateDashboardResponse from a dictionary."""
+        return cls(dashboard_id=d.get('dashboard_id', None), parent_folder=d.get('parent_folder', None))
+
+
 @dataclass
 class RegisteredModelAlias:
     """Registered model alias."""
@@ -4732,6 +5097,12 @@ def from_dict(cls, d: Dict[str, any]) -> TableInfo:
                    view_dependencies=_from_dict(d, 'view_dependencies', DependencyList))
 
 
+class TableOperation(Enum):
+
+    READ = 'READ'
+    READ_WRITE = 'READ_WRITE'
+
+
 @dataclass
 class TableRowFilter:
     function_name: str
@@ -4849,6 +5220,13 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateAssignmentResponse:
         return cls()
 
 
+class UpdateBindingsSecurableType(Enum):
+
+    CATALOG = 'catalog'
+    EXTERNAL_LOCATION = 'external_location'
+    STORAGE_CREDENTIAL = 'storage_credential'
+
+
 @dataclass
 class UpdateCatalog:
     comment: Optional[str] = None
@@ -4944,6 +5322,11 @@ class UpdateExternalLocation:
     encryption_details: Optional[EncryptionDetails] = None
     """Encryption options that apply to clients connecting to cloud storage."""
 
+    fallback: Optional[bool] = None
+    """Indicates whether fallback mode is enabled for this external location. When fallback mode is
+    enabled, the access to the location falls back to cluster credentials if UC credentials are not
+    sufficient."""
+
     force: Optional[bool] = None
     """Force update even if changing url invalidates dependent external tables or mounts."""
 
@@ -4975,6 +5358,7 @@ def as_dict(self) -> dict:
         if self.comment is not None: body['comment'] = self.comment
         if self.credential_name is not None: body['credential_name'] = self.credential_name
         if self.encryption_details: body['encryption_details'] = self.encryption_details.as_dict()
+        if self.fallback is not None: body['fallback'] = self.fallback
         if self.force is not None: body['force'] = self.force
         if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value
         if self.name is not None: body['name'] = self.name
@@ -4992,6 +5376,7 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateExternalLocation:
                    comment=d.get('comment', None),
                    credential_name=d.get('credential_name', None),
                    encryption_details=_from_dict(d, 'encryption_details', EncryptionDetails),
+                   fallback=d.get('fallback', None),
                    force=d.get('force', None),
                    isolation_mode=_enum(d, 'isolation_mode', IsolationMode),
                    name=d.get('name', None),
@@ -5086,7 +5471,8 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateMetastore:
 @dataclass
 class UpdateMetastoreAssignment:
     default_catalog_name: Optional[str] = None
-    """The name of the default catalog for the metastore."""
+    """The name of the default catalog in the metastore. This field is deprecated. Please use "Default
+    Namespace API" to configure the default catalog for a Databricks workspace."""
 
     metastore_id: Optional[str] = None
     """The unique ID of the metastore."""
@@ -5492,8 +5878,8 @@ class UpdateWorkspaceBindingsParameters:
     securable_name: Optional[str] = None
     """The name of the securable."""
 
-    securable_type: Optional[str] = None
-    """The type of the securable."""
+    securable_type: Optional[UpdateBindingsSecurableType] = None
+    """The type of the securable to bind to a workspace."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateWorkspaceBindingsParameters into a dictionary suitable for use as a JSON request body."""
@@ -5501,7 +5887,7 @@ def as_dict(self) -> dict:
         if self.add: body['add'] = [v.as_dict() for v in self.add]
         if self.remove: body['remove'] = [v.as_dict() for v in self.remove]
         if self.securable_name is not None: body['securable_name'] = self.securable_name
-        if self.securable_type is not None: body['securable_type'] = self.securable_type
+        if self.securable_type is not None: body['securable_type'] = self.securable_type.value
         return body
 
     @classmethod
@@ -5510,7 +5896,7 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateWorkspaceBindingsParameters:
         return cls(add=_repeated_dict(d, 'add', WorkspaceBinding),
                    remove=_repeated_dict(d, 'remove', WorkspaceBinding),
                    securable_name=d.get('securable_name', None),
-                   securable_type=d.get('securable_type', None))
+                   securable_type=_enum(d, 'securable_type', UpdateBindingsSecurableType))
 
 
 @dataclass
@@ -5776,16 +6162,22 @@ class WorkspaceBindingsResponse:
     bindings: Optional[List[WorkspaceBinding]] = None
     """List of workspace bindings"""
 
+    next_page_token: Optional[str] = None
+    """Opaque token to retrieve the next page of results. Absent if there are no more pages.
+    __page_token__ should be set to this value for the next request (for the next page of results)."""
+
     def as_dict(self) -> dict:
         """Serializes the WorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.bindings: body['bindings'] = [v.as_dict() for v in self.bindings]
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkspaceBindingsResponse:
         """Deserializes the WorkspaceBindingsResponse from a dictionary."""
-        return cls(bindings=_repeated_dict(d, 'bindings', WorkspaceBinding))
+        return cls(bindings=_repeated_dict(d, 'bindings', WorkspaceBinding),
+                   next_page_token=d.get('next_page_token', None))
 
 
 class AccountMetastoreAssignmentsAPI:
@@ -6566,6 +6958,7 @@ def create(self,
                access_point: Optional[str] = None,
                comment: Optional[str] = None,
                encryption_details: Optional[EncryptionDetails] = None,
+               fallback: Optional[bool] = None,
                read_only: Optional[bool] = None,
                skip_validation: Optional[bool] = None) -> ExternalLocationInfo:
         """Create an external location.
@@ -6586,6 +6979,10 @@ def create(self,
           User-provided free-form text description.
         :param encryption_details: :class:`EncryptionDetails` (optional)
           Encryption options that apply to clients connecting to cloud storage.
+        :param fallback: bool (optional)
+          Indicates whether fallback mode is enabled for this external location. When fallback mode is
+          enabled, the access to the location falls back to cluster credentials if UC credentials are not
+          sufficient.
         :param read_only: bool (optional)
           Indicates whether the external location is read-only.
         :param skip_validation: bool (optional)
@@ -6598,6 +6995,7 @@ def create(self,
         if comment is not None: body['comment'] = comment
         if credential_name is not None: body['credential_name'] = credential_name
         if encryption_details is not None: body['encryption_details'] = encryption_details.as_dict()
+        if fallback is not None: body['fallback'] = fallback
         if name is not None: body['name'] = name
         if read_only is not None: body['read_only'] = read_only
         if skip_validation is not None: body['skip_validation'] = skip_validation
@@ -6705,6 +7103,7 @@ def update(self,
                comment: Optional[str] = None,
                credential_name: Optional[str] = None,
                encryption_details: Optional[EncryptionDetails] = None,
+               fallback: Optional[bool] = None,
                force: Optional[bool] = None,
                isolation_mode: Optional[IsolationMode] = None,
                new_name: Optional[str] = None,
@@ -6728,6 +7127,10 @@ def update(self,
           Name of the storage credential used with this location.
         :param encryption_details: :class:`EncryptionDetails` (optional)
           Encryption options that apply to clients connecting to cloud storage.
+        :param fallback: bool (optional)
+          Indicates whether fallback mode is enabled for this external location. When fallback mode is
+          enabled, the access to the location falls back to cluster credentials if UC credentials are not
+          sufficient.
         :param force: bool (optional)
           Force update even if changing url invalidates dependent external tables or mounts.
         :param isolation_mode: :class:`IsolationMode` (optional)
@@ -6750,6 +7153,7 @@ def update(self,
         if comment is not None: body['comment'] = comment
         if credential_name is not None: body['credential_name'] = credential_name
         if encryption_details is not None: body['encryption_details'] = encryption_details.as_dict()
+        if fallback is not None: body['fallback'] = fallback
         if force is not None: body['force'] = force
         if isolation_mode is not None: body['isolation_mode'] = isolation_mode.value
         if new_name is not None: body['new_name'] = new_name
@@ -7056,7 +7460,8 @@ def assign(self, workspace_id: int, metastore_id: str, default_catalog_name: str
         :param metastore_id: str
           The unique ID of the metastore.
         :param default_catalog_name: str
-          The name of the default catalog in the metastore.
+          The name of the default catalog in the metastore. This field is deprecated. Please use "Default
+          Namespace API" to configure the default catalog for a Databricks workspace.
         
         
         """
@@ -7269,7 +7674,8 @@ def update_assignment(self,
         :param workspace_id: int
           A workspace ID.
         :param default_catalog_name: str (optional)
-          The name of the default catalog for the metastore.
+          The name of the default catalog in the metastore. This field is deprecated. Please use "Default
+          Namespace API" to configure the default catalog for a Databricks workspace.
         :param metastore_id: str (optional)
           The unique ID of the metastore.
         
@@ -7325,7 +7731,8 @@ def get(self,
             full_name: str,
             version: int,
             *,
-            include_browse: Optional[bool] = None) -> RegisteredModelInfo:
+            include_aliases: Optional[bool] = None,
+            include_browse: Optional[bool] = None) -> ModelVersionInfo:
         """Get a Model Version.
         
         Get a model version.
@@ -7338,14 +7745,17 @@ def get(self,
           The three-level (fully qualified) name of the model version
         :param version: int
           The integer version number of the model version
+        :param include_aliases: bool (optional)
+          Whether to include aliases associated with the model version in the response
         :param include_browse: bool (optional)
           Whether to include model versions in the response for which the principal can only access selective
           metadata for
         
-        :returns: :class:`RegisteredModelInfo`
+        :returns: :class:`ModelVersionInfo`
         """
 
         query = {}
+        if include_aliases is not None: query['include_aliases'] = include_aliases
         if include_browse is not None: query['include_browse'] = include_browse
         headers = {'Accept': 'application/json', }
 
@@ -7353,9 +7763,13 @@ def get(self,
                            f'/api/2.1/unity-catalog/models/{full_name}/versions/{version}',
                            query=query,
                            headers=headers)
-        return RegisteredModelInfo.from_dict(res)
+        return ModelVersionInfo.from_dict(res)
 
-    def get_by_alias(self, full_name: str, alias: str) -> ModelVersionInfo:
+    def get_by_alias(self,
+                     full_name: str,
+                     alias: str,
+                     *,
+                     include_aliases: Optional[bool] = None) -> ModelVersionInfo:
         """Get Model Version By Alias.
         
         Get a model version by alias.
@@ -7368,14 +7782,19 @@ def get_by_alias(self, full_name: str, alias: str) -> ModelVersionInfo:
           The three-level (fully qualified) name of the registered model
         :param alias: str
           The name of the alias
+        :param include_aliases: bool (optional)
+          Whether to include aliases associated with the model version in the response
         
         :returns: :class:`ModelVersionInfo`
         """
 
+        query = {}
+        if include_aliases is not None: query['include_aliases'] = include_aliases
         headers = {'Accept': 'application/json', }
 
         res = self._api.do('GET',
                            f'/api/2.1/unity-catalog/models/{full_name}/aliases/{alias}',
+                           query=query,
                            headers=headers)
         return ModelVersionInfo.from_dict(res)
 
@@ -7751,6 +8170,40 @@ def list_refreshes(self, table_name: str) -> MonitorRefreshListResponse:
                            headers=headers)
         return MonitorRefreshListResponse.from_dict(res)
 
+    def regenerate_dashboard(self,
+                             table_name: str,
+                             *,
+                             warehouse_id: Optional[str] = None) -> RegenerateDashboardResponse:
+        """Regenerate a monitoring dashboard.
+        
+        Regenerates the monitoring dashboard for the specified table.
+        
+        The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
+        table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
+        - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an
+        owner of the table
+        
+        The call must be made from the workspace where the monitor was created. The dashboard will be
+        regenerated in the assets directory that was specified when the monitor was created.
+        
+        :param table_name: str
+          Full name of the table.
+        :param warehouse_id: str (optional)
+          Optional argument to specify the warehouse for dashboard regeneration. If not specified, the first
+          running warehouse will be used.
+        
+        :returns: :class:`RegenerateDashboardResponse`
+        """
+        body = {}
+        if warehouse_id is not None: body['warehouse_id'] = warehouse_id
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST',
+                           f'/api/2.1/quality-monitoring/tables/{table_name}/monitor/dashboard',
+                           body=body,
+                           headers=headers)
+        return RegenerateDashboardResponse.from_dict(res)
+
     def run_refresh(self, table_name: str) -> MonitorRefreshInfo:
         """Queue a metric refresh for a monitor.
         
@@ -7971,7 +8424,11 @@ def delete_alias(self, full_name: str, alias: str):
 
         self._api.do('DELETE', f'/api/2.1/unity-catalog/models/{full_name}/aliases/{alias}', headers=headers)
 
-    def get(self, full_name: str, *, include_browse: Optional[bool] = None) -> RegisteredModelInfo:
+    def get(self,
+            full_name: str,
+            *,
+            include_aliases: Optional[bool] = None,
+            include_browse: Optional[bool] = None) -> RegisteredModelInfo:
         """Get a Registered Model.
         
         Get a registered model.
@@ -7982,6 +8439,8 @@ def get(self, full_name: str, *, include_browse: Optional[bool] = None) -> Regis
         
         :param full_name: str
           The three-level (fully qualified) name of the registered model
+        :param include_aliases: bool (optional)
+          Whether to include registered model aliases in the response
         :param include_browse: bool (optional)
           Whether to include registered models in the response for which the principal can only access
           selective metadata for
@@ -7990,6 +8449,7 @@ def get(self, full_name: str, *, include_browse: Optional[bool] = None) -> Regis
         """
 
         query = {}
+        if include_aliases is not None: query['include_aliases'] = include_aliases
         if include_browse is not None: query['include_browse'] = include_browse
         headers = {'Accept': 'application/json', }
 
@@ -8127,6 +8587,78 @@ def update(self,
         return RegisteredModelInfo.from_dict(res)
 
 
+class ResourceQuotasAPI:
+    """Unity Catalog enforces resource quotas on all securable objects, which limits the number of resources that
+    can be created. Quotas are expressed in terms of a resource type and a parent (for example, tables per
+    metastore or schemas per catalog). The resource quota APIs enable you to monitor your current usage and
+    limits. For more information on resource quotas see the [Unity Catalog documentation].
+    
+    [Unity Catalog documentation]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#resource-quotas"""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def get_quota(self, parent_securable_type: str, parent_full_name: str,
+                  quota_name: str) -> GetQuotaResponse:
+        """Get information for a single resource quota.
+        
+        The GetQuota API returns usage information for a single resource quota, defined as a child-parent
+        pair. This API also refreshes the quota count if it is out of date. Refreshes are triggered
+        asynchronously. The updated count might not be returned in the first call.
+        
+        :param parent_securable_type: str
+          Securable type of the quota parent.
+        :param parent_full_name: str
+          Full name of the parent resource. Provide the metastore ID if the parent is a metastore.
+        :param quota_name: str
+          Name of the quota. Follows the pattern of the quota type, with "-quota" added as a suffix.
+        
+        :returns: :class:`GetQuotaResponse`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do(
+            'GET',
+            f'/api/2.1/unity-catalog/resource-quotas/{parent_securable_type}/{parent_full_name}/{quota_name}',
+            headers=headers)
+        return GetQuotaResponse.from_dict(res)
+
+    def list_quotas(self,
+                    *,
+                    max_results: Optional[int] = None,
+                    page_token: Optional[str] = None) -> Iterator[QuotaInfo]:
+        """List all resource quotas under a metastore.
+        
+        ListQuotas returns all quota values under the metastore. There are no SLAs on the freshness of the
+        counts returned. This API does not trigger a refresh of quota counts.
+        
+        :param max_results: int (optional)
+          The number of quotas to return.
+        :param page_token: str (optional)
+          Opaque token for the next page of results.
+        
+        :returns: Iterator over :class:`QuotaInfo`
+        """
+
+        query = {}
+        if max_results is not None: query['max_results'] = max_results
+        if page_token is not None: query['page_token'] = page_token
+        headers = {'Accept': 'application/json', }
+
+        while True:
+            json = self._api.do('GET',
+                                '/api/2.1/unity-catalog/resource-quotas/all-resource-quotas',
+                                query=query,
+                                headers=headers)
+            if 'quotas' in json:
+                for v in json['quotas']:
+                    yield QuotaInfo.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+
+
 class SchemasAPI:
     """A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace. A schema
     organizes tables, views and functions. To access (or list) a table or view in a schema, users must have
@@ -8172,7 +8704,7 @@ def create(self,
         res = self._api.do('POST', '/api/2.1/unity-catalog/schemas', body=body, headers=headers)
         return SchemaInfo.from_dict(res)
 
-    def delete(self, full_name: str):
+    def delete(self, full_name: str, *, force: Optional[bool] = None):
         """Delete a schema.
         
         Deletes the specified schema from the parent catalog. The caller must be the owner of the schema or an
@@ -8180,13 +8712,17 @@ def delete(self, full_name: str):
         
         :param full_name: str
           Full name of the schema.
+        :param force: bool (optional)
+          Force deletion even if the schema is not empty.
         
         
         """
 
+        query = {}
+        if force is not None: query['force'] = force
         headers = {'Accept': 'application/json', }
 
-        self._api.do('DELETE', f'/api/2.1/unity-catalog/schemas/{full_name}', headers=headers)
+        self._api.do('DELETE', f'/api/2.1/unity-catalog/schemas/{full_name}', query=query, headers=headers)
 
     def get(self, full_name: str, *, include_browse: Optional[bool] = None) -> SchemaInfo:
         """Get a schema.
@@ -8632,7 +9168,11 @@ def enable(self, metastore_id: str, schema_name: str):
                      f'/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas/{schema_name}',
                      headers=headers)
 
-    def list(self, metastore_id: str) -> Iterator[SystemSchemaInfo]:
+    def list(self,
+             metastore_id: str,
+             *,
+             max_results: Optional[int] = None,
+             page_token: Optional[str] = None) -> Iterator[SystemSchemaInfo]:
         """List system schemas.
         
         Gets an array of system schemas for a metastore. The caller must be an account admin or a metastore
@@ -8640,17 +9180,33 @@ def list(self, metastore_id: str) -> Iterator[SystemSchemaInfo]:
         
         :param metastore_id: str
           The ID for the metastore in which the system schema resides.
+        :param max_results: int (optional)
+          Maximum number of schemas to return. - When set to 0, the page length is set to a server configured
+          value (recommended); - When set to a value greater than 0, the page length is the minimum of this
+          value and a server configured value; - When set to a value less than 0, an invalid parameter error
+          is returned; - If not set, all the schemas are returned (not recommended).
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
         
         :returns: Iterator over :class:`SystemSchemaInfo`
         """
 
+        query = {}
+        if max_results is not None: query['max_results'] = max_results
+        if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
-        json = self._api.do('GET',
-                            f'/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas',
-                            headers=headers)
-        parsed = ListSystemSchemasResponse.from_dict(json).schemas
-        return parsed if parsed is not None else []
+        while True:
+            json = self._api.do('GET',
+                                f'/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas',
+                                query=query,
+                                headers=headers)
+            if 'schemas' in json:
+                for v in json['schemas']:
+                    yield SystemSchemaInfo.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
 
 
 class TableConstraintsAPI:
@@ -8786,7 +9342,8 @@ def get(self,
             full_name: str,
             *,
             include_browse: Optional[bool] = None,
-            include_delta_metadata: Optional[bool] = None) -> TableInfo:
+            include_delta_metadata: Optional[bool] = None,
+            include_manifest_capabilities: Optional[bool] = None) -> TableInfo:
         """Get a table.
         
         Gets a table from the metastore for a specific catalog and schema. The caller must satisfy one of the
@@ -8802,6 +9359,8 @@ def get(self,
           for
         :param include_delta_metadata: bool (optional)
           Whether delta metadata should be included in the response.
+        :param include_manifest_capabilities: bool (optional)
+          Whether to include a manifest containing capabilities the table has.
         
         :returns: :class:`TableInfo`
         """
@@ -8809,6 +9368,8 @@ def get(self,
         query = {}
         if include_browse is not None: query['include_browse'] = include_browse
         if include_delta_metadata is not None: query['include_delta_metadata'] = include_delta_metadata
+        if include_manifest_capabilities is not None:
+            query['include_manifest_capabilities'] = include_manifest_capabilities
         headers = {'Accept': 'application/json', }
 
         res = self._api.do('GET', f'/api/2.1/unity-catalog/tables/{full_name}', query=query, headers=headers)
@@ -8820,6 +9381,7 @@ def list(self,
              *,
              include_browse: Optional[bool] = None,
              include_delta_metadata: Optional[bool] = None,
+             include_manifest_capabilities: Optional[bool] = None,
              max_results: Optional[int] = None,
              omit_columns: Optional[bool] = None,
              omit_properties: Optional[bool] = None,
@@ -8841,6 +9403,8 @@ def list(self,
           for
         :param include_delta_metadata: bool (optional)
           Whether delta metadata should be included in the response.
+        :param include_manifest_capabilities: bool (optional)
+          Whether to include a manifest containing capabilities the table has.
         :param max_results: int (optional)
           Maximum number of tables to return. If not set, all the tables are returned (not recommended). -
           when set to a value greater than 0, the page length is the minimum of this value and a server
@@ -8860,6 +9424,8 @@ def list(self,
         if catalog_name is not None: query['catalog_name'] = catalog_name
         if include_browse is not None: query['include_browse'] = include_browse
         if include_delta_metadata is not None: query['include_delta_metadata'] = include_delta_metadata
+        if include_manifest_capabilities is not None:
+            query['include_manifest_capabilities'] = include_manifest_capabilities
         if max_results is not None: query['max_results'] = max_results
         if omit_columns is not None: query['omit_columns'] = omit_columns
         if omit_properties is not None: query['omit_properties'] = omit_properties
@@ -8879,6 +9445,7 @@ def list(self,
     def list_summaries(self,
                        catalog_name: str,
                        *,
+                       include_manifest_capabilities: Optional[bool] = None,
                        max_results: Optional[int] = None,
                        page_token: Optional[str] = None,
                        schema_name_pattern: Optional[str] = None,
@@ -8898,6 +9465,8 @@ def list_summaries(self,
         
         :param catalog_name: str
           Name of parent catalog for tables of interest.
+        :param include_manifest_capabilities: bool (optional)
+          Whether to include a manifest containing capabilities the table has.
         :param max_results: int (optional)
           Maximum number of summaries for tables to return. If not set, the page length is set to a server
           configured value (10000, as of 1/5/2024). - when set to a value greater than 0, the page length is
@@ -8916,6 +9485,8 @@ def list_summaries(self,
 
         query = {}
         if catalog_name is not None: query['catalog_name'] = catalog_name
+        if include_manifest_capabilities is not None:
+            query['include_manifest_capabilities'] = include_manifest_capabilities
         if max_results is not None: query['max_results'] = max_results
         if page_token is not None: query['page_token'] = page_token
         if schema_name_pattern is not None: query['schema_name_pattern'] = schema_name_pattern
@@ -8952,6 +9523,55 @@ def update(self, full_name: str, *, owner: Optional[str] = None):
         self._api.do('PATCH', f'/api/2.1/unity-catalog/tables/{full_name}', body=body, headers=headers)
 
 
+class TemporaryTableCredentialsAPI:
+    """Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud storage
+    locationswhere table data is stored in Databricks. These credentials are employed to provide secure and
+    time-limitedaccess to data in cloud environments such as AWS, Azure, and Google Cloud. Each cloud provider
+    has its own typeof credentials: AWS uses temporary session tokens via AWS Security Token Service (STS),
+    Azure utilizesShared Access Signatures (SAS) for its data storage services, and Google Cloud supports
+    temporary credentialsthrough OAuth 2.0.Temporary table credentials ensure that data access is limited in
+    scope and duration, reducing the risk ofunauthorized access or misuse. To use the temporary table
+    credentials API, a metastore admin needs to enable the external_access_enabled flag (off by default) at
+    the metastore level, and user needs to be granted the EXTERNAL USE SCHEMA permission at the schema level
+    by catalog admin. Note that EXTERNAL USE SCHEMA is a schema level permission that can only be granted by
+    catalog admin explicitly and is not included in schema ownership or ALL PRIVILEGES on the schema for
+    security reason."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def generate_temporary_table_credentials(
+            self,
+            *,
+            operation: Optional[TableOperation] = None,
+            table_id: Optional[str] = None) -> GenerateTemporaryTableCredentialResponse:
+        """Generate a temporary table credential.
+        
+        Get a short-lived credential for directly accessing the table data on cloud storage. The metastore
+        must have external_access_enabled flag set to true (default false). The caller must have
+        EXTERNAL_USE_SCHEMA privilege on the parent schema and this privilege can only be granted by catalog
+        owners.
+        
+        :param operation: :class:`TableOperation` (optional)
+          The operation performed against the table data, either READ or READ_WRITE. If READ_WRITE is
+          specified, the credentials returned will have write permissions, otherwise, it will be read only.
+        :param table_id: str (optional)
+          UUID of the table to read or write.
+        
+        :returns: :class:`GenerateTemporaryTableCredentialResponse`
+        """
+        body = {}
+        if operation is not None: body['operation'] = operation.value
+        if table_id is not None: body['table_id'] = table_id
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST',
+                           '/api/2.0/unity-catalog/temporary-table-credentials',
+                           body=body,
+                           headers=headers)
+        return GenerateTemporaryTableCredentialResponse.from_dict(res)
+
+
 class VolumesAPI:
     """Volumes are a Unity Catalog (UC) capability for accessing, storing, governing, organizing and processing
     files. Use cases include running machine learning on unstructured data such as image, audio, video, or PDF
@@ -9172,7 +9792,7 @@ class WorkspaceBindingsAPI:
     the new path (/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}) which introduces the
     ability to bind a securable in READ_ONLY mode (catalogs only).
     
-    Securables that support binding: - catalog"""
+    Securable types that support binding: - catalog - storage_credential - external_location"""
 
     def __init__(self, api_client):
         self._api = api_client
@@ -9196,26 +9816,48 @@ def get(self, name: str) -> CurrentWorkspaceBindings:
                            headers=headers)
         return CurrentWorkspaceBindings.from_dict(res)
 
-    def get_bindings(self, securable_type: str, securable_name: str) -> WorkspaceBindingsResponse:
+    def get_bindings(self,
+                     securable_type: GetBindingsSecurableType,
+                     securable_name: str,
+                     *,
+                     max_results: Optional[int] = None,
+                     page_token: Optional[str] = None) -> Iterator[WorkspaceBinding]:
         """Get securable workspace bindings.
         
         Gets workspace bindings of the securable. The caller must be a metastore admin or an owner of the
         securable.
         
-        :param securable_type: str
-          The type of the securable.
+        :param securable_type: :class:`GetBindingsSecurableType`
+          The type of the securable to bind to a workspace.
         :param securable_name: str
           The name of the securable.
+        :param max_results: int (optional)
+          Maximum number of workspace bindings to return. - When set to 0, the page length is set to a server
+          configured value (recommended); - When set to a value greater than 0, the page length is the minimum
+          of this value and a server configured value; - When set to a value less than 0, an invalid parameter
+          error is returned; - If not set, all the workspace bindings are returned (not recommended).
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
         
-        :returns: :class:`WorkspaceBindingsResponse`
+        :returns: Iterator over :class:`WorkspaceBinding`
         """
 
+        query = {}
+        if max_results is not None: query['max_results'] = max_results
+        if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
-        res = self._api.do('GET',
-                           f'/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}',
-                           headers=headers)
-        return WorkspaceBindingsResponse.from_dict(res)
+        while True:
+            json = self._api.do('GET',
+                                f'/api/2.1/unity-catalog/bindings/{securable_type.value}/{securable_name}',
+                                query=query,
+                                headers=headers)
+            if 'bindings' in json:
+                for v in json['bindings']:
+                    yield WorkspaceBinding.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
 
     def update(self,
                name: str,
@@ -9248,7 +9890,7 @@ def update(self,
         return CurrentWorkspaceBindings.from_dict(res)
 
     def update_bindings(self,
-                        securable_type: str,
+                        securable_type: UpdateBindingsSecurableType,
                         securable_name: str,
                         *,
                         add: Optional[List[WorkspaceBinding]] = None,
@@ -9258,8 +9900,8 @@ def update_bindings(self,
         Updates workspace bindings of the securable. The caller must be a metastore admin or an owner of the
         securable.
         
-        :param securable_type: str
-          The type of the securable.
+        :param securable_type: :class:`UpdateBindingsSecurableType`
+          The type of the securable to bind to a workspace.
         :param securable_name: str
           The name of the securable.
         :param add: List[:class:`WorkspaceBinding`] (optional)
@@ -9275,7 +9917,7 @@ def update_bindings(self,
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('PATCH',
-                           f'/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}',
+                           f'/api/2.1/unity-catalog/bindings/{securable_type.value}/{securable_name}',
                            body=body,
                            headers=headers)
         return WorkspaceBindingsResponse.from_dict(res)
diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py
index 8b954db8f..fabd258d0 100755
--- a/databricks/sdk/service/compute.py
+++ b/databricks/sdk/service/compute.py
@@ -598,8 +598,13 @@ class ClusterAttributes:
     """The ID of the cluster policy used to create the cluster if applicable."""
 
     runtime_engine: Optional[RuntimeEngine] = None
-    """Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime
-    engine is inferred from spark_version."""
+    """Determines the cluster's runtime engine, either standard or Photon.
+    
+    This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
+    `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+    
+    If left unspecified, the runtime engine defaults to standard unless the spark_version contains
+    -photon-, in which case Photon will be used."""
 
     single_user_name: Optional[str] = None
     """Single user name if data_security_mode is `SINGLE_USER`"""
@@ -690,6 +695,35 @@ def from_dict(cls, d: Dict[str, any]) -> ClusterAttributes:
                    workload_type=_from_dict(d, 'workload_type', WorkloadType))
 
 
+@dataclass
+class ClusterCompliance:
+    cluster_id: str
+    """Canonical unique identifier for a cluster."""
+
+    is_compliant: Optional[bool] = None
+    """Whether this cluster is in compliance with the latest version of its policy."""
+
+    violations: Optional[Dict[str, str]] = None
+    """An object containing key-value mappings representing the first 200 policy validation errors. The
+    keys indicate the path where the policy validation error is occurring. The values indicate an
+    error message describing the policy validation error."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ClusterCompliance into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.is_compliant is not None: body['is_compliant'] = self.is_compliant
+        if self.violations: body['violations'] = self.violations
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ClusterCompliance:
+        """Deserializes the ClusterCompliance from a dictionary."""
+        return cls(cluster_id=d.get('cluster_id', None),
+                   is_compliant=d.get('is_compliant', None),
+                   violations=d.get('violations', None))
+
+
 @dataclass
 class ClusterDetails:
     autoscale: Optional[AutoScale] = None
@@ -881,8 +915,13 @@ class ClusterDetails:
     """The ID of the cluster policy used to create the cluster if applicable."""
 
     runtime_engine: Optional[RuntimeEngine] = None
-    """Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime
-    engine is inferred from spark_version."""
+    """Determines the cluster's runtime engine, either standard or Photon.
+    
+    This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
+    `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+    
+    If left unspecified, the runtime engine defaults to standard unless the spark_version contains
+    -photon-, in which case Photon will be used."""
 
     single_user_name: Optional[str] = None
     """Single user name if data_security_mode is `SINGLE_USER`"""
@@ -1405,6 +1444,40 @@ def from_dict(cls, d: Dict[str, any]) -> ClusterPolicyPermissionsRequest:
                    cluster_policy_id=d.get('cluster_policy_id', None))
 
 
+@dataclass
+class ClusterSettingsChange:
+    """Represents a change to the cluster settings required for the cluster to become compliant with
+    its policy."""
+
+    field: Optional[str] = None
+    """The field where this change would be made."""
+
+    new_value: Optional[str] = None
+    """The new value of this field after enforcing policy compliance (either a number, a boolean, or a
+    string) converted to a string. This is intended to be read by a human. The typed new value of
+    this field can be retrieved by reading the settings field in the API response."""
+
+    previous_value: Optional[str] = None
+    """The previous value of this field before enforcing policy compliance (either a number, a boolean,
+    or a string) converted to a string. This is intended to be read by a human. The type of the
+    field can be retrieved by reading the settings field in the API response."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ClusterSettingsChange into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.field is not None: body['field'] = self.field
+        if self.new_value is not None: body['new_value'] = self.new_value
+        if self.previous_value is not None: body['previous_value'] = self.previous_value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ClusterSettingsChange:
+        """Deserializes the ClusterSettingsChange from a dictionary."""
+        return cls(field=d.get('field', None),
+                   new_value=d.get('new_value', None),
+                   previous_value=d.get('previous_value', None))
+
+
 @dataclass
 class ClusterSize:
     autoscale: Optional[AutoScale] = None
@@ -1561,8 +1634,13 @@ class ClusterSpec:
     """The ID of the cluster policy used to create the cluster if applicable."""
 
     runtime_engine: Optional[RuntimeEngine] = None
-    """Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime
-    engine is inferred from spark_version."""
+    """Determines the cluster's runtime engine, either standard or Photon.
+    
+    This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
+    `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+    
+    If left unspecified, the runtime engine defaults to standard unless the spark_version contains
+    -photon-, in which case Photon will be used."""
 
     single_user_name: Optional[str] = None
     """Single user name if data_security_mode is `SINGLE_USER`"""
@@ -1877,8 +1955,13 @@ class CreateCluster:
     """The ID of the cluster policy used to create the cluster if applicable."""
 
     runtime_engine: Optional[RuntimeEngine] = None
-    """Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime
-    engine is inferred from spark_version."""
+    """Determines the cluster's runtime engine, either standard or Photon.
+    
+    This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
+    `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+    
+    If left unspecified, the runtime engine defaults to standard unless the spark_version contains
+    -photon-, in which case Photon will be used."""
 
     single_user_name: Optional[str] = None
     """Single user name if data_security_mode is `SINGLE_USER`"""
@@ -2134,10 +2217,6 @@ def from_dict(cls, d: Dict[str, any]) -> CreateInstancePoolResponse:
 
 @dataclass
 class CreatePolicy:
-    name: str
-    """Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and
-    100 characters."""
-
     definition: Optional[str] = None
     """Policy definition document expressed in [Databricks Cluster Policy Definition Language].
     
@@ -2154,6 +2233,10 @@ class CreatePolicy:
     """Max number of clusters per user that can be active using this policy. If not present, there is
     no max limit."""
 
+    name: Optional[str] = None
+    """Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and
+    100 characters."""
+
     policy_family_definition_overrides: Optional[str] = None
     """Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON
     document must be passed as a string and cannot be embedded in the requests.
@@ -2724,8 +2807,13 @@ class EditCluster:
     """The ID of the cluster policy used to create the cluster if applicable."""
 
     runtime_engine: Optional[RuntimeEngine] = None
-    """Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime
-    engine is inferred from spark_version."""
+    """Determines the cluster's runtime engine, either standard or Photon.
+    
+    This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
+    `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+    
+    If left unspecified, the runtime engine defaults to standard unless the spark_version contains
+    -photon-, in which case Photon will be used."""
 
     single_user_name: Optional[str] = None
     """Single user name if data_security_mode is `SINGLE_USER`"""
@@ -2919,10 +3007,6 @@ class EditPolicy:
     policy_id: str
     """The ID of the policy to update."""
 
-    name: str
-    """Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and
-    100 characters."""
-
     definition: Optional[str] = None
     """Policy definition document expressed in [Databricks Cluster Policy Definition Language].
     
@@ -2939,6 +3023,10 @@ class EditPolicy:
     """Max number of clusters per user that can be active using this policy. If not present, there is
     no max limit."""
 
+    name: Optional[str] = None
+    """Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and
+    100 characters."""
+
     policy_family_definition_overrides: Optional[str] = None
     """Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON
     document must be passed as a string and cannot be embedded in the requests.
@@ -3010,6 +3098,52 @@ def from_dict(cls, d: Dict[str, any]) -> EditResponse:
         return cls()
 
 
+@dataclass
+class EnforceClusterComplianceRequest:
+    cluster_id: str
+    """The ID of the cluster you want to enforce policy compliance on."""
+
+    validate_only: Optional[bool] = None
+    """If set, previews the changes that would be made to a cluster to enforce compliance but does not
+    update the cluster."""
+
+    def as_dict(self) -> dict:
+        """Serializes the EnforceClusterComplianceRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.validate_only is not None: body['validate_only'] = self.validate_only
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> EnforceClusterComplianceRequest:
+        """Deserializes the EnforceClusterComplianceRequest from a dictionary."""
+        return cls(cluster_id=d.get('cluster_id', None), validate_only=d.get('validate_only', None))
+
+
+@dataclass
+class EnforceClusterComplianceResponse:
+    changes: Optional[List[ClusterSettingsChange]] = None
+    """A list of changes that have been made to the cluster settings for the cluster to become
+    compliant with its policy."""
+
+    has_changes: Optional[bool] = None
+    """Whether any changes have been made to the cluster settings for the cluster to become compliant
+    with its policy."""
+
+    def as_dict(self) -> dict:
+        """Serializes the EnforceClusterComplianceResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.changes: body['changes'] = [v.as_dict() for v in self.changes]
+        if self.has_changes is not None: body['has_changes'] = self.has_changes
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> EnforceClusterComplianceResponse:
+        """Deserializes the EnforceClusterComplianceResponse from a dictionary."""
+        return cls(changes=_repeated_dict(d, 'changes', ClusterSettingsChange),
+                   has_changes=d.get('has_changes', None))
+
+
 @dataclass
 class Environment:
     """The environment entity used to preserve serverless environment side panel and jobs' environment
@@ -3281,6 +3415,30 @@ def from_dict(cls, d: Dict[str, any]) -> GcsStorageInfo:
         return cls(destination=d.get('destination', None))
 
 
+@dataclass
+class GetClusterComplianceResponse:
+    is_compliant: Optional[bool] = None
+    """Whether the cluster is compliant with its policy or not. Clusters could be out of compliance if
+    the policy was updated after the cluster was last edited."""
+
+    violations: Optional[Dict[str, str]] = None
+    """An object containing key-value mappings representing the first 200 policy validation errors. The
+    keys indicate the path where the policy validation error is occurring. The values indicate an
+    error message describing the policy validation error."""
+
+    def as_dict(self) -> dict:
+        """Serializes the GetClusterComplianceResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.is_compliant is not None: body['is_compliant'] = self.is_compliant
+        if self.violations: body['violations'] = self.violations
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GetClusterComplianceResponse:
+        """Deserializes the GetClusterComplianceResponse from a dictionary."""
+        return cls(is_compliant=d.get('is_compliant', None), violations=d.get('violations', None))
+
+
 @dataclass
 class GetClusterPermissionLevelsResponse:
     permission_levels: Optional[List[ClusterPermissionsDescription]] = None
@@ -4491,11 +4649,8 @@ class Library:
     """Specification of a CRAN library to be installed as part of the library"""
 
     egg: Optional[str] = None
-    """URI of the egg library to install. Supported URIs include Workspace paths, Unity Catalog Volumes
-    paths, and S3 URIs. For example: `{ "egg": "/Workspace/path/to/library.egg" }`, `{ "egg" :
-    "/Volumes/path/to/library.egg" }` or `{ "egg": "s3://my-bucket/library.egg" }`. If S3 is used,
-    please make sure the cluster has read access on the library. You may need to launch the cluster
-    with an IAM role to access the S3 URI."""
+    """Deprecated. URI of the egg library to install. Installing Python egg files is deprecated and is
+    not supported in Databricks Runtime 14.0 and above."""
 
     jar: Optional[str] = None
     """URI of the JAR library to install. Supported URIs include Workspace paths, Unity Catalog Volumes
@@ -4633,21 +4788,132 @@ def from_dict(cls, d: Dict[str, any]) -> ListAvailableZonesResponse:
         return cls(default_zone=d.get('default_zone', None), zones=d.get('zones', None))
 
 
+@dataclass
+class ListClusterCompliancesResponse:
+    clusters: Optional[List[ClusterCompliance]] = None
+    """A list of clusters and their policy compliance statuses."""
+
+    next_page_token: Optional[str] = None
+    """This field represents the pagination token to retrieve the next page of results. If the value is
+    "", it means no further results for the request."""
+
+    prev_page_token: Optional[str] = None
+    """This field represents the pagination token to retrieve the previous page of results. If the
+    value is "", it means no further results for the request."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListClusterCompliancesResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.clusters: body['clusters'] = [v.as_dict() for v in self.clusters]
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListClusterCompliancesResponse:
+        """Deserializes the ListClusterCompliancesResponse from a dictionary."""
+        return cls(clusters=_repeated_dict(d, 'clusters', ClusterCompliance),
+                   next_page_token=d.get('next_page_token', None),
+                   prev_page_token=d.get('prev_page_token', None))
+
+
+@dataclass
+class ListClustersFilterBy:
+    cluster_sources: Optional[List[ClusterSource]] = None
+    """The source of cluster creation."""
+
+    cluster_states: Optional[List[State]] = None
+    """The current state of the clusters."""
+
+    is_pinned: Optional[bool] = None
+    """Whether the clusters are pinned or not."""
+
+    policy_id: Optional[str] = None
+    """The ID of the cluster policy used to create the cluster if applicable."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListClustersFilterBy into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.cluster_sources: body['cluster_sources'] = [v.value for v in self.cluster_sources]
+        if self.cluster_states: body['cluster_states'] = [v.value for v in self.cluster_states]
+        if self.is_pinned is not None: body['is_pinned'] = self.is_pinned
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListClustersFilterBy:
+        """Deserializes the ListClustersFilterBy from a dictionary."""
+        return cls(cluster_sources=_repeated_enum(d, 'cluster_sources', ClusterSource),
+                   cluster_states=_repeated_enum(d, 'cluster_states', State),
+                   is_pinned=d.get('is_pinned', None),
+                   policy_id=d.get('policy_id', None))
+
+
 @dataclass
 class ListClustersResponse:
     clusters: Optional[List[ClusterDetails]] = None
     """"""
 
+    next_page_token: Optional[str] = None
+    """This field represents the pagination token to retrieve the next page of results. If the value is
+    "", it means no further results for the request."""
+
+    prev_page_token: Optional[str] = None
+    """This field represents the pagination token to retrieve the previous page of results. If the
+    value is "", it means no further results for the request."""
+
     def as_dict(self) -> dict:
         """Serializes the ListClustersResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.clusters: body['clusters'] = [v.as_dict() for v in self.clusters]
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListClustersResponse:
         """Deserializes the ListClustersResponse from a dictionary."""
-        return cls(clusters=_repeated_dict(d, 'clusters', ClusterDetails))
+        return cls(clusters=_repeated_dict(d, 'clusters', ClusterDetails),
+                   next_page_token=d.get('next_page_token', None),
+                   prev_page_token=d.get('prev_page_token', None))
+
+
+@dataclass
+class ListClustersSortBy:
+    direction: Optional[ListClustersSortByDirection] = None
+    """The direction to sort by."""
+
+    field: Optional[ListClustersSortByField] = None
+    """The sorting criteria. By default, clusters are sorted by 3 columns from highest to lowest
+    precedence: cluster state, pinned or unpinned, then cluster name."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListClustersSortBy into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.direction is not None: body['direction'] = self.direction.value
+        if self.field is not None: body['field'] = self.field.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListClustersSortBy:
+        """Deserializes the ListClustersSortBy from a dictionary."""
+        return cls(direction=_enum(d, 'direction', ListClustersSortByDirection),
+                   field=_enum(d, 'field', ListClustersSortByField))
+
+
+class ListClustersSortByDirection(Enum):
+    """The direction to sort by."""
+
+    ASC = 'ASC'
+    DESC = 'DESC'
+
+
+class ListClustersSortByField(Enum):
+    """The sorting criteria. By default, clusters are sorted by 3 columns from highest to lowest
+    precedence: cluster state, pinned or unpinned, then cluster name."""
+
+    CLUSTER_NAME = 'CLUSTER_NAME'
+    DEFAULT = 'DEFAULT'
 
 
 @dataclass
@@ -4735,13 +5001,13 @@ def from_dict(cls, d: Dict[str, any]) -> ListPoliciesResponse:
 
 @dataclass
 class ListPolicyFamiliesResponse:
-    policy_families: List[PolicyFamily]
-    """List of policy families."""
-
     next_page_token: Optional[str] = None
     """A token that can be used to get the next page of results. If not present, there are no more
     results to show."""
 
+    policy_families: Optional[List[PolicyFamily]] = None
+    """List of policy families."""
+
     def as_dict(self) -> dict:
         """Serializes the ListPolicyFamiliesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -4763,6 +5029,7 @@ class ListSortColumn(Enum):
 
 
 class ListSortOrder(Enum):
+    """A generic ordering enum for list-based queries."""
 
     ASC = 'ASC'
     DESC = 'DESC'
@@ -5089,6 +5356,8 @@ def from_dict(cls, d: Dict[str, any]) -> PinClusterResponse:
 
 @dataclass
 class Policy:
+    """Describes a Cluster Policy entity."""
+
     created_at_timestamp: Optional[int] = None
     """Creation time. The timestamp (in millisecond) when this Cluster Policy was created."""
 
@@ -5130,7 +5399,11 @@ class Policy:
     [Databricks Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html"""
 
     policy_family_id: Optional[str] = None
-    """ID of the policy family."""
+    """ID of the policy family. The cluster policy's policy definition inherits the policy family's
+    policy definition.
+    
+    Cannot be used with `definition`. Use `policy_family_definition_overrides` instead to customize
+    the policy definition."""
 
     policy_id: Optional[str] = None
     """Canonical unique identifier for the Cluster Policy."""
@@ -5170,20 +5443,20 @@ def from_dict(cls, d: Dict[str, any]) -> Policy:
 
 @dataclass
 class PolicyFamily:
-    policy_family_id: str
-    """ID of the policy family."""
-
-    name: str
-    """Name of the policy family."""
-
-    description: str
-    """Human-readable description of the purpose of the policy family."""
-
-    definition: str
+    definition: Optional[str] = None
     """Policy definition document expressed in [Databricks Cluster Policy Definition Language].
     
     [Databricks Cluster Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html"""
 
+    description: Optional[str] = None
+    """Human-readable description of the purpose of the policy family."""
+
+    name: Optional[str] = None
+    """Name of the policy family."""
+
+    policy_family_id: Optional[str] = None
+    """Unique identifier for the policy family."""
+
     def as_dict(self) -> dict:
         """Serializes the PolicyFamily into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -5429,8 +5702,13 @@ def from_dict(cls, d: Dict[str, any]) -> Results:
 
 
 class RuntimeEngine(Enum):
-    """Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime
-    engine is inferred from spark_version."""
+    """Determines the cluster's runtime engine, either standard or Photon.
+    
+    This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
+    `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+    
+    If left unspecified, the runtime engine defaults to standard unless the spark_version contains
+    -photon-, in which case Photon will be used."""
 
     NULL = 'NULL'
     PHOTON = 'PHOTON'
@@ -5824,6 +6102,265 @@ def from_dict(cls, d: Dict[str, any]) -> UnpinClusterResponse:
         return cls()
 
 
+@dataclass
+class UpdateCluster:
+    cluster_id: str
+    """ID of the cluster."""
+
+    update_mask: str
+    """Specifies which fields of the cluster will be updated. This is required in the POST request. The
+    update mask should be supplied as a single string. To specify multiple fields, separate them
+    with commas (no spaces). To delete a field from a cluster configuration, add it to the
+    `update_mask` string but omit it from the `cluster` object."""
+
+    cluster: Optional[UpdateClusterResource] = None
+    """The cluster to be updated."""
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateCluster into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.cluster: body['cluster'] = self.cluster.as_dict()
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.update_mask is not None: body['update_mask'] = self.update_mask
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateCluster:
+        """Deserializes the UpdateCluster from a dictionary."""
+        return cls(cluster=_from_dict(d, 'cluster', UpdateClusterResource),
+                   cluster_id=d.get('cluster_id', None),
+                   update_mask=d.get('update_mask', None))
+
+
+@dataclass
+class UpdateClusterResource:
+    autoscale: Optional[AutoScale] = None
+    """Parameters needed in order to automatically scale clusters up and down based on load. Note:
+    autoscaling works best with DB runtime versions 3.0 or later."""
+
+    autotermination_minutes: Optional[int] = None
+    """Automatically terminates the cluster after it is inactive for this time in minutes. If not set,
+    this cluster will not be automatically terminated. If specified, the threshold must be between
+    10 and 10000 minutes. Users can also set this value to 0 to explicitly disable automatic
+    termination."""
+
+    aws_attributes: Optional[AwsAttributes] = None
+    """Attributes related to clusters running on Amazon Web Services. If not specified at cluster
+    creation, a set of default values will be used."""
+
+    azure_attributes: Optional[AzureAttributes] = None
+    """Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation,
+    a set of default values will be used."""
+
+    cluster_log_conf: Optional[ClusterLogConf] = None
+    """The configuration for delivering spark logs to a long-term storage destination. Two kinds of
+    destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster.
+    If the conf is given, the logs will be delivered to the destination every `5 mins`. The
+    destination of driver logs is `$destination/$clusterId/driver`, while the destination of
+    executor logs is `$destination/$clusterId/executor`."""
+
+    cluster_name: Optional[str] = None
+    """Cluster name requested by the user. This doesn't have to be unique. If not specified at
+    creation, the cluster name will be an empty string."""
+
+    custom_tags: Optional[Dict[str, str]] = None
+    """Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS
+    instances and EBS volumes) with these tags in addition to `default_tags`. Notes:
+    
+    - Currently, Databricks allows at most 45 custom tags
+    
+    - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster
+    tags"""
+
+    data_security_mode: Optional[DataSecurityMode] = None
+    """Data security mode decides what data governance model to use when accessing data from a cluster.
+    
+    * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features
+    are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively
+    used by a single user specified in `single_user_name`. Most programming languages, cluster
+    features and data governance features are available in this mode. * `USER_ISOLATION`: A secure
+    cluster that can be shared by multiple users. Cluster users are fully isolated so that they
+    cannot see each other's data and credentials. Most data governance features are supported in
+    this mode. But programming languages and cluster features might be limited.
+    
+    The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for
+    future Databricks Runtime versions:
+    
+    * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. *
+    `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high
+    concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy
+    Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that
+    doesn’t have UC nor passthrough enabled."""
+
+    docker_image: Optional[DockerImage] = None
+
+    driver_instance_pool_id: Optional[str] = None
+    """The optional ID of the instance pool for the driver of the cluster belongs. The pool cluster
+    uses the instance pool with id (instance_pool_id) if the driver pool is not assigned."""
+
+    driver_node_type_id: Optional[str] = None
+    """The node type of the Spark driver. Note that this field is optional; if unset, the driver node
+    type will be set as the same value as `node_type_id` defined above."""
+
+    enable_elastic_disk: Optional[bool] = None
+    """Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk
+    space when its Spark workers are running low on disk space. This feature requires specific AWS
+    permissions to function correctly - refer to the User Guide for more details."""
+
+    enable_local_disk_encryption: Optional[bool] = None
+    """Whether to enable LUKS on cluster VMs' local disks"""
+
+    gcp_attributes: Optional[GcpAttributes] = None
+    """Attributes related to clusters running on Google Cloud Platform. If not specified at cluster
+    creation, a set of default values will be used."""
+
+    init_scripts: Optional[List[InitScriptInfo]] = None
+    """The configuration for storing init scripts. Any number of destinations can be specified. The
+    scripts are executed sequentially in the order provided. If `cluster_log_conf` is specified,
+    init script logs are sent to `//init_scripts`."""
+
+    instance_pool_id: Optional[str] = None
+    """The optional ID of the instance pool to which the cluster belongs."""
+
+    node_type_id: Optional[str] = None
+    """This field encodes, through a single value, the resources available to each of the Spark nodes
+    in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or
+    compute intensive workloads. A list of available node types can be retrieved by using the
+    :method:clusters/listNodeTypes API call."""
+
+    num_workers: Optional[int] = None
+    """Number of worker nodes that this cluster should have. A cluster has one Spark Driver and
+    `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.
+    
+    Note: When reading the properties of a cluster, this field reflects the desired number of
+    workers rather than the actual current number of workers. For instance, if a cluster is resized
+    from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10
+    workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the
+    new nodes are provisioned."""
+
+    policy_id: Optional[str] = None
+    """The ID of the cluster policy used to create the cluster if applicable."""
+
+    runtime_engine: Optional[RuntimeEngine] = None
+    """Determines the cluster's runtime engine, either standard or Photon.
+    
+    This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
+    `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+    
+    If left unspecified, the runtime engine defaults to standard unless the spark_version contains
+    -photon-, in which case Photon will be used."""
+
+    single_user_name: Optional[str] = None
+    """Single user name if data_security_mode is `SINGLE_USER`"""
+
+    spark_conf: Optional[Dict[str, str]] = None
+    """An object containing a set of optional, user-specified Spark configuration key-value pairs.
+    Users can also pass in a string of extra JVM options to the driver and the executors via
+    `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively."""
+
+    spark_env_vars: Optional[Dict[str, str]] = None
+    """An object containing a set of optional, user-specified environment variable key-value pairs.
+    Please note that key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`)
+    while launching the driver and workers.
+    
+    In order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending them
+    to `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all default
+    databricks managed environmental variables are included as well.
+    
+    Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS":
+    "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS
+    -Dspark.shuffle.service.enabled=true"}`"""
+
+    spark_version: Optional[str] = None
+    """The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can
+    be retrieved by using the :method:clusters/sparkVersions API call."""
+
+    ssh_public_keys: Optional[List[str]] = None
+    """SSH public key contents that will be added to each Spark node in this cluster. The corresponding
+    private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can
+    be specified."""
+
+    workload_type: Optional[WorkloadType] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateClusterResource into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.autoscale: body['autoscale'] = self.autoscale.as_dict()
+        if self.autotermination_minutes is not None:
+            body['autotermination_minutes'] = self.autotermination_minutes
+        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict()
+        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict()
+        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf.as_dict()
+        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode.value
+        if self.docker_image: body['docker_image'] = self.docker_image.as_dict()
+        if self.driver_instance_pool_id is not None:
+            body['driver_instance_pool_id'] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+        if self.enable_local_disk_encryption is not None:
+            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
+        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
+        if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts]
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+        if self.num_workers is not None: body['num_workers'] = self.num_workers
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine.value
+        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
+        if self.spark_conf: body['spark_conf'] = self.spark_conf
+        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
+        if self.spark_version is not None: body['spark_version'] = self.spark_version
+        if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys]
+        if self.workload_type: body['workload_type'] = self.workload_type.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateClusterResource:
+        """Deserializes the UpdateClusterResource from a dictionary."""
+        return cls(autoscale=_from_dict(d, 'autoscale', AutoScale),
+                   autotermination_minutes=d.get('autotermination_minutes', None),
+                   aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes),
+                   azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes),
+                   cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf),
+                   cluster_name=d.get('cluster_name', None),
+                   custom_tags=d.get('custom_tags', None),
+                   data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode),
+                   docker_image=_from_dict(d, 'docker_image', DockerImage),
+                   driver_instance_pool_id=d.get('driver_instance_pool_id', None),
+                   driver_node_type_id=d.get('driver_node_type_id', None),
+                   enable_elastic_disk=d.get('enable_elastic_disk', None),
+                   enable_local_disk_encryption=d.get('enable_local_disk_encryption', None),
+                   gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes),
+                   init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo),
+                   instance_pool_id=d.get('instance_pool_id', None),
+                   node_type_id=d.get('node_type_id', None),
+                   num_workers=d.get('num_workers', None),
+                   policy_id=d.get('policy_id', None),
+                   runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine),
+                   single_user_name=d.get('single_user_name', None),
+                   spark_conf=d.get('spark_conf', None),
+                   spark_env_vars=d.get('spark_env_vars', None),
+                   spark_version=d.get('spark_version', None),
+                   ssh_public_keys=d.get('ssh_public_keys', None),
+                   workload_type=_from_dict(d, 'workload_type', WorkloadType))
+
+
+@dataclass
+class UpdateClusterResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateClusterResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateClusterResponse:
+        """Deserializes the UpdateClusterResponse from a dictionary."""
+        return cls()
+
+
 @dataclass
 class UpdateResponse:
 
@@ -5912,21 +6449,18 @@ def __init__(self, api_client):
         self._api = api_client
 
     def create(self,
-               name: str,
                *,
                definition: Optional[str] = None,
                description: Optional[str] = None,
                libraries: Optional[List[Library]] = None,
                max_clusters_per_user: Optional[int] = None,
+               name: Optional[str] = None,
                policy_family_definition_overrides: Optional[str] = None,
                policy_family_id: Optional[str] = None) -> CreatePolicyResponse:
         """Create a new policy.
         
         Creates a new policy with prescribed settings.
         
-        :param name: str
-          Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100
-          characters.
         :param definition: str (optional)
           Policy definition document expressed in [Databricks Cluster Policy Definition Language].
           
@@ -5939,6 +6473,9 @@ def create(self,
         :param max_clusters_per_user: int (optional)
           Max number of clusters per user that can be active using this policy. If not present, there is no
           max limit.
+        :param name: str (optional)
+          Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100
+          characters.
         :param policy_family_definition_overrides: str (optional)
           Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON
           document must be passed as a string and cannot be embedded in the requests.
@@ -5988,12 +6525,12 @@ def delete(self, policy_id: str):
 
     def edit(self,
              policy_id: str,
-             name: str,
              *,
              definition: Optional[str] = None,
              description: Optional[str] = None,
              libraries: Optional[List[Library]] = None,
              max_clusters_per_user: Optional[int] = None,
+             name: Optional[str] = None,
              policy_family_definition_overrides: Optional[str] = None,
              policy_family_id: Optional[str] = None):
         """Update a cluster policy.
@@ -6003,9 +6540,6 @@ def edit(self,
         
         :param policy_id: str
           The ID of the policy to update.
-        :param name: str
-          Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100
-          characters.
         :param definition: str (optional)
           Policy definition document expressed in [Databricks Cluster Policy Definition Language].
           
@@ -6018,6 +6552,9 @@ def edit(self,
         :param max_clusters_per_user: int (optional)
           Max number of clusters per user that can be active using this policy. If not present, there is no
           max limit.
+        :param name: str (optional)
+          Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100
+          characters.
         :param policy_family_definition_overrides: str (optional)
           Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON
           document must be passed as a string and cannot be embedded in the requests.
@@ -6055,7 +6592,7 @@ def get(self, policy_id: str) -> Policy:
         Get a cluster policy entity. Creation and editing is available to admins only.
         
         :param policy_id: str
-          Canonical unique identifier for the cluster policy.
+          Canonical unique identifier for the Cluster Policy.
         
         :returns: :class:`Policy`
         """
@@ -6205,9 +6742,8 @@ class ClustersAPI:
     restart an all-purpose cluster. Multiple users can share such clusters to do collaborative interactive
     analysis.
     
-    IMPORTANT: Databricks retains cluster configuration information for up to 200 all-purpose clusters
-    terminated in the last 30 days and up to 30 job clusters recently terminated by the job scheduler. To keep
-    an all-purpose cluster configuration even after it has been terminated for more than 30 days, an
+    IMPORTANT: Databricks retains cluster configuration information for terminated clusters for 30 days. To
+    keep an all-purpose cluster configuration even after it has been terminated for more than 30 days, an
     administrator can pin a cluster to the cluster list."""
 
     def __init__(self, api_client):
@@ -6294,7 +6830,7 @@ def change_owner(self, cluster_id: str, owner_username: str):
         if owner_username is not None: body['owner_username'] = owner_username
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-        self._api.do('POST', '/api/2.0/clusters/change-owner', body=body, headers=headers)
+        self._api.do('POST', '/api/2.1/clusters/change-owner', body=body, headers=headers)
 
     def create(self,
                spark_version: str,
@@ -6335,6 +6871,11 @@ def create(self,
         If Databricks acquires at least 85% of the requested on-demand nodes, cluster creation will succeed.
         Otherwise the cluster will terminate with an informative error message.
         
+        Rather than authoring the cluster's JSON definition from scratch, Databricks recommends filling out
+        the [create compute UI] and then copying the generated JSON definition from the UI.
+        
+        [create compute UI]: https://docs.databricks.com/compute/configure.html
+        
         :param spark_version: str
           The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can be
           retrieved by using the :method:clusters/sparkVersions API call.
@@ -6430,8 +6971,13 @@ def create(self,
         :param policy_id: str (optional)
           The ID of the cluster policy used to create the cluster if applicable.
         :param runtime_engine: :class:`RuntimeEngine` (optional)
-          Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime engine
-          is inferred from spark_version.
+          Determines the cluster's runtime engine, either standard or Photon.
+          
+          This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
+          `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+          
+          If left unspecified, the runtime engine defaults to standard unless the spark_version contains
+          -photon-, in which case Photon will be used.
         :param single_user_name: str (optional)
           Single user name if data_security_mode is `SINGLE_USER`
         :param spark_conf: Dict[str,str] (optional)
@@ -6493,7 +7039,7 @@ def create(self,
         if workload_type is not None: body['workload_type'] = workload_type.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-        op_response = self._api.do('POST', '/api/2.0/clusters/create', body=body, headers=headers)
+        op_response = self._api.do('POST', '/api/2.1/clusters/create', body=body, headers=headers)
         return Wait(self.wait_get_cluster_running,
                     response=CreateClusterResponse.from_dict(op_response),
                     cluster_id=op_response['cluster_id'])
@@ -6577,7 +7123,7 @@ def delete(self, cluster_id: str) -> Wait[ClusterDetails]:
         if cluster_id is not None: body['cluster_id'] = cluster_id
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-        op_response = self._api.do('POST', '/api/2.0/clusters/delete', body=body, headers=headers)
+        op_response = self._api.do('POST', '/api/2.1/clusters/delete', body=body, headers=headers)
         return Wait(self.wait_get_cluster_terminated,
                     response=DeleteClusterResponse.from_dict(op_response),
                     cluster_id=cluster_id)
@@ -6724,8 +7270,13 @@ def edit(self,
         :param policy_id: str (optional)
           The ID of the cluster policy used to create the cluster if applicable.
         :param runtime_engine: :class:`RuntimeEngine` (optional)
-          Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime engine
-          is inferred from spark_version.
+          Determines the cluster's runtime engine, either standard or Photon.
+          
+          This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
+          `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+          
+          If left unspecified, the runtime engine defaults to standard unless the spark_version contains
+          -photon-, in which case Photon will be used.
         :param single_user_name: str (optional)
           Single user name if data_security_mode is `SINGLE_USER`
         :param spark_conf: Dict[str,str] (optional)
@@ -6787,7 +7338,7 @@ def edit(self,
         if workload_type is not None: body['workload_type'] = workload_type.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-        op_response = self._api.do('POST', '/api/2.0/clusters/edit', body=body, headers=headers)
+        op_response = self._api.do('POST', '/api/2.1/clusters/edit', body=body, headers=headers)
         return Wait(self.wait_get_cluster_running,
                     response=EditClusterResponse.from_dict(op_response),
                     cluster_id=cluster_id)
@@ -6898,7 +7449,7 @@ def events(self,
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         while True:
-            json = self._api.do('POST', '/api/2.0/clusters/events', body=body, headers=headers)
+            json = self._api.do('POST', '/api/2.1/clusters/events', body=body, headers=headers)
             if 'events' in json:
                 for v in json['events']:
                     yield ClusterEvent.from_dict(v)
@@ -6922,7 +7473,7 @@ def get(self, cluster_id: str) -> ClusterDetails:
         if cluster_id is not None: query['cluster_id'] = cluster_id
         headers = {'Accept': 'application/json', }
 
-        res = self._api.do('GET', '/api/2.0/clusters/get', query=query, headers=headers)
+        res = self._api.do('GET', '/api/2.1/clusters/get', query=query, headers=headers)
         return ClusterDetails.from_dict(res)
 
     def get_permission_levels(self, cluster_id: str) -> GetClusterPermissionLevelsResponse:
@@ -6959,33 +7510,46 @@ def get_permissions(self, cluster_id: str) -> ClusterPermissions:
         res = self._api.do('GET', f'/api/2.0/permissions/clusters/{cluster_id}', headers=headers)
         return ClusterPermissions.from_dict(res)
 
-    def list(self, *, can_use_client: Optional[str] = None) -> Iterator[ClusterDetails]:
-        """List all clusters.
-        
-        Return information about all pinned clusters, active clusters, up to 200 of the most recently
-        terminated all-purpose clusters in the past 30 days, and up to 30 of the most recently terminated job
-        clusters in the past 30 days.
-        
-        For example, if there is 1 pinned cluster, 4 active clusters, 45 terminated all-purpose clusters in
-        the past 30 days, and 50 terminated job clusters in the past 30 days, then this API returns the 1
-        pinned cluster, 4 active clusters, all 45 terminated all-purpose clusters, and the 30 most recently
-        terminated job clusters.
-        
-        :param can_use_client: str (optional)
-          Filter clusters based on what type of client it can be used for. Could be either NOTEBOOKS or JOBS.
-          No input for this field will get all clusters in the workspace without filtering on its supported
-          client
+    def list(self,
+             *,
+             filter_by: Optional[ListClustersFilterBy] = None,
+             page_size: Optional[int] = None,
+             page_token: Optional[str] = None,
+             sort_by: Optional[ListClustersSortBy] = None) -> Iterator[ClusterDetails]:
+        """List clusters.
+        
+        Return information about all pinned and active clusters, and all clusters terminated within the last
+        30 days. Clusters terminated prior to this period are not included.
+        
+        :param filter_by: :class:`ListClustersFilterBy` (optional)
+          Filters to apply to the list of clusters.
+        :param page_size: int (optional)
+          Use this field to specify the maximum number of results to be returned by the server. The server may
+          further constrain the maximum number of results returned in a single page.
+        :param page_token: str (optional)
+          Use next_page_token or prev_page_token returned from the previous request to list the next or
+          previous page of clusters respectively.
+        :param sort_by: :class:`ListClustersSortBy` (optional)
+          Sort the list of clusters by a specific criteria.
         
         :returns: Iterator over :class:`ClusterDetails`
         """
 
         query = {}
-        if can_use_client is not None: query['can_use_client'] = can_use_client
+        if filter_by is not None: query['filter_by'] = filter_by.as_dict()
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        if sort_by is not None: query['sort_by'] = sort_by.as_dict()
         headers = {'Accept': 'application/json', }
 
-        json = self._api.do('GET', '/api/2.0/clusters/list', query=query, headers=headers)
-        parsed = ListClustersResponse.from_dict(json).clusters
-        return parsed if parsed is not None else []
+        while True:
+            json = self._api.do('GET', '/api/2.1/clusters/list', query=query, headers=headers)
+            if 'clusters' in json:
+                for v in json['clusters']:
+                    yield ClusterDetails.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
 
     def list_node_types(self) -> ListNodeTypesResponse:
         """List node types.
@@ -6997,7 +7561,7 @@ def list_node_types(self) -> ListNodeTypesResponse:
 
         headers = {'Accept': 'application/json', }
 
-        res = self._api.do('GET', '/api/2.0/clusters/list-node-types', headers=headers)
+        res = self._api.do('GET', '/api/2.1/clusters/list-node-types', headers=headers)
         return ListNodeTypesResponse.from_dict(res)
 
     def list_zones(self) -> ListAvailableZonesResponse:
@@ -7011,7 +7575,7 @@ def list_zones(self) -> ListAvailableZonesResponse:
 
         headers = {'Accept': 'application/json', }
 
-        res = self._api.do('GET', '/api/2.0/clusters/list-zones', headers=headers)
+        res = self._api.do('GET', '/api/2.1/clusters/list-zones', headers=headers)
         return ListAvailableZonesResponse.from_dict(res)
 
     def permanent_delete(self, cluster_id: str):
@@ -7032,7 +7596,7 @@ def permanent_delete(self, cluster_id: str):
         if cluster_id is not None: body['cluster_id'] = cluster_id
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-        self._api.do('POST', '/api/2.0/clusters/permanent-delete', body=body, headers=headers)
+        self._api.do('POST', '/api/2.1/clusters/permanent-delete', body=body, headers=headers)
 
     def pin(self, cluster_id: str):
         """Pin cluster.
@@ -7049,7 +7613,7 @@ def pin(self, cluster_id: str):
         if cluster_id is not None: body['cluster_id'] = cluster_id
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-        self._api.do('POST', '/api/2.0/clusters/pin', body=body, headers=headers)
+        self._api.do('POST', '/api/2.1/clusters/pin', body=body, headers=headers)
 
     def resize(self,
                cluster_id: str,
@@ -7086,7 +7650,7 @@ def resize(self,
         if num_workers is not None: body['num_workers'] = num_workers
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-        op_response = self._api.do('POST', '/api/2.0/clusters/resize', body=body, headers=headers)
+        op_response = self._api.do('POST', '/api/2.1/clusters/resize', body=body, headers=headers)
         return Wait(self.wait_get_cluster_running,
                     response=ResizeClusterResponse.from_dict(op_response),
                     cluster_id=cluster_id)
@@ -7120,7 +7684,7 @@ def restart(self, cluster_id: str, *, restart_user: Optional[str] = None) -> Wai
         if restart_user is not None: body['restart_user'] = restart_user
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-        op_response = self._api.do('POST', '/api/2.0/clusters/restart', body=body, headers=headers)
+        op_response = self._api.do('POST', '/api/2.1/clusters/restart', body=body, headers=headers)
         return Wait(self.wait_get_cluster_running,
                     response=RestartClusterResponse.from_dict(op_response),
                     cluster_id=cluster_id)
@@ -7165,7 +7729,7 @@ def spark_versions(self) -> GetSparkVersionsResponse:
 
         headers = {'Accept': 'application/json', }
 
-        res = self._api.do('GET', '/api/2.0/clusters/spark-versions', headers=headers)
+        res = self._api.do('GET', '/api/2.1/clusters/spark-versions', headers=headers)
         return GetSparkVersionsResponse.from_dict(res)
 
     def start(self, cluster_id: str) -> Wait[ClusterDetails]:
@@ -7189,7 +7753,7 @@ def start(self, cluster_id: str) -> Wait[ClusterDetails]:
         if cluster_id is not None: body['cluster_id'] = cluster_id
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-        op_response = self._api.do('POST', '/api/2.0/clusters/start', body=body, headers=headers)
+        op_response = self._api.do('POST', '/api/2.1/clusters/start', body=body, headers=headers)
         return Wait(self.wait_get_cluster_running,
                     response=StartClusterResponse.from_dict(op_response),
                     cluster_id=cluster_id)
@@ -7213,7 +7777,58 @@ def unpin(self, cluster_id: str):
         if cluster_id is not None: body['cluster_id'] = cluster_id
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-        self._api.do('POST', '/api/2.0/clusters/unpin', body=body, headers=headers)
+        self._api.do('POST', '/api/2.1/clusters/unpin', body=body, headers=headers)
+
+    def update(self,
+               cluster_id: str,
+               update_mask: str,
+               *,
+               cluster: Optional[UpdateClusterResource] = None) -> Wait[ClusterDetails]:
+        """Update cluster configuration (partial).
+        
+        Updates the configuration of a cluster to match the partial set of attributes and size. Denote which
+        fields to update using the `update_mask` field in the request body. A cluster can be updated if it is
+        in a `RUNNING` or `TERMINATED` state. If a cluster is updated while in a `RUNNING` state, it will be
+        restarted so that the new attributes can take effect. If a cluster is updated while in a `TERMINATED`
+        state, it will remain `TERMINATED`. The updated attributes will take effect the next time the cluster
+        is started using the `clusters/start` API. Attempts to update a cluster in any other state will be
+        rejected with an `INVALID_STATE` error code. Clusters created by the Databricks Jobs service cannot be
+        updated.
+        
+        :param cluster_id: str
+          ID of the cluster.
+        :param update_mask: str
+          Specifies which fields of the cluster will be updated. This is required in the POST request. The
+          update mask should be supplied as a single string. To specify multiple fields, separate them with
+          commas (no spaces). To delete a field from a cluster configuration, add it to the `update_mask`
+          string but omit it from the `cluster` object.
+        :param cluster: :class:`UpdateClusterResource` (optional)
+          The cluster to be updated.
+        
+        :returns:
+          Long-running operation waiter for :class:`ClusterDetails`.
+          See :method:wait_get_cluster_running for more details.
+        """
+        body = {}
+        if cluster is not None: body['cluster'] = cluster.as_dict()
+        if cluster_id is not None: body['cluster_id'] = cluster_id
+        if update_mask is not None: body['update_mask'] = update_mask
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        op_response = self._api.do('POST', '/api/2.1/clusters/update', body=body, headers=headers)
+        return Wait(self.wait_get_cluster_running,
+                    response=UpdateClusterResponse.from_dict(op_response),
+                    cluster_id=cluster_id)
+
+    def update_and_wait(
+        self,
+        cluster_id: str,
+        update_mask: str,
+        *,
+        cluster: Optional[UpdateClusterResource] = None,
+        timeout=timedelta(minutes=20)) -> ClusterDetails:
+        return self.update(cluster=cluster, cluster_id=cluster_id,
+                           update_mask=update_mask).result(timeout=timeout)
 
     def update_permissions(
             self,
@@ -7240,7 +7855,8 @@ def update_permissions(
 
 
 class CommandExecutionAPI:
-    """This API allows execution of Python, Scala, SQL, or R commands on running Databricks Clusters."""
+    """This API allows execution of Python, Scala, SQL, or R commands on running Databricks Clusters. This API
+    only supports (classic) all-purpose clusters. Serverless compute is not supported."""
 
     def __init__(self, api_client):
         self._api = api_client
@@ -8211,6 +8827,116 @@ def uninstall(self, cluster_id: str, libraries: List[Library]):
         self._api.do('POST', '/api/2.0/libraries/uninstall', body=body, headers=headers)
 
 
+class PolicyComplianceForClustersAPI:
+    """The policy compliance APIs allow you to view and manage the policy compliance status of clusters in your
+    workspace.
+    
+    A cluster is compliant with its policy if its configuration satisfies all its policy rules. Clusters could
+    be out of compliance if their policy was updated after the cluster was last edited.
+    
+    The get and list compliance APIs allow you to view the policy compliance status of a cluster. The enforce
+    compliance API allows you to update a cluster to be compliant with the current version of its policy."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def enforce_compliance(self,
+                           cluster_id: str,
+                           *,
+                           validate_only: Optional[bool] = None) -> EnforceClusterComplianceResponse:
+        """Enforce cluster policy compliance.
+        
+        Updates a cluster to be compliant with the current version of its policy. A cluster can be updated if
+        it is in a `RUNNING` or `TERMINATED` state.
+        
+        If a cluster is updated while in a `RUNNING` state, it will be restarted so that the new attributes
+        can take effect.
+        
+        If a cluster is updated while in a `TERMINATED` state, it will remain `TERMINATED`. The next time the
+        cluster is started, the new attributes will take effect.
+        
+        Clusters created by the Databricks Jobs, DLT, or Models services cannot be enforced by this API.
+        Instead, use the "Enforce job policy compliance" API to enforce policy compliance on jobs.
+        
+        :param cluster_id: str
+          The ID of the cluster you want to enforce policy compliance on.
+        :param validate_only: bool (optional)
+          If set, previews the changes that would be made to a cluster to enforce compliance but does not
+          update the cluster.
+        
+        :returns: :class:`EnforceClusterComplianceResponse`
+        """
+        body = {}
+        if cluster_id is not None: body['cluster_id'] = cluster_id
+        if validate_only is not None: body['validate_only'] = validate_only
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST',
+                           '/api/2.0/policies/clusters/enforce-compliance',
+                           body=body,
+                           headers=headers)
+        return EnforceClusterComplianceResponse.from_dict(res)
+
+    def get_compliance(self, cluster_id: str) -> GetClusterComplianceResponse:
+        """Get cluster policy compliance.
+        
+        Returns the policy compliance status of a cluster. Clusters could be out of compliance if their policy
+        was updated after the cluster was last edited.
+        
+        :param cluster_id: str
+          The ID of the cluster to get the compliance status
+        
+        :returns: :class:`GetClusterComplianceResponse`
+        """
+
+        query = {}
+        if cluster_id is not None: query['cluster_id'] = cluster_id
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET', '/api/2.0/policies/clusters/get-compliance', query=query, headers=headers)
+        return GetClusterComplianceResponse.from_dict(res)
+
+    def list_compliance(self,
+                        policy_id: str,
+                        *,
+                        page_size: Optional[int] = None,
+                        page_token: Optional[str] = None) -> Iterator[ClusterCompliance]:
+        """List cluster policy compliance.
+        
+        Returns the policy compliance status of all clusters that use a given policy. Clusters could be out of
+        compliance if their policy was updated after the cluster was last edited.
+        
+        :param policy_id: str
+          Canonical unique identifier for the cluster policy.
+        :param page_size: int (optional)
+          Use this field to specify the maximum number of results to be returned by the server. The server may
+          further constrain the maximum number of results returned in a single page.
+        :param page_token: str (optional)
+          A page token that can be used to navigate to the next page or previous page as returned by
+          `next_page_token` or `prev_page_token`.
+        
+        :returns: Iterator over :class:`ClusterCompliance`
+        """
+
+        query = {}
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        if policy_id is not None: query['policy_id'] = policy_id
+        headers = {'Accept': 'application/json', }
+
+        while True:
+            json = self._api.do('GET',
+                                '/api/2.0/policies/clusters/list-compliance',
+                                query=query,
+                                headers=headers)
+            if 'clusters' in json:
+                for v in json['clusters']:
+                    yield ClusterCompliance.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+
+
 class PolicyFamiliesAPI:
     """View available policy families. A policy family contains a policy definition providing best practices for
     configuring clusters for a particular use case.
@@ -8225,19 +8951,27 @@ class PolicyFamiliesAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def get(self, policy_family_id: str) -> PolicyFamily:
+    def get(self, policy_family_id: str, *, version: Optional[int] = None) -> PolicyFamily:
         """Get policy family information.
         
-        Retrieve the information for an policy family based on its identifier.
+        Retrieve the information for a policy family based on its identifier and version
         
         :param policy_family_id: str
+          The family ID about which to retrieve information.
+        :param version: int (optional)
+          The version number for the family to fetch. Defaults to the latest version.
         
         :returns: :class:`PolicyFamily`
         """
 
+        query = {}
+        if version is not None: query['version'] = version
         headers = {'Accept': 'application/json', }
 
-        res = self._api.do('GET', f'/api/2.0/policy-families/{policy_family_id}', headers=headers)
+        res = self._api.do('GET',
+                           f'/api/2.0/policy-families/{policy_family_id}',
+                           query=query,
+                           headers=headers)
         return PolicyFamily.from_dict(res)
 
     def list(self,
@@ -8246,10 +8980,11 @@ def list(self,
              page_token: Optional[str] = None) -> Iterator[PolicyFamily]:
         """List policy families.
         
-        Retrieve a list of policy families. This API is paginated.
+        Returns the list of policy definition types available to use at their latest version. This API is
+        paginated.
         
         :param max_results: int (optional)
-          The max number of policy families to return.
+          Maximum number of policy families to return.
         :param page_token: str (optional)
           A token that can be used to get the next page of results.
         
diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py
index b24d03183..4a4c640e6 100755
--- a/databricks/sdk/service/dashboards.py
+++ b/databricks/sdk/service/dashboards.py
@@ -3,14 +3,20 @@
 from __future__ import annotations
 
 import logging
+import random
+import time
 from dataclasses import dataclass
+from datetime import timedelta
 from enum import Enum
-from typing import Dict, Iterator, List, Optional
+from typing import Callable, Dict, Iterator, List, Optional
 
-from ._internal import _enum, _from_dict, _repeated_dict
+from ..errors import OperationFailed
+from ._internal import Wait, _enum, _from_dict, _repeated_dict
 
 _LOG = logging.getLogger('databricks.sdk')
 
+from databricks.sdk.service import sql
+
 # all definitions in this file are in alphabetical order
 
 
@@ -21,10 +27,15 @@ class CreateDashboardRequest:
 
     parent_path: Optional[str] = None
     """The workspace path of the folder containing the dashboard. Includes leading slash and no
-    trailing slash."""
+    trailing slash. This field is excluded in List Dashboards responses."""
 
     serialized_dashboard: Optional[str] = None
-    """The contents of the dashboard in serialized string form."""
+    """The contents of the dashboard in serialized string form. This field is excluded in List
+    Dashboards responses. Use the [get dashboard API] to retrieve an example response, which
+    includes the `serialized_dashboard` field. This field provides the structure of the JSON string
+    that represents the dashboard's layout and components.
+    
+    [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get"""
 
     warehouse_id: Optional[str] = None
     """The warehouse ID used to run the dashboard."""
@@ -148,23 +159,30 @@ class Dashboard:
 
     etag: Optional[str] = None
     """The etag for the dashboard. Can be optionally provided on updates to ensure that the dashboard
-    has not been modified since the last read."""
+    has not been modified since the last read. This field is excluded in List Dashboards responses."""
 
     lifecycle_state: Optional[LifecycleState] = None
     """The state of the dashboard resource. Used for tracking trashed status."""
 
     parent_path: Optional[str] = None
     """The workspace path of the folder containing the dashboard. Includes leading slash and no
-    trailing slash."""
+    trailing slash. This field is excluded in List Dashboards responses."""
 
     path: Optional[str] = None
-    """The workspace path of the dashboard asset, including the file name."""
+    """The workspace path of the dashboard asset, including the file name. Exported dashboards always
+    have the file extension `.lvdash.json`. This field is excluded in List Dashboards responses."""
 
     serialized_dashboard: Optional[str] = None
-    """The contents of the dashboard in serialized string form."""
+    """The contents of the dashboard in serialized string form. This field is excluded in List
+    Dashboards responses. Use the [get dashboard API] to retrieve an example response, which
+    includes the `serialized_dashboard` field. This field provides the structure of the JSON string
+    that represents the dashboard's layout and components.
+    
+    [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get"""
 
     update_time: Optional[str] = None
-    """The timestamp of when the dashboard was last updated by the user."""
+    """The timestamp of when the dashboard was last updated by the user. This field is excluded in List
+    Dashboards responses."""
 
     warehouse_id: Optional[str] = None
     """The warehouse ID used to run the dashboard."""
@@ -202,7 +220,6 @@ def from_dict(cls, d: Dict[str, any]) -> Dashboard:
 class DashboardView(Enum):
 
     DASHBOARD_VIEW_BASIC = 'DASHBOARD_VIEW_BASIC'
-    DASHBOARD_VIEW_FULL = 'DASHBOARD_VIEW_FULL'
 
 
 @dataclass
@@ -233,6 +250,245 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteSubscriptionResponse:
         return cls()
 
 
+@dataclass
+class GenieAttachment:
+    """Genie AI Response"""
+
+    query: Optional[QueryAttachment] = None
+
+    text: Optional[TextAttachment] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the GenieAttachment into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.query: body['query'] = self.query.as_dict()
+        if self.text: body['text'] = self.text.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GenieAttachment:
+        """Deserializes the GenieAttachment from a dictionary."""
+        return cls(query=_from_dict(d, 'query', QueryAttachment), text=_from_dict(d, 'text', TextAttachment))
+
+
+@dataclass
+class GenieConversation:
+    id: str
+    """Conversation ID"""
+
+    space_id: str
+    """Genie space ID"""
+
+    user_id: int
+    """ID of the user who created the conversation"""
+
+    title: str
+    """Conversation title"""
+
+    created_timestamp: Optional[int] = None
+    """Timestamp when the message was created"""
+
+    last_updated_timestamp: Optional[int] = None
+    """Timestamp when the message was last updated"""
+
+    def as_dict(self) -> dict:
+        """Serializes the GenieConversation into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.created_timestamp is not None: body['created_timestamp'] = self.created_timestamp
+        if self.id is not None: body['id'] = self.id
+        if self.last_updated_timestamp is not None:
+            body['last_updated_timestamp'] = self.last_updated_timestamp
+        if self.space_id is not None: body['space_id'] = self.space_id
+        if self.title is not None: body['title'] = self.title
+        if self.user_id is not None: body['user_id'] = self.user_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GenieConversation:
+        """Deserializes the GenieConversation from a dictionary."""
+        return cls(created_timestamp=d.get('created_timestamp', None),
+                   id=d.get('id', None),
+                   last_updated_timestamp=d.get('last_updated_timestamp', None),
+                   space_id=d.get('space_id', None),
+                   title=d.get('title', None),
+                   user_id=d.get('user_id', None))
+
+
+@dataclass
+class GenieCreateConversationMessageRequest:
+    content: str
+    """User message content."""
+
+    conversation_id: Optional[str] = None
+    """The ID associated with the conversation."""
+
+    space_id: Optional[str] = None
+    """The ID associated with the Genie space where the conversation is started."""
+
+    def as_dict(self) -> dict:
+        """Serializes the GenieCreateConversationMessageRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.content is not None: body['content'] = self.content
+        if self.conversation_id is not None: body['conversation_id'] = self.conversation_id
+        if self.space_id is not None: body['space_id'] = self.space_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GenieCreateConversationMessageRequest:
+        """Deserializes the GenieCreateConversationMessageRequest from a dictionary."""
+        return cls(content=d.get('content', None),
+                   conversation_id=d.get('conversation_id', None),
+                   space_id=d.get('space_id', None))
+
+
+@dataclass
+class GenieGetMessageQueryResultResponse:
+    statement_response: Optional[sql.StatementResponse] = None
+    """SQL Statement Execution response. See [Get status, manifest, and result first
+    chunk](:method:statementexecution/getstatement) for more details."""
+
+    def as_dict(self) -> dict:
+        """Serializes the GenieGetMessageQueryResultResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.statement_response: body['statement_response'] = self.statement_response.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GenieGetMessageQueryResultResponse:
+        """Deserializes the GenieGetMessageQueryResultResponse from a dictionary."""
+        return cls(statement_response=_from_dict(d, 'statement_response', sql.StatementResponse))
+
+
+@dataclass
+class GenieMessage:
+    id: str
+    """Message ID"""
+
+    space_id: str
+    """Genie space ID"""
+
+    conversation_id: str
+    """Conversation ID"""
+
+    content: str
+    """User message content"""
+
+    attachments: Optional[List[GenieAttachment]] = None
+    """AI produced response to the message"""
+
+    created_timestamp: Optional[int] = None
+    """Timestamp when the message was created"""
+
+    error: Optional[MessageError] = None
+    """Error message if AI failed to respond to the message"""
+
+    last_updated_timestamp: Optional[int] = None
+    """Timestamp when the message was last updated"""
+
+    query_result: Optional[Result] = None
+    """The result of SQL query if the message has a query attachment"""
+
+    status: Optional[MessageStatus] = None
+    """MessageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data
+    sources. * `FILTERING_CONTEXT`: Running smart context step to determine relevant context. *
+    `ASKING_AI`: Waiting for the LLM to respond to the users question. * `EXECUTING_QUERY`:
+    Executing AI provided SQL query. Get the SQL query result by calling
+    [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message
+    status will stay in the `EXECUTING_QUERY` until a client calls
+    [getMessageQueryResult](:method:genie/getMessageQueryResult)**. * `FAILED`: Generating a
+    response or executing the query failed. Please see the `error` field. * `COMPLETED`: Message
+    processing is completed. Results are in the `attachments` field. Get the SQL query result by
+    calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `SUBMITTED`: Message
+    has been submitted. * `QUERY_RESULT_EXPIRED`: SQL result is not available anymore. The user
+    needs to execute the query again. * `CANCELLED`: Message has been cancelled."""
+
+    user_id: Optional[int] = None
+    """ID of the user who created the message"""
+
+    def as_dict(self) -> dict:
+        """Serializes the GenieMessage into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.attachments: body['attachments'] = [v.as_dict() for v in self.attachments]
+        if self.content is not None: body['content'] = self.content
+        if self.conversation_id is not None: body['conversation_id'] = self.conversation_id
+        if self.created_timestamp is not None: body['created_timestamp'] = self.created_timestamp
+        if self.error: body['error'] = self.error.as_dict()
+        if self.id is not None: body['id'] = self.id
+        if self.last_updated_timestamp is not None:
+            body['last_updated_timestamp'] = self.last_updated_timestamp
+        if self.query_result: body['query_result'] = self.query_result.as_dict()
+        if self.space_id is not None: body['space_id'] = self.space_id
+        if self.status is not None: body['status'] = self.status.value
+        if self.user_id is not None: body['user_id'] = self.user_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GenieMessage:
+        """Deserializes the GenieMessage from a dictionary."""
+        return cls(attachments=_repeated_dict(d, 'attachments', GenieAttachment),
+                   content=d.get('content', None),
+                   conversation_id=d.get('conversation_id', None),
+                   created_timestamp=d.get('created_timestamp', None),
+                   error=_from_dict(d, 'error', MessageError),
+                   id=d.get('id', None),
+                   last_updated_timestamp=d.get('last_updated_timestamp', None),
+                   query_result=_from_dict(d, 'query_result', Result),
+                   space_id=d.get('space_id', None),
+                   status=_enum(d, 'status', MessageStatus),
+                   user_id=d.get('user_id', None))
+
+
+@dataclass
+class GenieStartConversationMessageRequest:
+    content: str
+    """The text of the message that starts the conversation."""
+
+    space_id: Optional[str] = None
+    """The ID associated with the Genie space where you want to start a conversation."""
+
+    def as_dict(self) -> dict:
+        """Serializes the GenieStartConversationMessageRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.content is not None: body['content'] = self.content
+        if self.space_id is not None: body['space_id'] = self.space_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GenieStartConversationMessageRequest:
+        """Deserializes the GenieStartConversationMessageRequest from a dictionary."""
+        return cls(content=d.get('content', None), space_id=d.get('space_id', None))
+
+
+@dataclass
+class GenieStartConversationResponse:
+    message_id: str
+    """Message ID"""
+
+    conversation_id: str
+    """Conversation ID"""
+
+    conversation: Optional[GenieConversation] = None
+
+    message: Optional[GenieMessage] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the GenieStartConversationResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.conversation: body['conversation'] = self.conversation.as_dict()
+        if self.conversation_id is not None: body['conversation_id'] = self.conversation_id
+        if self.message: body['message'] = self.message.as_dict()
+        if self.message_id is not None: body['message_id'] = self.message_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GenieStartConversationResponse:
+        """Deserializes the GenieStartConversationResponse from a dictionary."""
+        return cls(conversation=_from_dict(d, 'conversation', GenieConversation),
+                   conversation_id=d.get('conversation_id', None),
+                   message=_from_dict(d, 'message', GenieMessage),
+                   message_id=d.get('message_id', None))
+
+
 class LifecycleState(Enum):
 
     ACTIVE = 'ACTIVE'
@@ -305,6 +561,93 @@ def from_dict(cls, d: Dict[str, any]) -> ListSubscriptionsResponse:
                    subscriptions=_repeated_dict(d, 'subscriptions', Subscription))
 
 
+@dataclass
+class MessageError:
+    error: Optional[str] = None
+
+    type: Optional[MessageErrorType] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the MessageError into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.error is not None: body['error'] = self.error
+        if self.type is not None: body['type'] = self.type.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> MessageError:
+        """Deserializes the MessageError from a dictionary."""
+        return cls(error=d.get('error', None), type=_enum(d, 'type', MessageErrorType))
+
+
+class MessageErrorType(Enum):
+
+    BLOCK_MULTIPLE_EXECUTIONS_EXCEPTION = 'BLOCK_MULTIPLE_EXECUTIONS_EXCEPTION'
+    CHAT_COMPLETION_CLIENT_EXCEPTION = 'CHAT_COMPLETION_CLIENT_EXCEPTION'
+    CHAT_COMPLETION_CLIENT_TIMEOUT_EXCEPTION = 'CHAT_COMPLETION_CLIENT_TIMEOUT_EXCEPTION'
+    CHAT_COMPLETION_NETWORK_EXCEPTION = 'CHAT_COMPLETION_NETWORK_EXCEPTION'
+    CONTENT_FILTER_EXCEPTION = 'CONTENT_FILTER_EXCEPTION'
+    CONTEXT_EXCEEDED_EXCEPTION = 'CONTEXT_EXCEEDED_EXCEPTION'
+    COULD_NOT_GET_UC_SCHEMA_EXCEPTION = 'COULD_NOT_GET_UC_SCHEMA_EXCEPTION'
+    DEPLOYMENT_NOT_FOUND_EXCEPTION = 'DEPLOYMENT_NOT_FOUND_EXCEPTION'
+    FUNCTIONS_NOT_AVAILABLE_EXCEPTION = 'FUNCTIONS_NOT_AVAILABLE_EXCEPTION'
+    FUNCTION_ARGUMENTS_INVALID_EXCEPTION = 'FUNCTION_ARGUMENTS_INVALID_EXCEPTION'
+    FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION = 'FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION'
+    FUNCTION_CALL_MISSING_PARAMETER_EXCEPTION = 'FUNCTION_CALL_MISSING_PARAMETER_EXCEPTION'
+    GENERIC_CHAT_COMPLETION_EXCEPTION = 'GENERIC_CHAT_COMPLETION_EXCEPTION'
+    GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION = 'GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION'
+    GENERIC_SQL_EXEC_API_CALL_EXCEPTION = 'GENERIC_SQL_EXEC_API_CALL_EXCEPTION'
+    ILLEGAL_PARAMETER_DEFINITION_EXCEPTION = 'ILLEGAL_PARAMETER_DEFINITION_EXCEPTION'
+    INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION = 'INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION'
+    INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION = 'INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION'
+    INVALID_CHAT_COMPLETION_JSON_EXCEPTION = 'INVALID_CHAT_COMPLETION_JSON_EXCEPTION'
+    INVALID_COMPLETION_REQUEST_EXCEPTION = 'INVALID_COMPLETION_REQUEST_EXCEPTION'
+    INVALID_FUNCTION_CALL_EXCEPTION = 'INVALID_FUNCTION_CALL_EXCEPTION'
+    INVALID_TABLE_IDENTIFIER_EXCEPTION = 'INVALID_TABLE_IDENTIFIER_EXCEPTION'
+    LOCAL_CONTEXT_EXCEEDED_EXCEPTION = 'LOCAL_CONTEXT_EXCEEDED_EXCEPTION'
+    MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION = 'MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION'
+    MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION = 'MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION'
+    NO_QUERY_TO_VISUALIZE_EXCEPTION = 'NO_QUERY_TO_VISUALIZE_EXCEPTION'
+    NO_TABLES_TO_QUERY_EXCEPTION = 'NO_TABLES_TO_QUERY_EXCEPTION'
+    RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION = 'RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION'
+    RATE_LIMIT_EXCEEDED_SPECIFIED_WAIT_EXCEPTION = 'RATE_LIMIT_EXCEEDED_SPECIFIED_WAIT_EXCEPTION'
+    REPLY_PROCESS_TIMEOUT_EXCEPTION = 'REPLY_PROCESS_TIMEOUT_EXCEPTION'
+    RETRYABLE_PROCESSING_EXCEPTION = 'RETRYABLE_PROCESSING_EXCEPTION'
+    SQL_EXECUTION_EXCEPTION = 'SQL_EXECUTION_EXCEPTION'
+    TABLES_MISSING_EXCEPTION = 'TABLES_MISSING_EXCEPTION'
+    TOO_MANY_CERTIFIED_ANSWERS_EXCEPTION = 'TOO_MANY_CERTIFIED_ANSWERS_EXCEPTION'
+    TOO_MANY_TABLES_EXCEPTION = 'TOO_MANY_TABLES_EXCEPTION'
+    UNEXPECTED_REPLY_PROCESS_EXCEPTION = 'UNEXPECTED_REPLY_PROCESS_EXCEPTION'
+    UNKNOWN_AI_MODEL = 'UNKNOWN_AI_MODEL'
+    WAREHOUSE_ACCESS_MISSING_EXCEPTION = 'WAREHOUSE_ACCESS_MISSING_EXCEPTION'
+    WAREHOUSE_NOT_FOUND_EXCEPTION = 'WAREHOUSE_NOT_FOUND_EXCEPTION'
+
+
+class MessageStatus(Enum):
+    """MessageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data
+    sources. * `FILTERING_CONTEXT`: Running smart context step to determine relevant context. *
+    `ASKING_AI`: Waiting for the LLM to respond to the users question. * `EXECUTING_QUERY`:
+    Executing AI provided SQL query. Get the SQL query result by calling
+    [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message
+    status will stay in the `EXECUTING_QUERY` until a client calls
+    [getMessageQueryResult](:method:genie/getMessageQueryResult)**. * `FAILED`: Generating a
+    response or executing the query failed. Please see the `error` field. * `COMPLETED`: Message
+    processing is completed. Results are in the `attachments` field. Get the SQL query result by
+    calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `SUBMITTED`: Message
+    has been submitted. * `QUERY_RESULT_EXPIRED`: SQL result is not available anymore. The user
+    needs to execute the query again. * `CANCELLED`: Message has been cancelled."""
+
+    ASKING_AI = 'ASKING_AI'
+    CANCELLED = 'CANCELLED'
+    COMPLETED = 'COMPLETED'
+    EXECUTING_QUERY = 'EXECUTING_QUERY'
+    FAILED = 'FAILED'
+    FETCHING_METADATA = 'FETCHING_METADATA'
+    FILTERING_CONTEXT = 'FILTERING_CONTEXT'
+    QUERY_RESULT_EXPIRED = 'QUERY_RESULT_EXPIRED'
+    SUBMITTED = 'SUBMITTED'
+
+
 @dataclass
 class MigrateDashboardRequest:
     source_dashboard_id: str
@@ -392,6 +735,82 @@ def from_dict(cls, d: Dict[str, any]) -> PublishedDashboard:
                    warehouse_id=d.get('warehouse_id', None))
 
 
+@dataclass
+class QueryAttachment:
+    description: Optional[str] = None
+    """Description of the query"""
+
+    id: Optional[str] = None
+
+    instruction_id: Optional[str] = None
+    """If the query was created on an instruction (trusted asset) we link to the id"""
+
+    instruction_title: Optional[str] = None
+    """Always store the title next to the id in case the original instruction title changes or the
+    instruction is deleted."""
+
+    last_updated_timestamp: Optional[int] = None
+    """Time when the user updated the query last"""
+
+    query: Optional[str] = None
+    """AI generated SQL query"""
+
+    title: Optional[str] = None
+    """Name of the query"""
+
+    def as_dict(self) -> dict:
+        """Serializes the QueryAttachment into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.id is not None: body['id'] = self.id
+        if self.instruction_id is not None: body['instruction_id'] = self.instruction_id
+        if self.instruction_title is not None: body['instruction_title'] = self.instruction_title
+        if self.last_updated_timestamp is not None:
+            body['last_updated_timestamp'] = self.last_updated_timestamp
+        if self.query is not None: body['query'] = self.query
+        if self.title is not None: body['title'] = self.title
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> QueryAttachment:
+        """Deserializes the QueryAttachment from a dictionary."""
+        return cls(description=d.get('description', None),
+                   id=d.get('id', None),
+                   instruction_id=d.get('instruction_id', None),
+                   instruction_title=d.get('instruction_title', None),
+                   last_updated_timestamp=d.get('last_updated_timestamp', None),
+                   query=d.get('query', None),
+                   title=d.get('title', None))
+
+
+@dataclass
+class Result:
+    is_truncated: Optional[bool] = None
+    """If result is truncated"""
+
+    row_count: Optional[int] = None
+    """Row count of the result"""
+
+    statement_id: Optional[str] = None
+    """Statement Execution API statement id. Use [Get status, manifest, and result first
+    chunk](:method:statementexecution/getstatement) to get the full result data."""
+
+    def as_dict(self) -> dict:
+        """Serializes the Result into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.is_truncated is not None: body['is_truncated'] = self.is_truncated
+        if self.row_count is not None: body['row_count'] = self.row_count
+        if self.statement_id is not None: body['statement_id'] = self.statement_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> Result:
+        """Deserializes the Result from a dictionary."""
+        return cls(is_truncated=d.get('is_truncated', None),
+                   row_count=d.get('row_count', None),
+                   statement_id=d.get('statement_id', None))
+
+
 @dataclass
 class Schedule:
     cron_schedule: CronSchedule
@@ -565,6 +984,26 @@ def from_dict(cls, d: Dict[str, any]) -> SubscriptionSubscriberUser:
         return cls(user_id=d.get('user_id', None))
 
 
+@dataclass
+class TextAttachment:
+    content: Optional[str] = None
+    """AI generated message"""
+
+    id: Optional[str] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the TextAttachment into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.content is not None: body['content'] = self.content
+        if self.id is not None: body['id'] = self.id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> TextAttachment:
+        """Deserializes the TextAttachment from a dictionary."""
+        return cls(content=d.get('content', None), id=d.get('id', None))
+
+
 @dataclass
 class TrashDashboardResponse:
 
@@ -603,10 +1042,15 @@ class UpdateDashboardRequest:
 
     etag: Optional[str] = None
     """The etag for the dashboard. Can be optionally provided on updates to ensure that the dashboard
-    has not been modified since the last read."""
+    has not been modified since the last read. This field is excluded in List Dashboards responses."""
 
     serialized_dashboard: Optional[str] = None
-    """The contents of the dashboard in serialized string form."""
+    """The contents of the dashboard in serialized string form. This field is excluded in List
+    Dashboards responses. Use the [get dashboard API] to retrieve an example response, which
+    includes the `serialized_dashboard` field. This field provides the structure of the JSON string
+    that represents the dashboard's layout and components.
+    
+    [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get"""
 
     warehouse_id: Optional[str] = None
     """The warehouse ID used to run the dashboard."""
@@ -675,6 +1119,193 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateScheduleRequest:
                    schedule_id=d.get('schedule_id', None))
 
 
+class GenieAPI:
+    """Genie provides a no-code experience for business users, powered by AI/BI. Analysts set up spaces that
+    business users can use to ask questions using natural language. Genie uses data registered to Unity
+    Catalog and requires at least CAN USE permission on a Pro or Serverless SQL warehouse. Also, Databricks
+    Assistant must be enabled."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def wait_get_message_genie_completed(
+            self,
+            conversation_id: str,
+            message_id: str,
+            space_id: str,
+            timeout=timedelta(minutes=20),
+            callback: Optional[Callable[[GenieMessage], None]] = None) -> GenieMessage:
+        deadline = time.time() + timeout.total_seconds()
+        target_states = (MessageStatus.COMPLETED, )
+        failure_states = (MessageStatus.FAILED, )
+        status_message = 'polling...'
+        attempt = 1
+        while time.time() < deadline:
+            poll = self.get_message(conversation_id=conversation_id, message_id=message_id, space_id=space_id)
+            status = poll.status
+            status_message = f'current status: {status}'
+            if status in target_states:
+                return poll
+            if callback:
+                callback(poll)
+            if status in failure_states:
+                msg = f'failed to reach COMPLETED, got {status}: {status_message}'
+                raise OperationFailed(msg)
+            prefix = f"conversation_id={conversation_id}, message_id={message_id}, space_id={space_id}"
+            sleep = attempt
+            if sleep > 10:
+                # sleep 10s max per attempt
+                sleep = 10
+            _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
+            time.sleep(sleep + random.random())
+            attempt += 1
+        raise TimeoutError(f'timed out after {timeout}: {status_message}')
+
+    def create_message(self, space_id: str, conversation_id: str, content: str) -> Wait[GenieMessage]:
+        """Create conversation message.
+        
+        Create new message in [conversation](:method:genie/startconversation). The AI response uses all
+        previously created messages in the conversation to respond.
+        
+        :param space_id: str
+          The ID associated with the Genie space where the conversation is started.
+        :param conversation_id: str
+          The ID associated with the conversation.
+        :param content: str
+          User message content.
+        
+        :returns:
+          Long-running operation waiter for :class:`GenieMessage`.
+          See :method:wait_get_message_genie_completed for more details.
+        """
+        body = {}
+        if content is not None: body['content'] = content
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        op_response = self._api.do(
+            'POST',
+            f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages',
+            body=body,
+            headers=headers)
+        return Wait(self.wait_get_message_genie_completed,
+                    response=GenieMessage.from_dict(op_response),
+                    conversation_id=conversation_id,
+                    message_id=op_response['id'],
+                    space_id=space_id)
+
+    def create_message_and_wait(self,
+                                space_id: str,
+                                conversation_id: str,
+                                content: str,
+                                timeout=timedelta(minutes=20)) -> GenieMessage:
+        return self.create_message(content=content, conversation_id=conversation_id,
+                                   space_id=space_id).result(timeout=timeout)
+
+    def execute_message_query(self, space_id: str, conversation_id: str,
+                              message_id: str) -> GenieGetMessageQueryResultResponse:
+        """Execute SQL query in a conversation message.
+        
+        Execute the SQL query in the message.
+        
+        :param space_id: str
+          Genie space ID
+        :param conversation_id: str
+          Conversation ID
+        :param message_id: str
+          Message ID
+        
+        :returns: :class:`GenieGetMessageQueryResultResponse`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do(
+            'POST',
+            f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/execute-query',
+            headers=headers)
+        return GenieGetMessageQueryResultResponse.from_dict(res)
+
+    def get_message(self, space_id: str, conversation_id: str, message_id: str) -> GenieMessage:
+        """Get conversation message.
+        
+        Get message from conversation.
+        
+        :param space_id: str
+          The ID associated with the Genie space where the target conversation is located.
+        :param conversation_id: str
+          The ID associated with the target conversation.
+        :param message_id: str
+          The ID associated with the target message from the identified conversation.
+        
+        :returns: :class:`GenieMessage`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do(
+            'GET',
+            f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}',
+            headers=headers)
+        return GenieMessage.from_dict(res)
+
+    def get_message_query_result(self, space_id: str, conversation_id: str,
+                                 message_id: str) -> GenieGetMessageQueryResultResponse:
+        """Get conversation message SQL query result.
+        
+        Get the result of SQL query if the message has a query attachment. This is only available if a message
+        has a query attachment and the message status is `EXECUTING_QUERY`.
+        
+        :param space_id: str
+          Genie space ID
+        :param conversation_id: str
+          Conversation ID
+        :param message_id: str
+          Message ID
+        
+        :returns: :class:`GenieGetMessageQueryResultResponse`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do(
+            'GET',
+            f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/query-result',
+            headers=headers)
+        return GenieGetMessageQueryResultResponse.from_dict(res)
+
+    def start_conversation(self, space_id: str, content: str) -> Wait[GenieMessage]:
+        """Start conversation.
+        
+        Start a new conversation.
+        
+        :param space_id: str
+          The ID associated with the Genie space where you want to start a conversation.
+        :param content: str
+          The text of the message that starts the conversation.
+        
+        :returns:
+          Long-running operation waiter for :class:`GenieMessage`.
+          See :method:wait_get_message_genie_completed for more details.
+        """
+        body = {}
+        if content is not None: body['content'] = content
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        op_response = self._api.do('POST',
+                                   f'/api/2.0/genie/spaces/{space_id}/start-conversation',
+                                   body=body,
+                                   headers=headers)
+        return Wait(self.wait_get_message_genie_completed,
+                    response=GenieStartConversationResponse.from_dict(op_response),
+                    conversation_id=op_response['conversation_id'],
+                    message_id=op_response['message_id'],
+                    space_id=space_id)
+
+    def start_conversation_and_wait(self, space_id: str, content: str,
+                                    timeout=timedelta(minutes=20)) -> GenieMessage:
+        return self.start_conversation(content=content, space_id=space_id).result(timeout=timeout)
+
+
 class LakeviewAPI:
     """These APIs provide specific management operations for Lakeview dashboards. Generic resource management can
     be done with Workspace API (import, export, get-status, list, delete)."""
@@ -696,9 +1327,14 @@ def create(self,
           The display name of the dashboard.
         :param parent_path: str (optional)
           The workspace path of the folder containing the dashboard. Includes leading slash and no trailing
-          slash.
+          slash. This field is excluded in List Dashboards responses.
         :param serialized_dashboard: str (optional)
-          The contents of the dashboard in serialized string form.
+          The contents of the dashboard in serialized string form. This field is excluded in List Dashboards
+          responses. Use the [get dashboard API] to retrieve an example response, which includes the
+          `serialized_dashboard` field. This field provides the structure of the JSON string that represents
+          the dashboard's layout and components.
+          
+          [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get
         :param warehouse_id: str (optional)
           The warehouse ID used to run the dashboard.
         
@@ -911,8 +1547,7 @@ def list(self,
           The flag to include dashboards located in the trash. If unspecified, only active dashboards will be
           returned.
         :param view: :class:`DashboardView` (optional)
-          Indicates whether to include all metadata from the dashboard in the response. If unset, the response
-          defaults to `DASHBOARD_VIEW_BASIC` which only includes summary metadata from the dashboard.
+          `DASHBOARD_VIEW_BASIC` only includes summary metadata from the dashboard.
         
         :returns: Iterator over :class:`Dashboard`
         """
@@ -1111,9 +1746,14 @@ def update(self,
           The display name of the dashboard.
         :param etag: str (optional)
           The etag for the dashboard. Can be optionally provided on updates to ensure that the dashboard has
-          not been modified since the last read.
+          not been modified since the last read. This field is excluded in List Dashboards responses.
         :param serialized_dashboard: str (optional)
-          The contents of the dashboard in serialized string form.
+          The contents of the dashboard in serialized string form. This field is excluded in List Dashboards
+          responses. Use the [get dashboard API] to retrieve an example response, which includes the
+          `serialized_dashboard` field. This field provides the structure of the JSON string that represents
+          the dashboard's layout and components.
+          
+          [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get
         :param warehouse_id: str (optional)
           The warehouse ID used to run the dashboard.
         
diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py
index 27f448ccb..f1c56a1a9 100755
--- a/databricks/sdk/service/iam.py
+++ b/databricks/sdk/service/iam.py
@@ -132,16 +132,16 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
 
 
 @dataclass
-class DeleteWorkspaceAssignments:
+class DeleteWorkspacePermissionAssignmentResponse:
 
     def as_dict(self) -> dict:
-        """Serializes the DeleteWorkspaceAssignments into a dictionary suitable for use as a JSON request body."""
+        """Serializes the DeleteWorkspacePermissionAssignmentResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> DeleteWorkspaceAssignments:
-        """Deserializes the DeleteWorkspaceAssignments from a dictionary."""
+    def from_dict(cls, d: Dict[str, any]) -> DeleteWorkspacePermissionAssignmentResponse:
+        """Deserializes the DeleteWorkspacePermissionAssignmentResponse from a dictionary."""
         return cls()
 
 
@@ -406,6 +406,56 @@ def from_dict(cls, d: Dict[str, any]) -> ListUsersResponse:
                    total_results=d.get('totalResults', None))
 
 
+@dataclass
+class MigratePermissionsRequest:
+    workspace_id: int
+    """WorkspaceId of the associated workspace where the permission migration will occur."""
+
+    from_workspace_group_name: str
+    """The name of the workspace group that permissions will be migrated from."""
+
+    to_account_group_name: str
+    """The name of the account group that permissions will be migrated to."""
+
+    size: Optional[int] = None
+    """The maximum number of permissions that will be migrated."""
+
+    def as_dict(self) -> dict:
+        """Serializes the MigratePermissionsRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.from_workspace_group_name is not None:
+            body['from_workspace_group_name'] = self.from_workspace_group_name
+        if self.size is not None: body['size'] = self.size
+        if self.to_account_group_name is not None: body['to_account_group_name'] = self.to_account_group_name
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> MigratePermissionsRequest:
+        """Deserializes the MigratePermissionsRequest from a dictionary."""
+        return cls(from_workspace_group_name=d.get('from_workspace_group_name', None),
+                   size=d.get('size', None),
+                   to_account_group_name=d.get('to_account_group_name', None),
+                   workspace_id=d.get('workspace_id', None))
+
+
+@dataclass
+class MigratePermissionsResponse:
+    permissions_migrated: Optional[int] = None
+    """Number of permissions migrated."""
+
+    def as_dict(self) -> dict:
+        """Serializes the MigratePermissionsResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.permissions_migrated is not None: body['permissions_migrated'] = self.permissions_migrated
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> MigratePermissionsResponse:
+        """Deserializes the MigratePermissionsResponse from a dictionary."""
+        return cls(permissions_migrated=d.get('permissions_migrated', None))
+
+
 @dataclass
 class Name:
     family_name: Optional[str] = None
@@ -723,6 +773,9 @@ def from_dict(cls, d: Dict[str, any]) -> Permission:
 
 @dataclass
 class PermissionAssignment:
+    """The output format for existing workspace PermissionAssignment records, which contains some info
+    for user consumption."""
+
     error: Optional[str] = None
     """Error response associated with a workspace permission assignment, if any."""
 
@@ -777,6 +830,7 @@ class PermissionLevel(Enum):
     CAN_MANAGE_PRODUCTION_VERSIONS = 'CAN_MANAGE_PRODUCTION_VERSIONS'
     CAN_MANAGE_RUN = 'CAN_MANAGE_RUN'
     CAN_MANAGE_STAGING_VERSIONS = 'CAN_MANAGE_STAGING_VERSIONS'
+    CAN_MONITOR = 'CAN_MONITOR'
     CAN_QUERY = 'CAN_QUERY'
     CAN_READ = 'CAN_READ'
     CAN_RESTART = 'CAN_RESTART'
@@ -787,57 +841,6 @@ class PermissionLevel(Enum):
     IS_OWNER = 'IS_OWNER'
 
 
-@dataclass
-class PermissionMigrationRequest:
-    workspace_id: int
-    """WorkspaceId of the associated workspace where the permission migration will occur. Both
-    workspace group and account group must be in this workspace."""
-
-    from_workspace_group_name: str
-    """The name of the workspace group that permissions will be migrated from."""
-
-    to_account_group_name: str
-    """The name of the account group that permissions will be migrated to."""
-
-    size: Optional[int] = None
-    """The maximum number of permissions that will be migrated."""
-
-    def as_dict(self) -> dict:
-        """Serializes the PermissionMigrationRequest into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.from_workspace_group_name is not None:
-            body['from_workspace_group_name'] = self.from_workspace_group_name
-        if self.size is not None: body['size'] = self.size
-        if self.to_account_group_name is not None: body['to_account_group_name'] = self.to_account_group_name
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> PermissionMigrationRequest:
-        """Deserializes the PermissionMigrationRequest from a dictionary."""
-        return cls(from_workspace_group_name=d.get('from_workspace_group_name', None),
-                   size=d.get('size', None),
-                   to_account_group_name=d.get('to_account_group_name', None),
-                   workspace_id=d.get('workspace_id', None))
-
-
-@dataclass
-class PermissionMigrationResponse:
-    permissions_migrated: Optional[int] = None
-    """Number of permissions migrated."""
-
-    def as_dict(self) -> dict:
-        """Serializes the PermissionMigrationResponse into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.permissions_migrated is not None: body['permissions_migrated'] = self.permissions_migrated
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> PermissionMigrationResponse:
-        """Deserializes the PermissionMigrationResponse from a dictionary."""
-        return cls(permissions_migrated=d.get('permissions_migrated', None))
-
-
 @dataclass
 class PermissionOutput:
     description: Optional[str] = None
@@ -888,9 +891,9 @@ class PermissionsRequest:
     """The id of the request object."""
 
     request_object_type: Optional[str] = None
-    """The type of the request object. Can be one of the following: authorization, clusters,
-    cluster-policies, directories, experiments, files, instance-pools, jobs, notebooks, pipelines,
-    registered-models, repos, serving-endpoints, or warehouses."""
+    """The type of the request object. Can be one of the following: alerts, authorization, clusters,
+    cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools,
+    jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses."""
 
     def as_dict(self) -> dict:
         """Serializes the PermissionsRequest into a dictionary suitable for use as a JSON request body."""
@@ -911,6 +914,8 @@ def from_dict(cls, d: Dict[str, any]) -> PermissionsRequest:
 
 @dataclass
 class PrincipalOutput:
+    """Information about the principal assigned to the workspace."""
+
     display_name: Optional[str] = None
     """The display name of the principal."""
 
@@ -1134,16 +1139,18 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateRuleSetRequest:
 
 @dataclass
 class UpdateWorkspaceAssignments:
-    permissions: List[WorkspacePermission]
-    """Array of permissions assignments to update on the workspace. Note that excluding this field will
-    have the same effect as providing an empty list which will result in the deletion of all
+    permissions: Optional[List[WorkspacePermission]] = None
+    """Array of permissions assignments to update on the workspace. Valid values are "USER" and "ADMIN"
+    (case-sensitive). If both "USER" and "ADMIN" are provided, "ADMIN" takes precedence. Other
+    values will be ignored. Note that excluding this field, or providing unsupported values, will
+    have the same effect as providing an empty list, which will result in the deletion of all
     permissions for the principal."""
 
     principal_id: Optional[int] = None
     """The ID of the user, service principal, or group."""
 
     workspace_id: Optional[int] = None
-    """The workspace ID."""
+    """The workspace ID for the account."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateWorkspaceAssignments into a dictionary suitable for use as a JSON request body."""
@@ -2495,7 +2502,7 @@ def update(self,
 
 
 class PermissionMigrationAPI:
-    """This spec contains undocumented permission migration APIs used in https://github.com/databrickslabs/ucx."""
+    """APIs for migrating acl permissions, used only by the ucx tool: https://github.com/databrickslabs/ucx"""
 
     def __init__(self, api_client):
         self._api = api_client
@@ -2505,14 +2512,11 @@ def migrate_permissions(self,
                             from_workspace_group_name: str,
                             to_account_group_name: str,
                             *,
-                            size: Optional[int] = None) -> PermissionMigrationResponse:
+                            size: Optional[int] = None) -> MigratePermissionsResponse:
         """Migrate Permissions.
         
-        Migrate a batch of permissions from a workspace local group to an account group.
-        
         :param workspace_id: int
-          WorkspaceId of the associated workspace where the permission migration will occur. Both workspace
-          group and account group must be in this workspace.
+          WorkspaceId of the associated workspace where the permission migration will occur.
         :param from_workspace_group_name: str
           The name of the workspace group that permissions will be migrated from.
         :param to_account_group_name: str
@@ -2520,7 +2524,7 @@ def migrate_permissions(self,
         :param size: int (optional)
           The maximum number of permissions that will be migrated.
         
-        :returns: :class:`PermissionMigrationResponse`
+        :returns: :class:`MigratePermissionsResponse`
         """
         body = {}
         if from_workspace_group_name is not None:
@@ -2531,13 +2535,15 @@ def migrate_permissions(self,
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('POST', '/api/2.0/permissionmigration', body=body, headers=headers)
-        return PermissionMigrationResponse.from_dict(res)
+        return MigratePermissionsResponse.from_dict(res)
 
 
 class PermissionsAPI:
     """Permissions API are used to create read, write, edit, update and manage access for various users on
     different objects and endpoints.
     
+    * **[Apps permissions](:service:apps)** — Manage which users can manage or use apps.
+    
     * **[Cluster permissions](:service:clusters)** — Manage which users can manage, restart, or attach to
     clusters.
     
@@ -2573,7 +2579,7 @@ class PermissionsAPI:
     * **[Token permissions](:service:tokenmanagement)** — Manage which users can create or use tokens.
     
     * **[Workspace object permissions](:service:workspace)** — Manage which users can read, run, edit, or
-    manage directories, files, and notebooks.
+    manage alerts, dbsql-dashboards, directories, files, notebooks and queries.
     
     For the mapping of the required permissions for specific actions or abilities and other important
     information, see [Access Control].
@@ -2593,9 +2599,9 @@ def get(self, request_object_type: str, request_object_id: str) -> ObjectPermiss
         object.
         
         :param request_object_type: str
-          The type of the request object. Can be one of the following: authorization, clusters,
-          cluster-policies, directories, experiments, files, instance-pools, jobs, notebooks, pipelines,
-          registered-models, repos, serving-endpoints, or warehouses.
+          The type of the request object. Can be one of the following: alerts, authorization, clusters,
+          cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools,
+          jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.
         :param request_object_id: str
           The id of the request object.
         
@@ -2641,9 +2647,9 @@ def set(self,
         object.
         
         :param request_object_type: str
-          The type of the request object. Can be one of the following: authorization, clusters,
-          cluster-policies, directories, experiments, files, instance-pools, jobs, notebooks, pipelines,
-          registered-models, repos, serving-endpoints, or warehouses.
+          The type of the request object. Can be one of the following: alerts, authorization, clusters,
+          cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools,
+          jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.
         :param request_object_id: str
           The id of the request object.
         :param access_control_list: List[:class:`AccessControlRequest`] (optional)
@@ -2672,9 +2678,9 @@ def update(self,
         root object.
         
         :param request_object_type: str
-          The type of the request object. Can be one of the following: authorization, clusters,
-          cluster-policies, directories, experiments, files, instance-pools, jobs, notebooks, pipelines,
-          registered-models, repos, serving-endpoints, or warehouses.
+          The type of the request object. Can be one of the following: alerts, authorization, clusters,
+          cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools,
+          jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.
         :param request_object_id: str
           The id of the request object.
         :param access_control_list: List[:class:`AccessControlRequest`] (optional)
@@ -3313,7 +3319,7 @@ def delete(self, workspace_id: int, principal_id: int):
         principal.
         
         :param workspace_id: int
-          The workspace ID.
+          The workspace ID for the account.
         :param principal_id: int
           The ID of the user, service principal, or group.
         
@@ -3366,21 +3372,26 @@ def list(self, workspace_id: int) -> Iterator[PermissionAssignment]:
         parsed = PermissionAssignments.from_dict(json).permission_assignments
         return parsed if parsed is not None else []
 
-    def update(self, workspace_id: int, principal_id: int,
-               permissions: List[WorkspacePermission]) -> PermissionAssignment:
+    def update(self,
+               workspace_id: int,
+               principal_id: int,
+               *,
+               permissions: Optional[List[WorkspacePermission]] = None) -> PermissionAssignment:
         """Create or update permissions assignment.
         
         Creates or updates the workspace permissions assignment in a given account and workspace for the
         specified principal.
         
         :param workspace_id: int
-          The workspace ID.
+          The workspace ID for the account.
         :param principal_id: int
           The ID of the user, service principal, or group.
-        :param permissions: List[:class:`WorkspacePermission`]
-          Array of permissions assignments to update on the workspace. Note that excluding this field will
-          have the same effect as providing an empty list which will result in the deletion of all permissions
-          for the principal.
+        :param permissions: List[:class:`WorkspacePermission`] (optional)
+          Array of permissions assignments to update on the workspace. Valid values are "USER" and "ADMIN"
+          (case-sensitive). If both "USER" and "ADMIN" are provided, "ADMIN" takes precedence. Other values
+          will be ignored. Note that excluding this field, or providing unsupported values, will have the same
+          effect as providing an empty list, which will result in the deletion of all permissions for the
+          principal.
         
         :returns: :class:`PermissionAssignment`
         """
diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py
index 8824ca115..56c4a838e 100755
--- a/databricks/sdk/service/jobs.py
+++ b/databricks/sdk/service/jobs.py
@@ -15,7 +15,7 @@
 
 _LOG = logging.getLogger('databricks.sdk')
 
-from databricks.sdk.service import compute, iam
+from databricks.sdk.service import compute
 
 # all definitions in this file are in alphabetical order
 
@@ -29,6 +29,12 @@ class BaseJob:
     """The creator user name. This field won’t be included in the response if the user has already
     been deleted."""
 
+    effective_budget_policy_id: Optional[str] = None
+    """The id of the budget policy used by this job for cost attribution purposes. This may be set
+    through (in order of precedence): 1. Budget admins through the account or workspace console 2.
+    Jobs UI in the job details page and Jobs API using `budget_policy_id` 3. Inferred default based
+    on accessible budget policies of the run_as identity on job creation or modification."""
+
     job_id: Optional[int] = None
     """The canonical identifier for this job."""
 
@@ -41,6 +47,8 @@ def as_dict(self) -> dict:
         body = {}
         if self.created_time is not None: body['created_time'] = self.created_time
         if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.effective_budget_policy_id is not None:
+            body['effective_budget_policy_id'] = self.effective_budget_policy_id
         if self.job_id is not None: body['job_id'] = self.job_id
         if self.settings: body['settings'] = self.settings.as_dict()
         return body
@@ -50,6 +58,7 @@ def from_dict(cls, d: Dict[str, any]) -> BaseJob:
         """Deserializes the BaseJob from a dictionary."""
         return cls(created_time=d.get('created_time', None),
                    creator_user_name=d.get('creator_user_name', None),
+                   effective_budget_policy_id=d.get('effective_budget_policy_id', None),
                    job_id=d.get('job_id', None),
                    settings=_from_dict(d, 'settings', JobSettings))
 
@@ -58,8 +67,8 @@ def from_dict(cls, d: Dict[str, any]) -> BaseJob:
 class BaseRun:
     attempt_number: Optional[int] = None
     """The sequence number of this run attempt for a triggered job run. The initial attempt of a run
-    has an attempt_number of 0\. If the initial run attempt fails, and the job has a retry policy
-    (`max_retries` \> 0), subsequent runs are created with an `original_attempt_run_id` of the
+    has an attempt_number of 0. If the initial run attempt fails, and the job has a retry policy
+    (`max_retries` > 0), subsequent runs are created with an `original_attempt_run_id` of the
     original attempt’s ID and an incrementing `attempt_number`. Runs are retried only until they
     succeed, and the maximum `attempt_number` is the same as the `max_retries` value for the job."""
 
@@ -115,6 +124,11 @@ class BaseRun:
     job_parameters: Optional[List[JobParameter]] = None
     """Job-level parameters used in the run"""
 
+    job_run_id: Optional[int] = None
+    """ID of the job run that this run belongs to. For legacy and single-task job runs the field is
+    populated with the job run ID. For task runs, the field is populated with the ID of the job run
+    that the task run belongs to."""
+
     number_in_job: Optional[int] = None
     """A unique identifier for this job run. This is set to the same value as `run_id`."""
 
@@ -166,7 +180,10 @@ class BaseRun:
     scheduled to run on a new cluster, this is the time the cluster creation call is issued."""
 
     state: Optional[RunState] = None
-    """The current state of the run."""
+    """Deprecated. Please use the `status` field instead."""
+
+    status: Optional[RunStatus] = None
+    """The current status of the run"""
 
     tasks: Optional[List[RunTask]] = None
     """The list of tasks performed by the run. Each task has its own `run_id` which you can use to call
@@ -201,6 +218,7 @@ def as_dict(self) -> dict:
         if self.job_clusters: body['job_clusters'] = [v.as_dict() for v in self.job_clusters]
         if self.job_id is not None: body['job_id'] = self.job_id
         if self.job_parameters: body['job_parameters'] = [v.as_dict() for v in self.job_parameters]
+        if self.job_run_id is not None: body['job_run_id'] = self.job_run_id
         if self.number_in_job is not None: body['number_in_job'] = self.number_in_job
         if self.original_attempt_run_id is not None:
             body['original_attempt_run_id'] = self.original_attempt_run_id
@@ -216,6 +234,7 @@ def as_dict(self) -> dict:
         if self.setup_duration is not None: body['setup_duration'] = self.setup_duration
         if self.start_time is not None: body['start_time'] = self.start_time
         if self.state: body['state'] = self.state.as_dict()
+        if self.status: body['status'] = self.status.as_dict()
         if self.tasks: body['tasks'] = [v.as_dict() for v in self.tasks]
         if self.trigger is not None: body['trigger'] = self.trigger.value
         if self.trigger_info: body['trigger_info'] = self.trigger_info.as_dict()
@@ -236,6 +255,7 @@ def from_dict(cls, d: Dict[str, any]) -> BaseRun:
                    job_clusters=_repeated_dict(d, 'job_clusters', JobCluster),
                    job_id=d.get('job_id', None),
                    job_parameters=_repeated_dict(d, 'job_parameters', JobParameter),
+                   job_run_id=d.get('job_run_id', None),
                    number_in_job=d.get('number_in_job', None),
                    original_attempt_run_id=d.get('original_attempt_run_id', None),
                    overriding_parameters=_from_dict(d, 'overriding_parameters', RunParameters),
@@ -250,6 +270,7 @@ def from_dict(cls, d: Dict[str, any]) -> BaseRun:
                    setup_duration=d.get('setup_duration', None),
                    start_time=d.get('start_time', None),
                    state=_from_dict(d, 'state', RunState),
+                   status=_from_dict(d, 'status', RunStatus),
                    tasks=_repeated_dict(d, 'tasks', RunTask),
                    trigger=_enum(d, 'trigger', TriggerType),
                    trigger_info=_from_dict(d, 'trigger_info', TriggerInfo))
@@ -469,9 +490,14 @@ def from_dict(cls, d: Dict[str, any]) -> Continuous:
 
 @dataclass
 class CreateJob:
-    access_control_list: Optional[List[iam.AccessControlRequest]] = None
+    access_control_list: Optional[List[JobAccessControlRequest]] = None
     """List of permissions to set on the job."""
 
+    budget_policy_id: Optional[str] = None
+    """The id of the user specified budget policy to use for this job. If not specified, a default
+    budget policy may be applied when creating or modifying the job. See
+    `effective_budget_policy_id` for the budget policy used by this workload."""
+
     continuous: Optional[Continuous] = None
     """An optional continuous property for this job. The continuous property will ensure that there is
     always one run executing. Only one of `schedule` and `continuous` can be used."""
@@ -480,7 +506,7 @@ class CreateJob:
     """Deployment information for jobs managed by external sources."""
 
     description: Optional[str] = None
-    """An optional description for the job. The maximum length is 1024 characters in UTF-8 encoding."""
+    """An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding."""
 
     edit_mode: Optional[JobEditMode] = None
     """Edit mode of the job.
@@ -493,7 +519,11 @@ class CreateJob:
     well as when this job is deleted."""
 
     environments: Optional[List[JobEnvironment]] = None
-    """A list of task execution environment specifications that can be referenced by tasks of this job."""
+    """A list of task execution environment specifications that can be referenced by serverless tasks
+    of this job. An environment is required to be present for serverless tasks. For serverless
+    notebook tasks, the environment is accessible in the notebook environment panel. For other
+    serverless tasks, the task environment is required to be specified using environment_key in the
+    task settings."""
 
     format: Optional[Format] = None
     """Used to tell what is the format of the job. This field is ignored in Create/Update/Reset calls.
@@ -541,12 +571,11 @@ class CreateJob:
     """The queue settings of the job."""
 
     run_as: Optional[JobRunAs] = None
-    """Write-only setting, available only in Create/Update/Reset and Submit calls. Specifies the user
-    or service principal that the job runs as. If not specified, the job runs as the user who
-    created the job.
+    """Write-only setting. Specifies the user, service principal or group that the job/pipeline runs
+    as. If not specified, the job/pipeline runs as the user who created the job/pipeline.
     
-    Only `user_name` or `service_principal_name` can be specified. If both are specified, an error
-    is thrown."""
+    Exactly one of `user_name`, `service_principal_name`, `group_name` should be specified. If not,
+    an error is thrown."""
 
     schedule: Optional[CronSchedule] = None
     """An optional periodic schedule for this job. The default behavior is that the job only runs when
@@ -576,6 +605,7 @@ def as_dict(self) -> dict:
         body = {}
         if self.access_control_list:
             body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
         if self.continuous: body['continuous'] = self.continuous.as_dict()
         if self.deployment: body['deployment'] = self.deployment.as_dict()
         if self.description is not None: body['description'] = self.description
@@ -603,7 +633,8 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateJob:
         """Deserializes the CreateJob from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list', iam.AccessControlRequest),
+        return cls(access_control_list=_repeated_dict(d, 'access_control_list', JobAccessControlRequest),
+                   budget_policy_id=d.get('budget_policy_id', None),
                    continuous=_from_dict(d, 'continuous', Continuous),
                    deployment=_from_dict(d, 'deployment', JobDeployment),
                    description=d.get('description', None),
@@ -827,6 +858,96 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteRunResponse:
         return cls()
 
 
+@dataclass
+class EnforcePolicyComplianceForJobResponseJobClusterSettingsChange:
+    """Represents a change to the job cluster's settings that would be required for the job clusters to
+    become compliant with their policies."""
+
+    field: Optional[str] = None
+    """The field where this change would be made, prepended with the job cluster key."""
+
+    new_value: Optional[str] = None
+    """The new value of this field after enforcing policy compliance (either a number, a boolean, or a
+    string) converted to a string. This is intended to be read by a human. The typed new value of
+    this field can be retrieved by reading the settings field in the API response."""
+
+    previous_value: Optional[str] = None
+    """The previous value of this field before enforcing policy compliance (either a number, a boolean,
+    or a string) converted to a string. This is intended to be read by a human. The type of the
+    field can be retrieved by reading the settings field in the API response."""
+
+    def as_dict(self) -> dict:
+        """Serializes the EnforcePolicyComplianceForJobResponseJobClusterSettingsChange into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.field is not None: body['field'] = self.field
+        if self.new_value is not None: body['new_value'] = self.new_value
+        if self.previous_value is not None: body['previous_value'] = self.previous_value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> EnforcePolicyComplianceForJobResponseJobClusterSettingsChange:
+        """Deserializes the EnforcePolicyComplianceForJobResponseJobClusterSettingsChange from a dictionary."""
+        return cls(field=d.get('field', None),
+                   new_value=d.get('new_value', None),
+                   previous_value=d.get('previous_value', None))
+
+
+@dataclass
+class EnforcePolicyComplianceRequest:
+    job_id: int
+    """The ID of the job you want to enforce policy compliance on."""
+
+    validate_only: Optional[bool] = None
+    """If set, previews changes made to the job to comply with its policy, but does not update the job."""
+
+    def as_dict(self) -> dict:
+        """Serializes the EnforcePolicyComplianceRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.validate_only is not None: body['validate_only'] = self.validate_only
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> EnforcePolicyComplianceRequest:
+        """Deserializes the EnforcePolicyComplianceRequest from a dictionary."""
+        return cls(job_id=d.get('job_id', None), validate_only=d.get('validate_only', None))
+
+
+@dataclass
+class EnforcePolicyComplianceResponse:
+    has_changes: Optional[bool] = None
+    """Whether any changes have been made to the job cluster settings for the job to become compliant
+    with its policies."""
+
+    job_cluster_changes: Optional[List[EnforcePolicyComplianceForJobResponseJobClusterSettingsChange]] = None
+    """A list of job cluster changes that have been made to the job’s cluster settings in order for
+    all job clusters to become compliant with their policies."""
+
+    settings: Optional[JobSettings] = None
+    """Updated job settings after policy enforcement. Policy enforcement only applies to job clusters
+    that are created when running the job (which are specified in new_cluster) and does not apply to
+    existing all-purpose clusters. Updated job settings are derived by applying policy default
+    values to the existing job clusters in order to satisfy policy requirements."""
+
+    def as_dict(self) -> dict:
+        """Serializes the EnforcePolicyComplianceResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.has_changes is not None: body['has_changes'] = self.has_changes
+        if self.job_cluster_changes:
+            body['job_cluster_changes'] = [v.as_dict() for v in self.job_cluster_changes]
+        if self.settings: body['settings'] = self.settings.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> EnforcePolicyComplianceResponse:
+        """Deserializes the EnforcePolicyComplianceResponse from a dictionary."""
+        return cls(has_changes=d.get('has_changes', None),
+                   job_cluster_changes=_repeated_dict(
+                       d, 'job_cluster_changes',
+                       EnforcePolicyComplianceForJobResponseJobClusterSettingsChange),
+                   settings=_from_dict(d, 'settings', JobSettings))
+
+
 @dataclass
 class ExportRunOutput:
     """Run was exported successfully."""
@@ -914,7 +1035,8 @@ class ForEachTask:
     """Configuration for the task that will be run for each element in the array"""
 
     concurrency: Optional[int] = None
-    """Controls the number of active iterations task runs. Default is 20, maximum allowed is 100."""
+    """An optional maximum allowed number of concurrent runs of the task. Set this value if you want to
+    be able to execute multiple runs of the task concurrently."""
 
     def as_dict(self) -> dict:
         """Serializes the ForEachTask into a dictionary suitable for use as a JSON request body."""
@@ -1024,6 +1146,32 @@ def from_dict(cls, d: Dict[str, any]) -> GetJobPermissionLevelsResponse:
         return cls(permission_levels=_repeated_dict(d, 'permission_levels', JobPermissionsDescription))
 
 
+@dataclass
+class GetPolicyComplianceResponse:
+    is_compliant: Optional[bool] = None
+    """Whether the job is compliant with its policies or not. Jobs could be out of compliance if a
+    policy they are using was updated after the job was last edited and some of its job clusters no
+    longer comply with their updated policies."""
+
+    violations: Optional[Dict[str, str]] = None
+    """An object containing key-value mappings representing the first 200 policy validation errors. The
+    keys indicate the path where the policy validation error is occurring. An identifier for the job
+    cluster is prepended to the path. The values indicate an error message describing the policy
+    validation error."""
+
+    def as_dict(self) -> dict:
+        """Serializes the GetPolicyComplianceResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.is_compliant is not None: body['is_compliant'] = self.is_compliant
+        if self.violations: body['violations'] = self.violations
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GetPolicyComplianceResponse:
+        """Deserializes the GetPolicyComplianceResponse from a dictionary."""
+        return cls(is_compliant=d.get('is_compliant', None), violations=d.get('violations', None))
+
+
 class GitProvider(Enum):
 
     AWS_CODE_COMMIT = 'awsCodeCommit'
@@ -1136,6 +1284,12 @@ class Job:
     """The creator user name. This field won’t be included in the response if the user has already
     been deleted."""
 
+    effective_budget_policy_id: Optional[str] = None
+    """The id of the budget policy used by this job for cost attribution purposes. This may be set
+    through (in order of precedence): 1. Budget admins through the account or workspace console 2.
+    Jobs UI in the job details page and Jobs API using `budget_policy_id` 3. Inferred default based
+    on accessible budget policies of the run_as identity on job creation or modification."""
+
     job_id: Optional[int] = None
     """The canonical identifier for this job."""
 
@@ -1157,6 +1311,8 @@ def as_dict(self) -> dict:
         body = {}
         if self.created_time is not None: body['created_time'] = self.created_time
         if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.effective_budget_policy_id is not None:
+            body['effective_budget_policy_id'] = self.effective_budget_policy_id
         if self.job_id is not None: body['job_id'] = self.job_id
         if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name
         if self.settings: body['settings'] = self.settings.as_dict()
@@ -1167,6 +1323,7 @@ def from_dict(cls, d: Dict[str, any]) -> Job:
         """Deserializes the Job from a dictionary."""
         return cls(created_time=d.get('created_time', None),
                    creator_user_name=d.get('creator_user_name', None),
+                   effective_budget_policy_id=d.get('effective_budget_policy_id', None),
                    job_id=d.get('job_id', None),
                    run_as_user_name=d.get('run_as_user_name', None),
                    settings=_from_dict(d, 'settings', JobSettings))
@@ -1267,6 +1424,36 @@ def from_dict(cls, d: Dict[str, any]) -> JobCluster:
                    new_cluster=_from_dict(d, 'new_cluster', compute.ClusterSpec))
 
 
+@dataclass
+class JobCompliance:
+    job_id: int
+    """Canonical unique identifier for a job."""
+
+    is_compliant: Optional[bool] = None
+    """Whether this job is in compliance with the latest version of its policy."""
+
+    violations: Optional[Dict[str, str]] = None
+    """An object containing key-value mappings representing the first 200 policy validation errors. The
+    keys indicate the path where the policy validation error is occurring. An identifier for the job
+    cluster is prepended to the path. The values indicate an error message describing the policy
+    validation error."""
+
+    def as_dict(self) -> dict:
+        """Serializes the JobCompliance into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.is_compliant is not None: body['is_compliant'] = self.is_compliant
+        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.violations: body['violations'] = self.violations
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> JobCompliance:
+        """Deserializes the JobCompliance from a dictionary."""
+        return cls(is_compliant=d.get('is_compliant', None),
+                   job_id=d.get('job_id', None),
+                   violations=d.get('violations', None))
+
+
 @dataclass
 class JobDeployment:
     kind: JobDeploymentKind
@@ -1310,7 +1497,8 @@ class JobEditMode(Enum):
 @dataclass
 class JobEmailNotifications:
     no_alert_for_skipped_runs: Optional[bool] = None
-    """If true, do not send email to recipients specified in `on_failure` if the run is skipped."""
+    """If true, do not send email to recipients specified in `on_failure` if the run is skipped. This
+    field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field."""
 
     on_duration_warning_threshold_exceeded: Optional[List[str]] = None
     """A list of email addresses to be notified when the duration of a run exceeds the threshold
@@ -1568,12 +1756,11 @@ def from_dict(cls, d: Dict[str, any]) -> JobPermissionsRequest:
 
 @dataclass
 class JobRunAs:
-    """Write-only setting, available only in Create/Update/Reset and Submit calls. Specifies the user
-    or service principal that the job runs as. If not specified, the job runs as the user who
-    created the job.
+    """Write-only setting. Specifies the user, service principal or group that the job/pipeline runs
+    as. If not specified, the job/pipeline runs as the user who created the job/pipeline.
     
-    Only `user_name` or `service_principal_name` can be specified. If both are specified, an error
-    is thrown."""
+    Exactly one of `user_name`, `service_principal_name`, `group_name` should be specified. If not,
+    an error is thrown."""
 
     service_principal_name: Optional[str] = None
     """Application ID of an active service principal. Setting this field requires the
@@ -1600,6 +1787,11 @@ def from_dict(cls, d: Dict[str, any]) -> JobRunAs:
 
 @dataclass
 class JobSettings:
+    budget_policy_id: Optional[str] = None
+    """The id of the user specified budget policy to use for this job. If not specified, a default
+    budget policy may be applied when creating or modifying the job. See
+    `effective_budget_policy_id` for the budget policy used by this workload."""
+
     continuous: Optional[Continuous] = None
     """An optional continuous property for this job. The continuous property will ensure that there is
     always one run executing. Only one of `schedule` and `continuous` can be used."""
@@ -1608,7 +1800,7 @@ class JobSettings:
     """Deployment information for jobs managed by external sources."""
 
     description: Optional[str] = None
-    """An optional description for the job. The maximum length is 1024 characters in UTF-8 encoding."""
+    """An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding."""
 
     edit_mode: Optional[JobEditMode] = None
     """Edit mode of the job.
@@ -1621,7 +1813,11 @@ class JobSettings:
     well as when this job is deleted."""
 
     environments: Optional[List[JobEnvironment]] = None
-    """A list of task execution environment specifications that can be referenced by tasks of this job."""
+    """A list of task execution environment specifications that can be referenced by serverless tasks
+    of this job. An environment is required to be present for serverless tasks. For serverless
+    notebook tasks, the environment is accessible in the notebook environment panel. For other
+    serverless tasks, the task environment is required to be specified using environment_key in the
+    task settings."""
 
     format: Optional[Format] = None
     """Used to tell what is the format of the job. This field is ignored in Create/Update/Reset calls.
@@ -1669,12 +1865,11 @@ class JobSettings:
     """The queue settings of the job."""
 
     run_as: Optional[JobRunAs] = None
-    """Write-only setting, available only in Create/Update/Reset and Submit calls. Specifies the user
-    or service principal that the job runs as. If not specified, the job runs as the user who
-    created the job.
+    """Write-only setting. Specifies the user, service principal or group that the job/pipeline runs
+    as. If not specified, the job/pipeline runs as the user who created the job/pipeline.
     
-    Only `user_name` or `service_principal_name` can be specified. If both are specified, an error
-    is thrown."""
+    Exactly one of `user_name`, `service_principal_name`, `group_name` should be specified. If not,
+    an error is thrown."""
 
     schedule: Optional[CronSchedule] = None
     """An optional periodic schedule for this job. The default behavior is that the job only runs when
@@ -1702,6 +1897,7 @@ class JobSettings:
     def as_dict(self) -> dict:
         """Serializes the JobSettings into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
         if self.continuous: body['continuous'] = self.continuous.as_dict()
         if self.deployment: body['deployment'] = self.deployment.as_dict()
         if self.description is not None: body['description'] = self.description
@@ -1729,7 +1925,8 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobSettings:
         """Deserializes the JobSettings from a dictionary."""
-        return cls(continuous=_from_dict(d, 'continuous', Continuous),
+        return cls(budget_policy_id=d.get('budget_policy_id', None),
+                   continuous=_from_dict(d, 'continuous', Continuous),
                    deployment=_from_dict(d, 'deployment', JobDeployment),
                    description=d.get('description', None),
                    edit_mode=_enum(d, 'edit_mode', JobEditMode),
@@ -1881,6 +2078,35 @@ def from_dict(cls, d: Dict[str, any]) -> JobsHealthRules:
         return cls(rules=_repeated_dict(d, 'rules', JobsHealthRule))
 
 
+@dataclass
+class ListJobComplianceForPolicyResponse:
+    jobs: Optional[List[JobCompliance]] = None
+    """A list of jobs and their policy compliance statuses."""
+
+    next_page_token: Optional[str] = None
+    """This field represents the pagination token to retrieve the next page of results. If this field
+    is not in the response, it means no further results for the request."""
+
+    prev_page_token: Optional[str] = None
+    """This field represents the pagination token to retrieve the previous page of results. If this
+    field is not in the response, it means no further results for the request."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListJobComplianceForPolicyResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.jobs: body['jobs'] = [v.as_dict() for v in self.jobs]
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListJobComplianceForPolicyResponse:
+        """Deserializes the ListJobComplianceForPolicyResponse from a dictionary."""
+        return cls(jobs=_repeated_dict(d, 'jobs', JobCompliance),
+                   next_page_token=d.get('next_page_token', None),
+                   prev_page_token=d.get('prev_page_token', None))
+
+
 @dataclass
 class ListJobsResponse:
     """List of jobs was retrieved successfully."""
@@ -2062,7 +2288,6 @@ class PeriodicTriggerConfigurationTimeUnit(Enum):
 
     DAYS = 'DAYS'
     HOURS = 'HOURS'
-    TIME_UNIT_UNSPECIFIED = 'TIME_UNIT_UNSPECIFIED'
     WEEKS = 'WEEKS'
 
 
@@ -2139,6 +2364,44 @@ def from_dict(cls, d: Dict[str, any]) -> PythonWheelTask:
                    parameters=d.get('parameters', None))
 
 
+@dataclass
+class QueueDetails:
+    code: Optional[QueueDetailsCodeCode] = None
+    """The reason for queuing the run. * `ACTIVE_RUNS_LIMIT_REACHED`: The run was queued due to
+    reaching the workspace limit of active task runs. * `MAX_CONCURRENT_RUNS_REACHED`: The run was
+    queued due to reaching the per-job limit of concurrent job runs. *
+    `ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED`: The run was queued due to reaching the workspace limit of
+    active run job tasks."""
+
+    message: Optional[str] = None
+    """A descriptive message with the queuing details. This field is unstructured, and its exact format
+    is subject to change."""
+
+    def as_dict(self) -> dict:
+        """Serializes the QueueDetails into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.code is not None: body['code'] = self.code.value
+        if self.message is not None: body['message'] = self.message
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> QueueDetails:
+        """Deserializes the QueueDetails from a dictionary."""
+        return cls(code=_enum(d, 'code', QueueDetailsCodeCode), message=d.get('message', None))
+
+
+class QueueDetailsCodeCode(Enum):
+    """The reason for queuing the run. * `ACTIVE_RUNS_LIMIT_REACHED`: The run was queued due to
+    reaching the workspace limit of active task runs. * `MAX_CONCURRENT_RUNS_REACHED`: The run was
+    queued due to reaching the per-job limit of concurrent job runs. *
+    `ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED`: The run was queued due to reaching the workspace limit of
+    active run job tasks."""
+
+    ACTIVE_RUNS_LIMIT_REACHED = 'ACTIVE_RUNS_LIMIT_REACHED'
+    ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED = 'ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED'
+    MAX_CONCURRENT_RUNS_REACHED = 'MAX_CONCURRENT_RUNS_REACHED'
+
+
 @dataclass
 class QueueSettings:
     enabled: bool
@@ -2168,7 +2431,10 @@ class RepairHistoryItem:
     """The start time of the (repaired) run."""
 
     state: Optional[RunState] = None
-    """The current state of the run."""
+    """Deprecated. Please use the `status` field instead."""
+
+    status: Optional[RunStatus] = None
+    """The current status of the run"""
 
     task_run_ids: Optional[List[int]] = None
     """The run IDs of the task runs that ran as part of this repair history item."""
@@ -2183,6 +2449,7 @@ def as_dict(self) -> dict:
         if self.id is not None: body['id'] = self.id
         if self.start_time is not None: body['start_time'] = self.start_time
         if self.state: body['state'] = self.state.as_dict()
+        if self.status: body['status'] = self.status.as_dict()
         if self.task_run_ids: body['task_run_ids'] = [v for v in self.task_run_ids]
         if self.type is not None: body['type'] = self.type.value
         return body
@@ -2194,6 +2461,7 @@ def from_dict(cls, d: Dict[str, any]) -> RepairHistoryItem:
                    id=d.get('id', None),
                    start_time=d.get('start_time', None),
                    state=_from_dict(d, 'state', RunState),
+                   status=_from_dict(d, 'status', RunStatus),
                    task_run_ids=d.get('task_run_ids', None),
                    type=_enum(d, 'type', RepairHistoryItemType))
 
@@ -2249,6 +2517,7 @@ class RepairRun:
     [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html"""
 
     pipeline_params: Optional[PipelineParams] = None
+    """Controls whether the pipeline should perform a full refresh"""
 
     python_named_params: Optional[Dict[str, str]] = None
 
@@ -2576,8 +2845,8 @@ class Run:
 
     attempt_number: Optional[int] = None
     """The sequence number of this run attempt for a triggered job run. The initial attempt of a run
-    has an attempt_number of 0\. If the initial run attempt fails, and the job has a retry policy
-    (`max_retries` \> 0), subsequent runs are created with an `original_attempt_run_id` of the
+    has an attempt_number of 0. If the initial run attempt fails, and the job has a retry policy
+    (`max_retries` > 0), subsequent runs are created with an `original_attempt_run_id` of the
     original attempt’s ID and an incrementing `attempt_number`. Runs are retried only until they
     succeed, and the maximum `attempt_number` is the same as the `max_retries` value for the job."""
 
@@ -2622,6 +2891,9 @@ class Run:
     Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks
     are used, `git_source` must be defined on the job."""
 
+    iterations: Optional[List[RunTask]] = None
+    """Only populated by for-each iterations. The parent for-each task is located in the tasks array."""
+
     job_clusters: Optional[List[JobCluster]] = None
     """A list of job cluster specifications that can be shared and reused by tasks of this job.
     Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in
@@ -2633,6 +2905,14 @@ class Run:
     job_parameters: Optional[List[JobParameter]] = None
     """Job-level parameters used in the run"""
 
+    job_run_id: Optional[int] = None
+    """ID of the job run that this run belongs to. For legacy and single-task job runs the field is
+    populated with the job run ID. For task runs, the field is populated with the ID of the job run
+    that the task run belongs to."""
+
+    next_page_token: Optional[str] = None
+    """A token that can be used to list the next page of sub-resources."""
+
     number_in_job: Optional[int] = None
     """A unique identifier for this job run. This is set to the same value as `run_id`."""
 
@@ -2643,6 +2923,9 @@ class Run:
     overriding_parameters: Optional[RunParameters] = None
     """The parameters used for this run."""
 
+    prev_page_token: Optional[str] = None
+    """A token that can be used to list the previous page of sub-resources."""
+
     queue_duration: Optional[int] = None
     """The time in milliseconds that the run has spent in the queue."""
 
@@ -2684,7 +2967,10 @@ class Run:
     scheduled to run on a new cluster, this is the time the cluster creation call is issued."""
 
     state: Optional[RunState] = None
-    """The current state of the run."""
+    """Deprecated. Please use the `status` field instead."""
+
+    status: Optional[RunStatus] = None
+    """The current status of the run"""
 
     tasks: Optional[List[RunTask]] = None
     """The list of tasks performed by the run. Each task has its own `run_id` which you can use to call
@@ -2716,13 +3002,17 @@ def as_dict(self) -> dict:
         if self.end_time is not None: body['end_time'] = self.end_time
         if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
         if self.git_source: body['git_source'] = self.git_source.as_dict()
+        if self.iterations: body['iterations'] = [v.as_dict() for v in self.iterations]
         if self.job_clusters: body['job_clusters'] = [v.as_dict() for v in self.job_clusters]
         if self.job_id is not None: body['job_id'] = self.job_id
         if self.job_parameters: body['job_parameters'] = [v.as_dict() for v in self.job_parameters]
+        if self.job_run_id is not None: body['job_run_id'] = self.job_run_id
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         if self.number_in_job is not None: body['number_in_job'] = self.number_in_job
         if self.original_attempt_run_id is not None:
             body['original_attempt_run_id'] = self.original_attempt_run_id
         if self.overriding_parameters: body['overriding_parameters'] = self.overriding_parameters.as_dict()
+        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
         if self.queue_duration is not None: body['queue_duration'] = self.queue_duration
         if self.repair_history: body['repair_history'] = [v.as_dict() for v in self.repair_history]
         if self.run_duration is not None: body['run_duration'] = self.run_duration
@@ -2734,6 +3024,7 @@ def as_dict(self) -> dict:
         if self.setup_duration is not None: body['setup_duration'] = self.setup_duration
         if self.start_time is not None: body['start_time'] = self.start_time
         if self.state: body['state'] = self.state.as_dict()
+        if self.status: body['status'] = self.status.as_dict()
         if self.tasks: body['tasks'] = [v.as_dict() for v in self.tasks]
         if self.trigger is not None: body['trigger'] = self.trigger.value
         if self.trigger_info: body['trigger_info'] = self.trigger_info.as_dict()
@@ -2751,12 +3042,16 @@ def from_dict(cls, d: Dict[str, any]) -> Run:
                    end_time=d.get('end_time', None),
                    execution_duration=d.get('execution_duration', None),
                    git_source=_from_dict(d, 'git_source', GitSource),
+                   iterations=_repeated_dict(d, 'iterations', RunTask),
                    job_clusters=_repeated_dict(d, 'job_clusters', JobCluster),
                    job_id=d.get('job_id', None),
                    job_parameters=_repeated_dict(d, 'job_parameters', JobParameter),
+                   job_run_id=d.get('job_run_id', None),
+                   next_page_token=d.get('next_page_token', None),
                    number_in_job=d.get('number_in_job', None),
                    original_attempt_run_id=d.get('original_attempt_run_id', None),
                    overriding_parameters=_from_dict(d, 'overriding_parameters', RunParameters),
+                   prev_page_token=d.get('prev_page_token', None),
                    queue_duration=d.get('queue_duration', None),
                    repair_history=_repeated_dict(d, 'repair_history', RepairHistoryItem),
                    run_duration=d.get('run_duration', None),
@@ -2768,6 +3063,7 @@ def from_dict(cls, d: Dict[str, any]) -> Run:
                    setup_duration=d.get('setup_duration', None),
                    start_time=d.get('start_time', None),
                    state=_from_dict(d, 'state', RunState),
+                   status=_from_dict(d, 'status', RunStatus),
                    tasks=_repeated_dict(d, 'tasks', RunTask),
                    trigger=_enum(d, 'trigger', TriggerType),
                    trigger_info=_from_dict(d, 'trigger_info', TriggerInfo))
@@ -2825,7 +3121,8 @@ class RunForEachTask:
     """Configuration for the task that will be run for each element in the array"""
 
     concurrency: Optional[int] = None
-    """Controls the number of active iterations task runs. Default is 20, maximum allowed is 100."""
+    """An optional maximum allowed number of concurrent runs of the task. Set this value if you want to
+    be able to execute multiple runs of the task concurrently."""
 
     stats: Optional[ForEachStats] = None
     """Read only field. Populated for GetRun and ListRuns RPC calls and stores the execution stats of
@@ -2924,6 +3221,7 @@ class RunJobTask:
     [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html"""
 
     pipeline_params: Optional[PipelineParams] = None
+    """Controls whether the pipeline should perform a full refresh"""
 
     python_named_params: Optional[Dict[str, str]] = None
 
@@ -3018,6 +3316,17 @@ class RunLifeCycleState(Enum):
     WAITING_FOR_RETRY = 'WAITING_FOR_RETRY'
 
 
+class RunLifecycleStateV2State(Enum):
+    """The current state of the run."""
+
+    BLOCKED = 'BLOCKED'
+    PENDING = 'PENDING'
+    QUEUED = 'QUEUED'
+    RUNNING = 'RUNNING'
+    TERMINATED = 'TERMINATED'
+    TERMINATING = 'TERMINATING'
+
+
 @dataclass
 class RunNow:
     job_id: int
@@ -3072,6 +3381,7 @@ class RunNow:
     [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html"""
 
     pipeline_params: Optional[PipelineParams] = None
+    """Controls whether the pipeline should perform a full refresh"""
 
     python_named_params: Optional[Dict[str, str]] = None
 
@@ -3281,6 +3591,7 @@ class RunParameters:
     [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html"""
 
     pipeline_params: Optional[PipelineParams] = None
+    """Controls whether the pipeline should perform a full refresh"""
 
     python_named_params: Optional[Dict[str, str]] = None
 
@@ -3355,9 +3666,11 @@ class RunResultState(Enum):
     reached. * `EXCLUDED`: The run was skipped because the necessary conditions were not met. *
     `SUCCESS_WITH_FAILURES`: The job run completed successfully with some failures; leaf tasks were
     successful. * `UPSTREAM_FAILED`: The run was skipped because of an upstream failure. *
-    `UPSTREAM_CANCELED`: The run was skipped because an upstream task was canceled."""
+    `UPSTREAM_CANCELED`: The run was skipped because an upstream task was canceled. * `DISABLED`:
+    The run was skipped because it was disabled explicitly by the user."""
 
     CANCELED = 'CANCELED'
+    DISABLED = 'DISABLED'
     EXCLUDED = 'EXCLUDED'
     FAILED = 'FAILED'
     MAXIMUM_CONCURRENT_RUNS_REACHED = 'MAXIMUM_CONCURRENT_RUNS_REACHED'
@@ -3411,6 +3724,36 @@ def from_dict(cls, d: Dict[str, any]) -> RunState:
                    user_cancelled_or_timedout=d.get('user_cancelled_or_timedout', None))
 
 
+@dataclass
+class RunStatus:
+    """The current status of the run"""
+
+    queue_details: Optional[QueueDetails] = None
+    """If the run was queued, details about the reason for queuing the run."""
+
+    state: Optional[RunLifecycleStateV2State] = None
+    """The current state of the run."""
+
+    termination_details: Optional[TerminationDetails] = None
+    """If the run is in a TERMINATING or TERMINATED state, details about the reason for terminating the
+    run."""
+
+    def as_dict(self) -> dict:
+        """Serializes the RunStatus into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.queue_details: body['queue_details'] = self.queue_details.as_dict()
+        if self.state is not None: body['state'] = self.state.value
+        if self.termination_details: body['termination_details'] = self.termination_details.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> RunStatus:
+        """Deserializes the RunStatus from a dictionary."""
+        return cls(queue_details=_from_dict(d, 'queue_details', QueueDetails),
+                   state=_enum(d, 'state', RunLifecycleStateV2State),
+                   termination_details=_from_dict(d, 'termination_details', TerminationDetails))
+
+
 @dataclass
 class RunTask:
     """Used when outputting a child run, in GetRun or ListRuns."""
@@ -3423,8 +3766,8 @@ class RunTask:
 
     attempt_number: Optional[int] = None
     """The sequence number of this run attempt for a triggered job run. The initial attempt of a run
-    has an attempt_number of 0\. If the initial run attempt fails, and the job has a retry policy
-    (`max_retries` \> 0), subsequent runs are created with an `original_attempt_run_id` of the
+    has an attempt_number of 0. If the initial run attempt fails, and the job has a retry policy
+    (`max_retries` > 0), subsequent runs are created with an `original_attempt_run_id` of the
     original attempt’s ID and an incrementing `attempt_number`. Runs are retried only until they
     succeed, and the maximum `attempt_number` is the same as the `max_retries` value for the job."""
 
@@ -3576,7 +3919,10 @@ class RunTask:
     scheduled to run on a new cluster, this is the time the cluster creation call is issued."""
 
     state: Optional[RunState] = None
-    """The current state of the run."""
+    """Deprecated. Please use the `status` field instead."""
+
+    status: Optional[RunStatus] = None
+    """The current status of the run"""
 
     timeout_seconds: Optional[int] = None
     """An optional timeout applied to each run of this job task. A value of `0` means no timeout."""
@@ -3624,6 +3970,7 @@ def as_dict(self) -> dict:
         if self.sql_task: body['sql_task'] = self.sql_task.as_dict()
         if self.start_time is not None: body['start_time'] = self.start_time
         if self.state: body['state'] = self.state.as_dict()
+        if self.status: body['status'] = self.status.as_dict()
         if self.task_key is not None: body['task_key'] = self.task_key
         if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds
         if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict()
@@ -3667,6 +4014,7 @@ def from_dict(cls, d: Dict[str, any]) -> RunTask:
                    sql_task=_from_dict(d, 'sql_task', SqlTask),
                    start_time=d.get('start_time', None),
                    state=_from_dict(d, 'state', RunState),
+                   status=_from_dict(d, 'status', RunStatus),
                    task_key=d.get('task_key', None),
                    timeout_seconds=d.get('timeout_seconds', None),
                    webhook_notifications=_from_dict(d, 'webhook_notifications', WebhookNotifications))
@@ -4196,9 +4544,13 @@ def from_dict(cls, d: Dict[str, any]) -> SqlTaskSubscription:
 
 @dataclass
 class SubmitRun:
-    access_control_list: Optional[List[iam.AccessControlRequest]] = None
+    access_control_list: Optional[List[JobAccessControlRequest]] = None
     """List of permissions to set on the job."""
 
+    budget_policy_id: Optional[str] = None
+    """The user specified id of the budget policy to use for this one-time run. If not specified, the
+    run will not be attributed to any budget policy."""
+
     email_notifications: Optional[JobEmailNotifications] = None
     """An optional set of email addresses notified when the run begins or completes."""
 
@@ -4259,6 +4611,7 @@ def as_dict(self) -> dict:
         body = {}
         if self.access_control_list:
             body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
         if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict()
         if self.environments: body['environments'] = [v.as_dict() for v in self.environments]
         if self.git_source: body['git_source'] = self.git_source.as_dict()
@@ -4276,7 +4629,8 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SubmitRun:
         """Deserializes the SubmitRun from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list', iam.AccessControlRequest),
+        return cls(access_control_list=_repeated_dict(d, 'access_control_list', JobAccessControlRequest),
+                   budget_policy_id=d.get('budget_policy_id', None),
                    email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications),
                    environments=_repeated_dict(d, 'environments', JobEnvironment),
                    git_source=_from_dict(d, 'git_source', GitSource),
@@ -4738,7 +5092,8 @@ def from_dict(cls, d: Dict[str, any]) -> TaskDependency:
 @dataclass
 class TaskEmailNotifications:
     no_alert_for_skipped_runs: Optional[bool] = None
-    """If true, do not send email to recipients specified in `on_failure` if the run is skipped."""
+    """If true, do not send email to recipients specified in `on_failure` if the run is skipped. This
+    field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field."""
 
     on_duration_warning_threshold_exceeded: Optional[List[str]] = None
     """A list of email addresses to be notified when the duration of a run exceeds the threshold
@@ -4830,6 +5185,150 @@ def from_dict(cls, d: Dict[str, any]) -> TaskNotificationSettings:
                    no_alert_for_skipped_runs=d.get('no_alert_for_skipped_runs', None))
 
 
+class TerminationCodeCode(Enum):
+    """The code indicates why the run was terminated. Additional codes might be introduced in future
+    releases. * `SUCCESS`: The run was completed successfully. * `USER_CANCELED`: The run was
+    successfully canceled during execution by a user. * `CANCELED`: The run was canceled during
+    execution by the Databricks platform; for example, if the maximum run duration was exceeded. *
+    `SKIPPED`: Run was never executed, for example, if the upstream task run failed, the dependency
+    type condition was not met, or there were no material tasks to execute. * `INTERNAL_ERROR`: The
+    run encountered an unexpected error. Refer to the state message for further details. *
+    `DRIVER_ERROR`: The run encountered an error while communicating with the Spark Driver. *
+    `CLUSTER_ERROR`: The run failed due to a cluster error. Refer to the state message for further
+    details. * `REPOSITORY_CHECKOUT_FAILED`: Failed to complete the checkout due to an error when
+    communicating with the third party service. * `INVALID_CLUSTER_REQUEST`: The run failed because
+    it issued an invalid request to start the cluster. * `WORKSPACE_RUN_LIMIT_EXCEEDED`: The
+    workspace has reached the quota for the maximum number of concurrent active runs. Consider
+    scheduling the runs over a larger time frame. * `FEATURE_DISABLED`: The run failed because it
+    tried to access a feature unavailable for the workspace. * `CLUSTER_REQUEST_LIMIT_EXCEEDED`: The
+    number of cluster creation, start, and upsize requests have exceeded the allotted rate limit.
+    Consider spreading the run execution over a larger time frame. * `STORAGE_ACCESS_ERROR`: The run
+    failed due to an error when accessing the customer blob storage. Refer to the state message for
+    further details. * `RUN_EXECUTION_ERROR`: The run was completed with task failures. For more
+    details, refer to the state message or run output. * `UNAUTHORIZED_ERROR`: The run failed due to
+    a permission issue while accessing a resource. Refer to the state message for further details. *
+    `LIBRARY_INSTALLATION_ERROR`: The run failed while installing the user-requested library. Refer
+    to the state message for further details. The causes might include, but are not limited to: The
+    provided library is invalid, there are insufficient permissions to install the library, and so
+    forth. * `MAX_CONCURRENT_RUNS_EXCEEDED`: The scheduled run exceeds the limit of maximum
+    concurrent runs set for the job. * `MAX_SPARK_CONTEXTS_EXCEEDED`: The run is scheduled on a
+    cluster that has already reached the maximum number of contexts it is configured to create. See:
+    [Link]. * `RESOURCE_NOT_FOUND`: A resource necessary for run execution does not exist. Refer to
+    the state message for further details. * `INVALID_RUN_CONFIGURATION`: The run failed due to an
+    invalid configuration. Refer to the state message for further details. * `CLOUD_FAILURE`: The
+    run failed due to a cloud provider issue. Refer to the state message for further details. *
+    `MAX_JOB_QUEUE_SIZE_EXCEEDED`: The run was skipped due to reaching the job level queue size
+    limit.
+    
+    [Link]: https://kb.databricks.com/en_US/notebooks/too-many-execution-contexts-are-open-right-now"""
+
+    CANCELED = 'CANCELED'
+    CLOUD_FAILURE = 'CLOUD_FAILURE'
+    CLUSTER_ERROR = 'CLUSTER_ERROR'
+    CLUSTER_REQUEST_LIMIT_EXCEEDED = 'CLUSTER_REQUEST_LIMIT_EXCEEDED'
+    DRIVER_ERROR = 'DRIVER_ERROR'
+    FEATURE_DISABLED = 'FEATURE_DISABLED'
+    INTERNAL_ERROR = 'INTERNAL_ERROR'
+    INVALID_CLUSTER_REQUEST = 'INVALID_CLUSTER_REQUEST'
+    INVALID_RUN_CONFIGURATION = 'INVALID_RUN_CONFIGURATION'
+    LIBRARY_INSTALLATION_ERROR = 'LIBRARY_INSTALLATION_ERROR'
+    MAX_CONCURRENT_RUNS_EXCEEDED = 'MAX_CONCURRENT_RUNS_EXCEEDED'
+    MAX_JOB_QUEUE_SIZE_EXCEEDED = 'MAX_JOB_QUEUE_SIZE_EXCEEDED'
+    MAX_SPARK_CONTEXTS_EXCEEDED = 'MAX_SPARK_CONTEXTS_EXCEEDED'
+    REPOSITORY_CHECKOUT_FAILED = 'REPOSITORY_CHECKOUT_FAILED'
+    RESOURCE_NOT_FOUND = 'RESOURCE_NOT_FOUND'
+    RUN_EXECUTION_ERROR = 'RUN_EXECUTION_ERROR'
+    SKIPPED = 'SKIPPED'
+    STORAGE_ACCESS_ERROR = 'STORAGE_ACCESS_ERROR'
+    SUCCESS = 'SUCCESS'
+    UNAUTHORIZED_ERROR = 'UNAUTHORIZED_ERROR'
+    USER_CANCELED = 'USER_CANCELED'
+    WORKSPACE_RUN_LIMIT_EXCEEDED = 'WORKSPACE_RUN_LIMIT_EXCEEDED'
+
+
+@dataclass
+class TerminationDetails:
+    code: Optional[TerminationCodeCode] = None
+    """The code indicates why the run was terminated. Additional codes might be introduced in future
+    releases. * `SUCCESS`: The run was completed successfully. * `USER_CANCELED`: The run was
+    successfully canceled during execution by a user. * `CANCELED`: The run was canceled during
+    execution by the Databricks platform; for example, if the maximum run duration was exceeded. *
+    `SKIPPED`: Run was never executed, for example, if the upstream task run failed, the dependency
+    type condition was not met, or there were no material tasks to execute. * `INTERNAL_ERROR`: The
+    run encountered an unexpected error. Refer to the state message for further details. *
+    `DRIVER_ERROR`: The run encountered an error while communicating with the Spark Driver. *
+    `CLUSTER_ERROR`: The run failed due to a cluster error. Refer to the state message for further
+    details. * `REPOSITORY_CHECKOUT_FAILED`: Failed to complete the checkout due to an error when
+    communicating with the third party service. * `INVALID_CLUSTER_REQUEST`: The run failed because
+    it issued an invalid request to start the cluster. * `WORKSPACE_RUN_LIMIT_EXCEEDED`: The
+    workspace has reached the quota for the maximum number of concurrent active runs. Consider
+    scheduling the runs over a larger time frame. * `FEATURE_DISABLED`: The run failed because it
+    tried to access a feature unavailable for the workspace. * `CLUSTER_REQUEST_LIMIT_EXCEEDED`: The
+    number of cluster creation, start, and upsize requests have exceeded the allotted rate limit.
+    Consider spreading the run execution over a larger time frame. * `STORAGE_ACCESS_ERROR`: The run
+    failed due to an error when accessing the customer blob storage. Refer to the state message for
+    further details. * `RUN_EXECUTION_ERROR`: The run was completed with task failures. For more
+    details, refer to the state message or run output. * `UNAUTHORIZED_ERROR`: The run failed due to
+    a permission issue while accessing a resource. Refer to the state message for further details. *
+    `LIBRARY_INSTALLATION_ERROR`: The run failed while installing the user-requested library. Refer
+    to the state message for further details. The causes might include, but are not limited to: The
+    provided library is invalid, there are insufficient permissions to install the library, and so
+    forth. * `MAX_CONCURRENT_RUNS_EXCEEDED`: The scheduled run exceeds the limit of maximum
+    concurrent runs set for the job. * `MAX_SPARK_CONTEXTS_EXCEEDED`: The run is scheduled on a
+    cluster that has already reached the maximum number of contexts it is configured to create. See:
+    [Link]. * `RESOURCE_NOT_FOUND`: A resource necessary for run execution does not exist. Refer to
+    the state message for further details. * `INVALID_RUN_CONFIGURATION`: The run failed due to an
+    invalid configuration. Refer to the state message for further details. * `CLOUD_FAILURE`: The
+    run failed due to a cloud provider issue. Refer to the state message for further details. *
+    `MAX_JOB_QUEUE_SIZE_EXCEEDED`: The run was skipped due to reaching the job level queue size
+    limit.
+    
+    [Link]: https://kb.databricks.com/en_US/notebooks/too-many-execution-contexts-are-open-right-now"""
+
+    message: Optional[str] = None
+    """A descriptive message with the termination details. This field is unstructured and the format
+    might change."""
+
+    type: Optional[TerminationTypeType] = None
+    """* `SUCCESS`: The run terminated without any issues * `INTERNAL_ERROR`: An error occurred in the
+    Databricks platform. Please look at the [status page] or contact support if the issue persists.
+    * `CLIENT_ERROR`: The run was terminated because of an error caused by user input or the job
+    configuration. * `CLOUD_FAILURE`: The run was terminated because of an issue with your cloud
+    provider.
+    
+    [status page]: https://status.databricks.com/"""
+
+    def as_dict(self) -> dict:
+        """Serializes the TerminationDetails into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.code is not None: body['code'] = self.code.value
+        if self.message is not None: body['message'] = self.message
+        if self.type is not None: body['type'] = self.type.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> TerminationDetails:
+        """Deserializes the TerminationDetails from a dictionary."""
+        return cls(code=_enum(d, 'code', TerminationCodeCode),
+                   message=d.get('message', None),
+                   type=_enum(d, 'type', TerminationTypeType))
+
+
+class TerminationTypeType(Enum):
+    """* `SUCCESS`: The run terminated without any issues * `INTERNAL_ERROR`: An error occurred in the
+    Databricks platform. Please look at the [status page] or contact support if the issue persists.
+    * `CLIENT_ERROR`: The run was terminated because of an error caused by user input or the job
+    configuration. * `CLOUD_FAILURE`: The run was terminated because of an issue with your cloud
+    provider.
+    
+    [status page]: https://status.databricks.com/"""
+
+    CLIENT_ERROR = 'CLIENT_ERROR'
+    CLOUD_FAILURE = 'CLOUD_FAILURE'
+    INTERNAL_ERROR = 'INTERNAL_ERROR'
+    SUCCESS = 'SUCCESS'
+
+
 @dataclass
 class TriggerInfo:
     """Additional details about what triggered the run"""
@@ -5167,7 +5666,8 @@ def cancel_run_and_wait(self, run_id: int, timeout=timedelta(minutes=20)) -> Run
 
     def create(self,
                *,
-               access_control_list: Optional[List[iam.AccessControlRequest]] = None,
+               access_control_list: Optional[List[JobAccessControlRequest]] = None,
+               budget_policy_id: Optional[str] = None,
                continuous: Optional[Continuous] = None,
                deployment: Optional[JobDeployment] = None,
                description: Optional[str] = None,
@@ -5194,15 +5694,19 @@ def create(self,
         
         Create a new job.
         
-        :param access_control_list: List[:class:`AccessControlRequest`] (optional)
+        :param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
           List of permissions to set on the job.
+        :param budget_policy_id: str (optional)
+          The id of the user specified budget policy to use for this job. If not specified, a default budget
+          policy may be applied when creating or modifying the job. See `effective_budget_policy_id` for the
+          budget policy used by this workload.
         :param continuous: :class:`Continuous` (optional)
           An optional continuous property for this job. The continuous property will ensure that there is
           always one run executing. Only one of `schedule` and `continuous` can be used.
         :param deployment: :class:`JobDeployment` (optional)
           Deployment information for jobs managed by external sources.
         :param description: str (optional)
-          An optional description for the job. The maximum length is 1024 characters in UTF-8 encoding.
+          An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding.
         :param edit_mode: :class:`JobEditMode` (optional)
           Edit mode of the job.
           
@@ -5212,7 +5716,10 @@ def create(self,
           An optional set of email addresses that is notified when runs of this job begin or complete as well
           as when this job is deleted.
         :param environments: List[:class:`JobEnvironment`] (optional)
-          A list of task execution environment specifications that can be referenced by tasks of this job.
+          A list of task execution environment specifications that can be referenced by serverless tasks of
+          this job. An environment is required to be present for serverless tasks. For serverless notebook
+          tasks, the environment is accessible in the notebook environment panel. For other serverless tasks,
+          the task environment is required to be specified using environment_key in the task settings.
         :param format: :class:`Format` (optional)
           Used to tell what is the format of the job. This field is ignored in Create/Update/Reset calls. When
           using the Jobs API 2.1 this value is always set to `"MULTI_TASK"`.
@@ -5249,12 +5756,11 @@ def create(self,
         :param queue: :class:`QueueSettings` (optional)
           The queue settings of the job.
         :param run_as: :class:`JobRunAs` (optional)
-          Write-only setting, available only in Create/Update/Reset and Submit calls. Specifies the user or
-          service principal that the job runs as. If not specified, the job runs as the user who created the
-          job.
+          Write-only setting. Specifies the user, service principal or group that the job/pipeline runs as. If
+          not specified, the job/pipeline runs as the user who created the job/pipeline.
           
-          Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is
-          thrown.
+          Exactly one of `user_name`, `service_principal_name`, `group_name` should be specified. If not, an
+          error is thrown.
         :param schedule: :class:`CronSchedule` (optional)
           An optional periodic schedule for this job. The default behavior is that the job only runs when
           triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.
@@ -5278,6 +5784,7 @@ def create(self,
         body = {}
         if access_control_list is not None:
             body['access_control_list'] = [v.as_dict() for v in access_control_list]
+        if budget_policy_id is not None: body['budget_policy_id'] = budget_policy_id
         if continuous is not None: body['continuous'] = continuous.as_dict()
         if deployment is not None: body['deployment'] = deployment.as_dict()
         if description is not None: body['description'] = description
@@ -5412,7 +5919,8 @@ def get_run(self,
                 run_id: int,
                 *,
                 include_history: Optional[bool] = None,
-                include_resolved_values: Optional[bool] = None) -> Run:
+                include_resolved_values: Optional[bool] = None,
+                page_token: Optional[str] = None) -> Run:
         """Get a single job run.
         
         Retrieve the metadata of a run.
@@ -5423,6 +5931,9 @@ def get_run(self,
           Whether to include the repair history in the response.
         :param include_resolved_values: bool (optional)
           Whether to include resolved parameter values in the response.
+        :param page_token: str (optional)
+          To list the next page or the previous page of job tasks, set this field to the value of the
+          `next_page_token` or `prev_page_token` returned in the GetRun response.
         
         :returns: :class:`Run`
         """
@@ -5430,6 +5941,7 @@ def get_run(self,
         query = {}
         if include_history is not None: query['include_history'] = include_history
         if include_resolved_values is not None: query['include_resolved_values'] = include_resolved_values
+        if page_token is not None: query['page_token'] = page_token
         if run_id is not None: query['run_id'] = run_id
         headers = {'Accept': 'application/json', }
 
@@ -5633,6 +6145,7 @@ def repair_run(self,
           [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
           [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
         :param pipeline_params: :class:`PipelineParams` (optional)
+          Controls whether the pipeline should perform a full refresh
         :param python_named_params: Dict[str,str] (optional)
         :param python_params: List[str] (optional)
           A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe", "35"]`.
@@ -5822,6 +6335,7 @@ def run_now(self,
           [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
           [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
         :param pipeline_params: :class:`PipelineParams` (optional)
+          Controls whether the pipeline should perform a full refresh
         :param python_named_params: Dict[str,str] (optional)
         :param python_params: List[str] (optional)
           A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe", "35"]`.
@@ -5937,7 +6451,8 @@ def set_permissions(
 
     def submit(self,
                *,
-               access_control_list: Optional[List[iam.AccessControlRequest]] = None,
+               access_control_list: Optional[List[JobAccessControlRequest]] = None,
+               budget_policy_id: Optional[str] = None,
                email_notifications: Optional[JobEmailNotifications] = None,
                environments: Optional[List[JobEnvironment]] = None,
                git_source: Optional[GitSource] = None,
@@ -5956,8 +6471,11 @@ def submit(self,
         Runs submitted using this endpoint don’t display in the UI. Use the `jobs/runs/get` API to check the
         run state after the job is submitted.
         
-        :param access_control_list: List[:class:`AccessControlRequest`] (optional)
+        :param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
           List of permissions to set on the job.
+        :param budget_policy_id: str (optional)
+          The user specified id of the budget policy to use for this one-time run. If not specified, the run
+          will not be attributed to any budget policy.
         :param email_notifications: :class:`JobEmailNotifications` (optional)
           An optional set of email addresses notified when the run begins or completes.
         :param environments: List[:class:`JobEnvironment`] (optional)
@@ -6009,6 +6527,7 @@ def submit(self,
         body = {}
         if access_control_list is not None:
             body['access_control_list'] = [v.as_dict() for v in access_control_list]
+        if budget_policy_id is not None: body['budget_policy_id'] = budget_policy_id
         if email_notifications is not None: body['email_notifications'] = email_notifications.as_dict()
         if environments is not None: body['environments'] = [v.as_dict() for v in environments]
         if git_source is not None: body['git_source'] = git_source.as_dict()
@@ -6031,7 +6550,8 @@ def submit(self,
     def submit_and_wait(
         self,
         *,
-        access_control_list: Optional[List[iam.AccessControlRequest]] = None,
+        access_control_list: Optional[List[JobAccessControlRequest]] = None,
+        budget_policy_id: Optional[str] = None,
         email_notifications: Optional[JobEmailNotifications] = None,
         environments: Optional[List[JobEnvironment]] = None,
         git_source: Optional[GitSource] = None,
@@ -6046,6 +6566,7 @@ def submit_and_wait(
         webhook_notifications: Optional[WebhookNotifications] = None,
         timeout=timedelta(minutes=20)) -> Run:
         return self.submit(access_control_list=access_control_list,
+                           budget_policy_id=budget_policy_id,
                            email_notifications=email_notifications,
                            environments=environments,
                            git_source=git_source,
@@ -6118,3 +6639,102 @@ def update_permissions(
 
         res = self._api.do('PATCH', f'/api/2.0/permissions/jobs/{job_id}', body=body, headers=headers)
         return JobPermissions.from_dict(res)
+
+
+class PolicyComplianceForJobsAPI:
+    """The compliance APIs allow you to view and manage the policy compliance status of jobs in your workspace.
+    This API currently only supports compliance controls for cluster policies.
+    
+    A job is in compliance if its cluster configurations satisfy the rules of all their respective cluster
+    policies. A job could be out of compliance if a cluster policy it uses was updated after the job was last
+    edited. The job is considered out of compliance if any of its clusters no longer comply with their updated
+    policies.
+    
+    The get and list compliance APIs allow you to view the policy compliance status of a job. The enforce
+    compliance API allows you to update a job so that it becomes compliant with all of its policies."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def enforce_compliance(self,
+                           job_id: int,
+                           *,
+                           validate_only: Optional[bool] = None) -> EnforcePolicyComplianceResponse:
+        """Enforce job policy compliance.
+        
+        Updates a job so the job clusters that are created when running the job (specified in `new_cluster`)
+        are compliant with the current versions of their respective cluster policies. All-purpose clusters
+        used in the job will not be updated.
+        
+        :param job_id: int
+          The ID of the job you want to enforce policy compliance on.
+        :param validate_only: bool (optional)
+          If set, previews changes made to the job to comply with its policy, but does not update the job.
+        
+        :returns: :class:`EnforcePolicyComplianceResponse`
+        """
+        body = {}
+        if job_id is not None: body['job_id'] = job_id
+        if validate_only is not None: body['validate_only'] = validate_only
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST', '/api/2.0/policies/jobs/enforce-compliance', body=body, headers=headers)
+        return EnforcePolicyComplianceResponse.from_dict(res)
+
+    def get_compliance(self, job_id: int) -> GetPolicyComplianceResponse:
+        """Get job policy compliance.
+        
+        Returns the policy compliance status of a job. Jobs could be out of compliance if a cluster policy
+        they use was updated after the job was last edited and some of its job clusters no longer comply with
+        their updated policies.
+        
+        :param job_id: int
+          The ID of the job whose compliance status you are requesting.
+        
+        :returns: :class:`GetPolicyComplianceResponse`
+        """
+
+        query = {}
+        if job_id is not None: query['job_id'] = job_id
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET', '/api/2.0/policies/jobs/get-compliance', query=query, headers=headers)
+        return GetPolicyComplianceResponse.from_dict(res)
+
+    def list_compliance(self,
+                        policy_id: str,
+                        *,
+                        page_size: Optional[int] = None,
+                        page_token: Optional[str] = None) -> Iterator[JobCompliance]:
+        """List job policy compliance.
+        
+        Returns the policy compliance status of all jobs that use a given policy. Jobs could be out of
+        compliance if a cluster policy they use was updated after the job was last edited and its job clusters
+        no longer comply with the updated policy.
+        
+        :param policy_id: str
+          Canonical unique identifier for the cluster policy.
+        :param page_size: int (optional)
+          Use this field to specify the maximum number of results to be returned by the server. The server may
+          further constrain the maximum number of results returned in a single page.
+        :param page_token: str (optional)
+          A page token that can be used to navigate to the next page or previous page as returned by
+          `next_page_token` or `prev_page_token`.
+        
+        :returns: Iterator over :class:`JobCompliance`
+        """
+
+        query = {}
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        if policy_id is not None: query['policy_id'] = policy_id
+        headers = {'Accept': 'application/json', }
+
+        while True:
+            json = self._api.do('GET', '/api/2.0/policies/jobs/list-compliance', query=query, headers=headers)
+            if 'jobs' in json:
+                for v in json['jobs']:
+                    yield JobCompliance.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
diff --git a/databricks/sdk/service/marketplace.py b/databricks/sdk/service/marketplace.py
index 57cd4f38f..1a2dedf31 100755
--- a/databricks/sdk/service/marketplace.py
+++ b/databricks/sdk/service/marketplace.py
@@ -56,7 +56,6 @@ class AssetType(Enum):
     ASSET_TYPE_MEDIA = 'ASSET_TYPE_MEDIA'
     ASSET_TYPE_MODEL = 'ASSET_TYPE_MODEL'
     ASSET_TYPE_NOTEBOOK = 'ASSET_TYPE_NOTEBOOK'
-    ASSET_TYPE_UNSPECIFIED = 'ASSET_TYPE_UNSPECIFIED'
 
 
 @dataclass
@@ -804,11 +803,6 @@ class FileStatus(Enum):
     FILE_STATUS_STAGING = 'FILE_STATUS_STAGING'
 
 
-class FilterType(Enum):
-
-    METASTORE = 'METASTORE'
-
-
 class FulfillmentType(Enum):
 
     INSTALL = 'INSTALL'
@@ -1297,16 +1291,11 @@ class Listing:
 
     id: Optional[str] = None
 
-    provider_summary: Optional[ProviderListingSummaryInfo] = None
-    """we can not use just ProviderListingSummary since we already have same name on entity side of the
-    state"""
-
     def as_dict(self) -> dict:
         """Serializes the Listing into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.detail: body['detail'] = self.detail.as_dict()
         if self.id is not None: body['id'] = self.id
-        if self.provider_summary: body['provider_summary'] = self.provider_summary.as_dict()
         if self.summary: body['summary'] = self.summary.as_dict()
         return body
 
@@ -1315,7 +1304,6 @@ def from_dict(cls, d: Dict[str, any]) -> Listing:
         """Deserializes the Listing from a dictionary."""
         return cls(detail=_from_dict(d, 'detail', ListingDetail),
                    id=d.get('id', None),
-                   provider_summary=_from_dict(d, 'provider_summary', ProviderListingSummaryInfo),
                    summary=_from_dict(d, 'summary', ListingSummary))
 
 
@@ -1461,23 +1449,18 @@ def from_dict(cls, d: Dict[str, any]) -> ListingFulfillment:
 
 @dataclass
 class ListingSetting:
-    filters: Optional[List[VisibilityFilter]] = None
-    """filters are joined with `or` conjunction."""
-
     visibility: Optional[Visibility] = None
 
     def as_dict(self) -> dict:
         """Serializes the ListingSetting into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.filters: body['filters'] = [v.as_dict() for v in self.filters]
         if self.visibility is not None: body['visibility'] = self.visibility.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListingSetting:
         """Deserializes the ListingSetting from a dictionary."""
-        return cls(filters=_repeated_dict(d, 'filters', VisibilityFilter),
-                   visibility=_enum(d, 'visibility', Visibility))
+        return cls(visibility=_enum(d, 'visibility', Visibility))
 
 
 class ListingShareType(Enum):
@@ -1517,8 +1500,6 @@ class ListingSummary:
     """if a git repo is being created, a listing will be initialized with this field as opposed to a
     share"""
 
-    metastore_id: Optional[str] = None
-
     provider_id: Optional[str] = None
 
     provider_region: Optional[RegionInfo] = None
@@ -1552,7 +1533,6 @@ def as_dict(self) -> dict:
         if self.exchange_ids: body['exchange_ids'] = [v for v in self.exchange_ids]
         if self.git_repo: body['git_repo'] = self.git_repo.as_dict()
         if self.listing_type is not None: body['listingType'] = self.listing_type.value
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
         if self.name is not None: body['name'] = self.name
         if self.provider_id is not None: body['provider_id'] = self.provider_id
         if self.provider_region: body['provider_region'] = self.provider_region.as_dict()
@@ -1577,7 +1557,6 @@ def from_dict(cls, d: Dict[str, any]) -> ListingSummary:
                    exchange_ids=d.get('exchange_ids', None),
                    git_repo=_from_dict(d, 'git_repo', RepoInfo),
                    listing_type=_enum(d, 'listingType', ListingType),
-                   metastore_id=d.get('metastore_id', None),
                    name=d.get('name', None),
                    provider_id=d.get('provider_id', None),
                    provider_region=_from_dict(d, 'provider_region', RegionInfo),
@@ -1617,7 +1596,6 @@ class ListingTagType(Enum):
 
     LISTING_TAG_TYPE_LANGUAGE = 'LISTING_TAG_TYPE_LANGUAGE'
     LISTING_TAG_TYPE_TASK = 'LISTING_TAG_TYPE_TASK'
-    LISTING_TAG_TYPE_UNSPECIFIED = 'LISTING_TAG_TYPE_UNSPECIFIED'
 
 
 class ListingType(Enum):
@@ -1733,37 +1711,6 @@ def from_dict(cls, d: Dict[str, any]) -> ProviderAnalyticsDashboard:
         return cls(id=d.get('id', None))
 
 
-@dataclass
-class ProviderIconFile:
-    icon_file_id: Optional[str] = None
-
-    icon_file_path: Optional[str] = None
-
-    icon_type: Optional[ProviderIconType] = None
-
-    def as_dict(self) -> dict:
-        """Serializes the ProviderIconFile into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.icon_file_id is not None: body['icon_file_id'] = self.icon_file_id
-        if self.icon_file_path is not None: body['icon_file_path'] = self.icon_file_path
-        if self.icon_type is not None: body['icon_type'] = self.icon_type.value
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> ProviderIconFile:
-        """Deserializes the ProviderIconFile from a dictionary."""
-        return cls(icon_file_id=d.get('icon_file_id', None),
-                   icon_file_path=d.get('icon_file_path', None),
-                   icon_type=_enum(d, 'icon_type', ProviderIconType))
-
-
-class ProviderIconType(Enum):
-
-    DARK = 'DARK'
-    PRIMARY = 'PRIMARY'
-    PROVIDER_ICON_TYPE_UNSPECIFIED = 'PROVIDER_ICON_TYPE_UNSPECIFIED'
-
-
 @dataclass
 class ProviderInfo:
     name: str
@@ -1837,33 +1784,6 @@ def from_dict(cls, d: Dict[str, any]) -> ProviderInfo:
                    term_of_service_link=d.get('term_of_service_link', None))
 
 
-@dataclass
-class ProviderListingSummaryInfo:
-    """we can not use just ProviderListingSummary since we already have same name on entity side of the
-    state"""
-
-    description: Optional[str] = None
-
-    icon_files: Optional[List[ProviderIconFile]] = None
-
-    name: Optional[str] = None
-
-    def as_dict(self) -> dict:
-        """Serializes the ProviderListingSummaryInfo into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.icon_files: body['icon_files'] = [v.as_dict() for v in self.icon_files]
-        if self.name is not None: body['name'] = self.name
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> ProviderListingSummaryInfo:
-        """Deserializes the ProviderListingSummaryInfo from a dictionary."""
-        return cls(description=d.get('description', None),
-                   icon_files=_repeated_dict(d, 'icon_files', ProviderIconFile),
-                   name=d.get('name', None))
-
-
 @dataclass
 class RegionInfo:
     cloud: Optional[str] = None
@@ -1996,14 +1916,6 @@ def from_dict(cls, d: Dict[str, any]) -> SharedDataObject:
         return cls(data_object_type=d.get('data_object_type', None), name=d.get('name', None))
 
 
-class SortBy(Enum):
-
-    SORT_BY_DATE = 'SORT_BY_DATE'
-    SORT_BY_RELEVANCE = 'SORT_BY_RELEVANCE'
-    SORT_BY_TITLE = 'SORT_BY_TITLE'
-    SORT_BY_UNSPECIFIED = 'SORT_BY_UNSPECIFIED'
-
-
 @dataclass
 class TokenDetail:
     bearer_token: Optional[str] = None
@@ -2369,25 +2281,6 @@ class Visibility(Enum):
     PUBLIC = 'PUBLIC'
 
 
-@dataclass
-class VisibilityFilter:
-    filter_type: Optional[FilterType] = None
-
-    filter_value: Optional[str] = None
-
-    def as_dict(self) -> dict:
-        """Serializes the VisibilityFilter into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.filter_type is not None: body['filterType'] = self.filter_type.value
-        if self.filter_value is not None: body['filterValue'] = self.filter_value
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> VisibilityFilter:
-        """Deserializes the VisibilityFilter from a dictionary."""
-        return cls(filter_type=_enum(d, 'filterType', FilterType), filter_value=d.get('filterValue', None))
-
-
 class ConsumerFulfillmentsAPI:
     """Fulfillments are entities that allow consumers to preview installations."""
 
@@ -2667,14 +2560,12 @@ def list(self,
              *,
              assets: Optional[List[AssetType]] = None,
              categories: Optional[List[Category]] = None,
-             is_ascending: Optional[bool] = None,
              is_free: Optional[bool] = None,
              is_private_exchange: Optional[bool] = None,
              is_staff_pick: Optional[bool] = None,
              page_size: Optional[int] = None,
              page_token: Optional[str] = None,
              provider_ids: Optional[List[str]] = None,
-             sort_by: Optional[SortBy] = None,
              tags: Optional[List[ListingTag]] = None) -> Iterator[Listing]:
         """List listings.
         
@@ -2684,7 +2575,6 @@ def list(self,
           Matches any of the following asset types
         :param categories: List[:class:`Category`] (optional)
           Matches any of the following categories
-        :param is_ascending: bool (optional)
         :param is_free: bool (optional)
           Filters each listing based on if it is free.
         :param is_private_exchange: bool (optional)
@@ -2695,8 +2585,6 @@ def list(self,
         :param page_token: str (optional)
         :param provider_ids: List[str] (optional)
           Matches any of the following provider ids
-        :param sort_by: :class:`SortBy` (optional)
-          Criteria for sorting the resulting set of listings.
         :param tags: List[:class:`ListingTag`] (optional)
           Matches any of the following tags
         
@@ -2706,14 +2594,12 @@ def list(self,
         query = {}
         if assets is not None: query['assets'] = [v.value for v in assets]
         if categories is not None: query['categories'] = [v.value for v in categories]
-        if is_ascending is not None: query['is_ascending'] = is_ascending
         if is_free is not None: query['is_free'] = is_free
         if is_private_exchange is not None: query['is_private_exchange'] = is_private_exchange
         if is_staff_pick is not None: query['is_staff_pick'] = is_staff_pick
         if page_size is not None: query['page_size'] = page_size
         if page_token is not None: query['page_token'] = page_token
         if provider_ids is not None: query['provider_ids'] = [v for v in provider_ids]
-        if sort_by is not None: query['sort_by'] = sort_by.value
         if tags is not None: query['tags'] = [v.as_dict() for v in tags]
         headers = {'Accept': 'application/json', }
 
@@ -2731,13 +2617,11 @@ def search(self,
                *,
                assets: Optional[List[AssetType]] = None,
                categories: Optional[List[Category]] = None,
-               is_ascending: Optional[bool] = None,
                is_free: Optional[bool] = None,
                is_private_exchange: Optional[bool] = None,
                page_size: Optional[int] = None,
                page_token: Optional[str] = None,
-               provider_ids: Optional[List[str]] = None,
-               sort_by: Optional[SortBy] = None) -> Iterator[Listing]:
+               provider_ids: Optional[List[str]] = None) -> Iterator[Listing]:
         """Search listings.
         
         Search published listings in the Databricks Marketplace that the consumer has access to. This query
@@ -2749,14 +2633,12 @@ def search(self,
           Matches any of the following asset types
         :param categories: List[:class:`Category`] (optional)
           Matches any of the following categories
-        :param is_ascending: bool (optional)
         :param is_free: bool (optional)
         :param is_private_exchange: bool (optional)
         :param page_size: int (optional)
         :param page_token: str (optional)
         :param provider_ids: List[str] (optional)
           Matches any of the following provider ids
-        :param sort_by: :class:`SortBy` (optional)
         
         :returns: Iterator over :class:`Listing`
         """
@@ -2764,14 +2646,12 @@ def search(self,
         query = {}
         if assets is not None: query['assets'] = [v.value for v in assets]
         if categories is not None: query['categories'] = [v.value for v in categories]
-        if is_ascending is not None: query['is_ascending'] = is_ascending
         if is_free is not None: query['is_free'] = is_free
         if is_private_exchange is not None: query['is_private_exchange'] = is_private_exchange
         if page_size is not None: query['page_size'] = page_size
         if page_token is not None: query['page_token'] = page_token
         if provider_ids is not None: query['provider_ids'] = [v for v in provider_ids]
         if query is not None: query['query'] = query
-        if sort_by is not None: query['sort_by'] = sort_by.value
         headers = {'Accept': 'application/json', }
 
         while True:
diff --git a/databricks/sdk/service/ml.py b/databricks/sdk/service/ml.py
index 4d79ef72c..b2cec8126 100755
--- a/databricks/sdk/service/ml.py
+++ b/databricks/sdk/service/ml.py
@@ -4143,10 +4143,16 @@ def list_artifacts(self,
         """Get all artifacts.
         
         List artifacts for a run. Takes an optional `artifact_path` prefix. If it is specified, the response
-        contains only artifacts with the specified prefix.",
+        contains only artifacts with the specified prefix. This API does not support pagination when listing
+        artifacts in UC Volumes. A maximum of 1000 artifacts will be retrieved for UC Volumes. Please call
+        `/api/2.0/fs/directories{directory_path}` for listing artifacts in UC Volumes, which supports
+        pagination. See [List directory contents | Files API](/api/workspace/files/listdirectorycontents).
         
         :param page_token: str (optional)
-          Token indicating the page of artifact results to fetch
+          Token indicating the page of artifact results to fetch. `page_token` is not supported when listing
+          artifacts in UC Volumes. A maximum of 1000 artifacts will be retrieved for UC Volumes. Please call
+          `/api/2.0/fs/directories{directory_path}` for listing artifacts in UC Volumes, which supports
+          pagination. See [List directory contents | Files API](/api/workspace/files/listdirectorycontents).
         :param path: str (optional)
           Filter artifacts matching this path (a relative path from the root artifact directory).
         :param run_id: str (optional)
diff --git a/databricks/sdk/service/oauth2.py b/databricks/sdk/service/oauth2.py
index 44132ee88..0c439ae7e 100755
--- a/databricks/sdk/service/oauth2.py
+++ b/databricks/sdk/service/oauth2.py
@@ -15,14 +15,14 @@
 
 @dataclass
 class CreateCustomAppIntegration:
-    name: str
-    """name of the custom oauth app"""
+    confidential: Optional[bool] = None
+    """This field indicates whether an OAuth client secret is required to authenticate this client."""
 
-    redirect_urls: List[str]
-    """List of oauth redirect urls"""
+    name: Optional[str] = None
+    """Name of the custom OAuth app"""
 
-    confidential: Optional[bool] = None
-    """indicates if an oauth client-secret should be generated"""
+    redirect_urls: Optional[List[str]] = None
+    """List of OAuth redirect urls"""
 
     scopes: Optional[List[str]] = None
     """OAuth scopes granted to the application. Supported scopes: all-apis, sql, offline_access,
@@ -54,14 +54,14 @@ def from_dict(cls, d: Dict[str, any]) -> CreateCustomAppIntegration:
 @dataclass
 class CreateCustomAppIntegrationOutput:
     client_id: Optional[str] = None
-    """oauth client-id generated by the Databricks"""
+    """OAuth client-id generated by Databricks"""
 
     client_secret: Optional[str] = None
-    """oauth client-secret generated by the Databricks if this is a confidential oauth app
+    """OAuth client-secret generated by Databricks. If this is a confidential OAuth app,
     client-secret will be generated."""
 
     integration_id: Optional[str] = None
-    """unique integration id for the custom oauth app"""
+    """Unique integration id for the custom OAuth app"""
 
     def as_dict(self) -> dict:
         """Serializes the CreateCustomAppIntegrationOutput into a dictionary suitable for use as a JSON request body."""
@@ -82,7 +82,7 @@ def from_dict(cls, d: Dict[str, any]) -> CreateCustomAppIntegrationOutput:
 @dataclass
 class CreatePublishedAppIntegration:
     app_id: Optional[str] = None
-    """app_id of the oauth published app integration. For example power-bi, tableau-deskop"""
+    """App id of the OAuth published app integration. For example power-bi, tableau-desktop"""
 
     token_access_policy: Optional[TokenAccessPolicy] = None
     """Token access policy"""
@@ -104,7 +104,7 @@ def from_dict(cls, d: Dict[str, any]) -> CreatePublishedAppIntegration:
 @dataclass
 class CreatePublishedAppIntegrationOutput:
     integration_id: Optional[str] = None
-    """unique integration id for the published oauth app"""
+    """Unique integration id for the published OAuth app"""
 
     def as_dict(self) -> dict:
         """Serializes the CreatePublishedAppIntegrationOutput into a dictionary suitable for use as a JSON request body."""
@@ -227,19 +227,27 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
 @dataclass
 class GetCustomAppIntegrationOutput:
     client_id: Optional[str] = None
-    """oauth client id of the custom oauth app"""
+    """The client id of the custom OAuth app"""
 
     confidential: Optional[bool] = None
-    """indicates if an oauth client-secret should be generated"""
+    """This field indicates whether an OAuth client secret is required to authenticate this client."""
+
+    create_time: Optional[str] = None
+
+    created_by: Optional[int] = None
+
+    creator_username: Optional[str] = None
 
     integration_id: Optional[str] = None
     """ID of this custom app"""
 
     name: Optional[str] = None
-    """name of the custom oauth app"""
+    """The display name of the custom OAuth app"""
 
     redirect_urls: Optional[List[str]] = None
-    """List of oauth redirect urls"""
+    """List of OAuth redirect urls"""
+
+    scopes: Optional[List[str]] = None
 
     token_access_policy: Optional[TokenAccessPolicy] = None
     """Token access policy"""
@@ -249,9 +257,13 @@ def as_dict(self) -> dict:
         body = {}
         if self.client_id is not None: body['client_id'] = self.client_id
         if self.confidential is not None: body['confidential'] = self.confidential
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.creator_username is not None: body['creator_username'] = self.creator_username
         if self.integration_id is not None: body['integration_id'] = self.integration_id
         if self.name is not None: body['name'] = self.name
         if self.redirect_urls: body['redirect_urls'] = [v for v in self.redirect_urls]
+        if self.scopes: body['scopes'] = [v for v in self.scopes]
         if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict()
         return body
 
@@ -260,39 +272,51 @@ def from_dict(cls, d: Dict[str, any]) -> GetCustomAppIntegrationOutput:
         """Deserializes the GetCustomAppIntegrationOutput from a dictionary."""
         return cls(client_id=d.get('client_id', None),
                    confidential=d.get('confidential', None),
+                   create_time=d.get('create_time', None),
+                   created_by=d.get('created_by', None),
+                   creator_username=d.get('creator_username', None),
                    integration_id=d.get('integration_id', None),
                    name=d.get('name', None),
                    redirect_urls=d.get('redirect_urls', None),
+                   scopes=d.get('scopes', None),
                    token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy))
 
 
 @dataclass
 class GetCustomAppIntegrationsOutput:
     apps: Optional[List[GetCustomAppIntegrationOutput]] = None
-    """Array of Custom OAuth App Integrations defined for the account."""
+    """List of Custom OAuth App Integrations defined for the account."""
+
+    next_page_token: Optional[str] = None
 
     def as_dict(self) -> dict:
         """Serializes the GetCustomAppIntegrationsOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.apps: body['apps'] = [v.as_dict() for v in self.apps]
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetCustomAppIntegrationsOutput:
         """Deserializes the GetCustomAppIntegrationsOutput from a dictionary."""
-        return cls(apps=_repeated_dict(d, 'apps', GetCustomAppIntegrationOutput))
+        return cls(apps=_repeated_dict(d, 'apps', GetCustomAppIntegrationOutput),
+                   next_page_token=d.get('next_page_token', None))
 
 
 @dataclass
 class GetPublishedAppIntegrationOutput:
     app_id: Optional[str] = None
-    """app-id of the published app integration"""
+    """App-id of the published app integration"""
+
+    create_time: Optional[str] = None
+
+    created_by: Optional[int] = None
 
     integration_id: Optional[str] = None
-    """unique integration id for the published oauth app"""
+    """Unique integration id for the published OAuth app"""
 
     name: Optional[str] = None
-    """name of the published oauth app"""
+    """Display name of the published OAuth app"""
 
     token_access_policy: Optional[TokenAccessPolicy] = None
     """Token access policy"""
@@ -301,6 +325,8 @@ def as_dict(self) -> dict:
         """Serializes the GetPublishedAppIntegrationOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.app_id is not None: body['app_id'] = self.app_id
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.created_by is not None: body['created_by'] = self.created_by
         if self.integration_id is not None: body['integration_id'] = self.integration_id
         if self.name is not None: body['name'] = self.name
         if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict()
@@ -310,6 +336,8 @@ def as_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, any]) -> GetPublishedAppIntegrationOutput:
         """Deserializes the GetPublishedAppIntegrationOutput from a dictionary."""
         return cls(app_id=d.get('app_id', None),
+                   create_time=d.get('create_time', None),
+                   created_by=d.get('created_by', None),
                    integration_id=d.get('integration_id', None),
                    name=d.get('name', None),
                    token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy))
@@ -318,24 +346,28 @@ def from_dict(cls, d: Dict[str, any]) -> GetPublishedAppIntegrationOutput:
 @dataclass
 class GetPublishedAppIntegrationsOutput:
     apps: Optional[List[GetPublishedAppIntegrationOutput]] = None
-    """Array of Published OAuth App Integrations defined for the account."""
+    """List of Published OAuth App Integrations defined for the account."""
+
+    next_page_token: Optional[str] = None
 
     def as_dict(self) -> dict:
         """Serializes the GetPublishedAppIntegrationsOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.apps: body['apps'] = [v.as_dict() for v in self.apps]
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetPublishedAppIntegrationsOutput:
         """Deserializes the GetPublishedAppIntegrationsOutput from a dictionary."""
-        return cls(apps=_repeated_dict(d, 'apps', GetPublishedAppIntegrationOutput))
+        return cls(apps=_repeated_dict(d, 'apps', GetPublishedAppIntegrationOutput),
+                   next_page_token=d.get('next_page_token', None))
 
 
 @dataclass
 class GetPublishedAppsOutput:
     apps: Optional[List[PublishedAppOutput]] = None
-    """Array of Published OAuth Apps."""
+    """List of Published OAuth Apps."""
 
     next_page_token: Optional[str] = None
     """A token that can be used to get the next page of results. If not present, there are no more
@@ -388,7 +420,7 @@ class PublishedAppOutput:
     apps."""
 
     name: Optional[str] = None
-    """Name of the published OAuth app."""
+    """The display name of the published OAuth app."""
 
     redirect_urls: Optional[List[str]] = None
     """Redirect URLs of the published OAuth app."""
@@ -485,13 +517,12 @@ def from_dict(cls, d: Dict[str, any]) -> TokenAccessPolicy:
 @dataclass
 class UpdateCustomAppIntegration:
     integration_id: Optional[str] = None
-    """The oauth app integration ID."""
 
     redirect_urls: Optional[List[str]] = None
-    """List of oauth redirect urls to be updated in the custom oauth app integration"""
+    """List of OAuth redirect urls to be updated in the custom OAuth app integration"""
 
     token_access_policy: Optional[TokenAccessPolicy] = None
-    """Token access policy to be updated in the custom oauth app integration"""
+    """Token access policy to be updated in the custom OAuth app integration"""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateCustomAppIntegration into a dictionary suitable for use as a JSON request body."""
@@ -526,10 +557,9 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateCustomAppIntegrationOutput:
 @dataclass
 class UpdatePublishedAppIntegration:
     integration_id: Optional[str] = None
-    """The oauth app integration ID."""
 
     token_access_policy: Optional[TokenAccessPolicy] = None
-    """Token access policy to be updated in the published oauth app integration"""
+    """Token access policy to be updated in the published OAuth app integration"""
 
     def as_dict(self) -> dict:
         """Serializes the UpdatePublishedAppIntegration into a dictionary suitable for use as a JSON request body."""
@@ -560,31 +590,31 @@ def from_dict(cls, d: Dict[str, any]) -> UpdatePublishedAppIntegrationOutput:
 
 
 class CustomAppIntegrationAPI:
-    """These APIs enable administrators to manage custom oauth app integrations, which is required for
+    """These APIs enable administrators to manage custom OAuth app integrations, which is required for
     adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud."""
 
     def __init__(self, api_client):
         self._api = api_client
 
     def create(self,
-               name: str,
-               redirect_urls: List[str],
                *,
                confidential: Optional[bool] = None,
+               name: Optional[str] = None,
+               redirect_urls: Optional[List[str]] = None,
                scopes: Optional[List[str]] = None,
                token_access_policy: Optional[TokenAccessPolicy] = None) -> CreateCustomAppIntegrationOutput:
         """Create Custom OAuth App Integration.
         
         Create Custom OAuth App Integration.
         
-        You can retrieve the custom oauth app integration via :method:CustomAppIntegration/get.
+        You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get.
         
-        :param name: str
-          name of the custom oauth app
-        :param redirect_urls: List[str]
-          List of oauth redirect urls
         :param confidential: bool (optional)
-          indicates if an oauth client-secret should be generated
+          This field indicates whether an OAuth client secret is required to authenticate this client.
+        :param name: str (optional)
+          Name of the custom OAuth app
+        :param redirect_urls: List[str] (optional)
+          List of OAuth redirect urls
         :param scopes: List[str] (optional)
           OAuth scopes granted to the application. Supported scopes: all-apis, sql, offline_access, openid,
           profile, email.
@@ -610,11 +640,10 @@ def create(self,
     def delete(self, integration_id: str):
         """Delete Custom OAuth App Integration.
         
-        Delete an existing Custom OAuth App Integration. You can retrieve the custom oauth app integration via
+        Delete an existing Custom OAuth App Integration. You can retrieve the custom OAuth app integration via
         :method:CustomAppIntegration/get.
         
         :param integration_id: str
-          The oauth app integration ID.
         
         
         """
@@ -632,7 +661,6 @@ def get(self, integration_id: str) -> GetCustomAppIntegrationOutput:
         Gets the Custom OAuth App Integration for the given integration id.
         
         :param integration_id: str
-          The oauth app integration ID.
         
         :returns: :class:`GetCustomAppIntegrationOutput`
         """
@@ -645,21 +673,39 @@ def get(self, integration_id: str) -> GetCustomAppIntegrationOutput:
             headers=headers)
         return GetCustomAppIntegrationOutput.from_dict(res)
 
-    def list(self) -> Iterator[GetCustomAppIntegrationOutput]:
+    def list(self,
+             *,
+             include_creator_username: Optional[bool] = None,
+             page_size: Optional[int] = None,
+             page_token: Optional[str] = None) -> Iterator[GetCustomAppIntegrationOutput]:
         """Get custom oauth app integrations.
         
-        Get the list of custom oauth app integrations for the specified Databricks account
+        Get the list of custom OAuth app integrations for the specified Databricks account
+        
+        :param include_creator_username: bool (optional)
+        :param page_size: int (optional)
+        :param page_token: str (optional)
         
         :returns: Iterator over :class:`GetCustomAppIntegrationOutput`
         """
 
+        query = {}
+        if include_creator_username is not None: query['include_creator_username'] = include_creator_username
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
-        json = self._api.do('GET',
-                            f'/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations',
-                            headers=headers)
-        parsed = GetCustomAppIntegrationsOutput.from_dict(json).apps
-        return parsed if parsed is not None else []
+        while True:
+            json = self._api.do('GET',
+                                f'/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations',
+                                query=query,
+                                headers=headers)
+            if 'apps' in json:
+                for v in json['apps']:
+                    yield GetCustomAppIntegrationOutput.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
 
     def update(self,
                integration_id: str,
@@ -668,15 +714,14 @@ def update(self,
                token_access_policy: Optional[TokenAccessPolicy] = None):
         """Updates Custom OAuth App Integration.
         
-        Updates an existing custom OAuth App Integration. You can retrieve the custom oauth app integration
+        Updates an existing custom OAuth App Integration. You can retrieve the custom OAuth app integration
         via :method:CustomAppIntegration/get.
         
         :param integration_id: str
-          The oauth app integration ID.
         :param redirect_urls: List[str] (optional)
-          List of oauth redirect urls to be updated in the custom oauth app integration
+          List of OAuth redirect urls to be updated in the custom OAuth app integration
         :param token_access_policy: :class:`TokenAccessPolicy` (optional)
-          Token access policy to be updated in the custom oauth app integration
+          Token access policy to be updated in the custom OAuth app integration
         
         
         """
@@ -709,7 +754,7 @@ def list(self,
         Get all the available published OAuth apps in Databricks.
         
         :param page_size: int (optional)
-          The max number of OAuth published apps to return.
+          The max number of OAuth published apps to return in one page.
         :param page_token: str (optional)
           A token that can be used to get the next page of results.
         
@@ -723,7 +768,7 @@ def list(self,
 
         while True:
             json = self._api.do('GET',
-                                f'/api/2.0/accounts/{self._api.account_id}/oauth2/published-apps/',
+                                f'/api/2.0/accounts/{self._api.account_id}/oauth2/published-apps',
                                 query=query,
                                 headers=headers)
             if 'apps' in json:
@@ -735,7 +780,7 @@ def list(self,
 
 
 class PublishedAppIntegrationAPI:
-    """These APIs enable administrators to manage published oauth app integrations, which is required for
+    """These APIs enable administrators to manage published OAuth app integrations, which is required for
     adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud."""
 
     def __init__(self, api_client):
@@ -750,10 +795,10 @@ def create(
         
         Create Published OAuth App Integration.
         
-        You can retrieve the published oauth app integration via :method:PublishedAppIntegration/get.
+        You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get.
         
         :param app_id: str (optional)
-          app_id of the oauth published app integration. For example power-bi, tableau-deskop
+          App id of the OAuth published app integration. For example power-bi, tableau-desktop
         :param token_access_policy: :class:`TokenAccessPolicy` (optional)
           Token access policy
         
@@ -773,11 +818,10 @@ def create(
     def delete(self, integration_id: str):
         """Delete Published OAuth App Integration.
         
-        Delete an existing Published OAuth App Integration. You can retrieve the published oauth app
+        Delete an existing Published OAuth App Integration. You can retrieve the published OAuth app
         integration via :method:PublishedAppIntegration/get.
         
         :param integration_id: str
-          The oauth app integration ID.
         
         
         """
@@ -795,7 +839,6 @@ def get(self, integration_id: str) -> GetPublishedAppIntegrationOutput:
         Gets the Published OAuth App Integration for the given integration id.
         
         :param integration_id: str
-          The oauth app integration ID.
         
         :returns: :class:`GetPublishedAppIntegrationOutput`
         """
@@ -808,32 +851,46 @@ def get(self, integration_id: str) -> GetPublishedAppIntegrationOutput:
             headers=headers)
         return GetPublishedAppIntegrationOutput.from_dict(res)
 
-    def list(self) -> Iterator[GetPublishedAppIntegrationOutput]:
+    def list(self,
+             *,
+             page_size: Optional[int] = None,
+             page_token: Optional[str] = None) -> Iterator[GetPublishedAppIntegrationOutput]:
         """Get published oauth app integrations.
         
-        Get the list of published oauth app integrations for the specified Databricks account
+        Get the list of published OAuth app integrations for the specified Databricks account
+        
+        :param page_size: int (optional)
+        :param page_token: str (optional)
         
         :returns: Iterator over :class:`GetPublishedAppIntegrationOutput`
         """
 
+        query = {}
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
-        json = self._api.do('GET',
-                            f'/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations',
-                            headers=headers)
-        parsed = GetPublishedAppIntegrationsOutput.from_dict(json).apps
-        return parsed if parsed is not None else []
+        while True:
+            json = self._api.do('GET',
+                                f'/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations',
+                                query=query,
+                                headers=headers)
+            if 'apps' in json:
+                for v in json['apps']:
+                    yield GetPublishedAppIntegrationOutput.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
 
     def update(self, integration_id: str, *, token_access_policy: Optional[TokenAccessPolicy] = None):
         """Updates Published OAuth App Integration.
         
-        Updates an existing published OAuth App Integration. You can retrieve the published oauth app
+        Updates an existing published OAuth App Integration. You can retrieve the published OAuth app
         integration via :method:PublishedAppIntegration/get.
         
         :param integration_id: str
-          The oauth app integration ID.
         :param token_access_policy: :class:`TokenAccessPolicy` (optional)
-          Token access policy to be updated in the published oauth app integration
+          Token access policy to be updated in the published OAuth app integration
         
         
         """
diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py
index bba59811d..9c12f8788 100755
--- a/databricks/sdk/service/pipelines.py
+++ b/databricks/sdk/service/pipelines.py
@@ -25,6 +25,9 @@ class CreatePipeline:
     allow_duplicate_names: Optional[bool] = None
     """If false, deployment will fail if name conflicts with that of another pipeline."""
 
+    budget_policy_id: Optional[str] = None
+    """Budget policy of this pipeline."""
+
     catalog: Optional[str] = None
     """A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified,
     tables in this pipeline are published to a `target` schema inside `catalog` (for example,
@@ -63,7 +66,7 @@ class CreatePipeline:
     id: Optional[str] = None
     """Unique identifier for this pipeline."""
 
-    ingestion_definition: Optional[ManagedIngestionPipelineDefinition] = None
+    ingestion_definition: Optional[IngestionPipelineDefinition] = None
     """The configuration for a managed ingestion pipeline. These settings cannot be used with the
     'libraries', 'target' or 'catalog' settings."""
 
@@ -79,6 +82,10 @@ class CreatePipeline:
     photon: Optional[bool] = None
     """Whether Photon is enabled for this pipeline."""
 
+    schema: Optional[str] = None
+    """The default schema (database) where tables are read from or published to. The presence of this
+    field implies that the pipeline is in direct publishing mode."""
+
     serverless: Optional[bool] = None
     """Whether serverless compute is enabled for this pipeline."""
 
@@ -97,6 +104,7 @@ def as_dict(self) -> dict:
         """Serializes the CreatePipeline into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.allow_duplicate_names is not None: body['allow_duplicate_names'] = self.allow_duplicate_names
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
         if self.catalog is not None: body['catalog'] = self.catalog
         if self.channel is not None: body['channel'] = self.channel
         if self.clusters: body['clusters'] = [v.as_dict() for v in self.clusters]
@@ -114,6 +122,7 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications]
         if self.photon is not None: body['photon'] = self.photon
+        if self.schema is not None: body['schema'] = self.schema
         if self.serverless is not None: body['serverless'] = self.serverless
         if self.storage is not None: body['storage'] = self.storage
         if self.target is not None: body['target'] = self.target
@@ -124,6 +133,7 @@ def as_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, any]) -> CreatePipeline:
         """Deserializes the CreatePipeline from a dictionary."""
         return cls(allow_duplicate_names=d.get('allow_duplicate_names', None),
+                   budget_policy_id=d.get('budget_policy_id', None),
                    catalog=d.get('catalog', None),
                    channel=d.get('channel', None),
                    clusters=_repeated_dict(d, 'clusters', PipelineCluster),
@@ -136,12 +146,12 @@ def from_dict(cls, d: Dict[str, any]) -> CreatePipeline:
                    filters=_from_dict(d, 'filters', Filters),
                    gateway_definition=_from_dict(d, 'gateway_definition', IngestionGatewayPipelineDefinition),
                    id=d.get('id', None),
-                   ingestion_definition=_from_dict(d, 'ingestion_definition',
-                                                   ManagedIngestionPipelineDefinition),
+                   ingestion_definition=_from_dict(d, 'ingestion_definition', IngestionPipelineDefinition),
                    libraries=_repeated_dict(d, 'libraries', PipelineLibrary),
                    name=d.get('name', None),
                    notifications=_repeated_dict(d, 'notifications', Notifications),
                    photon=d.get('photon', None),
+                   schema=d.get('schema', None),
                    serverless=d.get('serverless', None),
                    storage=d.get('storage', None),
                    target=d.get('target', None),
@@ -237,6 +247,9 @@ class EditPipeline:
     allow_duplicate_names: Optional[bool] = None
     """If false, deployment will fail if name has changed and conflicts the name of another pipeline."""
 
+    budget_policy_id: Optional[str] = None
+    """Budget policy of this pipeline."""
+
     catalog: Optional[str] = None
     """A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified,
     tables in this pipeline are published to a `target` schema inside `catalog` (for example,
@@ -277,7 +290,7 @@ class EditPipeline:
     id: Optional[str] = None
     """Unique identifier for this pipeline."""
 
-    ingestion_definition: Optional[ManagedIngestionPipelineDefinition] = None
+    ingestion_definition: Optional[IngestionPipelineDefinition] = None
     """The configuration for a managed ingestion pipeline. These settings cannot be used with the
     'libraries', 'target' or 'catalog' settings."""
 
@@ -296,6 +309,10 @@ class EditPipeline:
     pipeline_id: Optional[str] = None
     """Unique identifier for this pipeline."""
 
+    schema: Optional[str] = None
+    """The default schema (database) where tables are read from or published to. The presence of this
+    field implies that the pipeline is in direct publishing mode."""
+
     serverless: Optional[bool] = None
     """Whether serverless compute is enabled for this pipeline."""
 
@@ -314,6 +331,7 @@ def as_dict(self) -> dict:
         """Serializes the EditPipeline into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.allow_duplicate_names is not None: body['allow_duplicate_names'] = self.allow_duplicate_names
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
         if self.catalog is not None: body['catalog'] = self.catalog
         if self.channel is not None: body['channel'] = self.channel
         if self.clusters: body['clusters'] = [v.as_dict() for v in self.clusters]
@@ -333,6 +351,7 @@ def as_dict(self) -> dict:
         if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications]
         if self.photon is not None: body['photon'] = self.photon
         if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.schema is not None: body['schema'] = self.schema
         if self.serverless is not None: body['serverless'] = self.serverless
         if self.storage is not None: body['storage'] = self.storage
         if self.target is not None: body['target'] = self.target
@@ -343,6 +362,7 @@ def as_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, any]) -> EditPipeline:
         """Deserializes the EditPipeline from a dictionary."""
         return cls(allow_duplicate_names=d.get('allow_duplicate_names', None),
+                   budget_policy_id=d.get('budget_policy_id', None),
                    catalog=d.get('catalog', None),
                    channel=d.get('channel', None),
                    clusters=_repeated_dict(d, 'clusters', PipelineCluster),
@@ -355,13 +375,13 @@ def from_dict(cls, d: Dict[str, any]) -> EditPipeline:
                    filters=_from_dict(d, 'filters', Filters),
                    gateway_definition=_from_dict(d, 'gateway_definition', IngestionGatewayPipelineDefinition),
                    id=d.get('id', None),
-                   ingestion_definition=_from_dict(d, 'ingestion_definition',
-                                                   ManagedIngestionPipelineDefinition),
+                   ingestion_definition=_from_dict(d, 'ingestion_definition', IngestionPipelineDefinition),
                    libraries=_repeated_dict(d, 'libraries', PipelineLibrary),
                    name=d.get('name', None),
                    notifications=_repeated_dict(d, 'notifications', Notifications),
                    photon=d.get('photon', None),
                    pipeline_id=d.get('pipeline_id', None),
+                   schema=d.get('schema', None),
                    serverless=d.get('serverless', None),
                    storage=d.get('storage', None),
                    target=d.get('target', None),
@@ -479,6 +499,9 @@ class GetPipelineResponse:
     creator_user_name: Optional[str] = None
     """The username of the pipeline creator."""
 
+    effective_budget_policy_id: Optional[str] = None
+    """Serverless budget policy ID of this pipeline."""
+
     health: Optional[GetPipelineResponseHealth] = None
     """The health of a pipeline."""
 
@@ -509,6 +532,8 @@ def as_dict(self) -> dict:
         if self.cause is not None: body['cause'] = self.cause
         if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
         if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.effective_budget_policy_id is not None:
+            body['effective_budget_policy_id'] = self.effective_budget_policy_id
         if self.health is not None: body['health'] = self.health.value
         if self.last_modified is not None: body['last_modified'] = self.last_modified
         if self.latest_updates: body['latest_updates'] = [v.as_dict() for v in self.latest_updates]
@@ -525,6 +550,7 @@ def from_dict(cls, d: Dict[str, any]) -> GetPipelineResponse:
         return cls(cause=d.get('cause', None),
                    cluster_id=d.get('cluster_id', None),
                    creator_user_name=d.get('creator_user_name', None),
+                   effective_budget_policy_id=d.get('effective_budget_policy_id', None),
                    health=_enum(d, 'health', GetPipelineResponseHealth),
                    last_modified=d.get('last_modified', None),
                    latest_updates=_repeated_dict(d, 'latest_updates', UpdateStateInfo),
@@ -561,6 +587,9 @@ def from_dict(cls, d: Dict[str, any]) -> GetUpdateResponse:
 
 @dataclass
 class IngestionConfig:
+    report: Optional[ReportSpec] = None
+    """Select tables from a specific source report."""
+
     schema: Optional[SchemaSpec] = None
     """Select tables from a specific source schema."""
 
@@ -570,6 +599,7 @@ class IngestionConfig:
     def as_dict(self) -> dict:
         """Serializes the IngestionConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.report: body['report'] = self.report.as_dict()
         if self.schema: body['schema'] = self.schema.as_dict()
         if self.table: body['table'] = self.table.as_dict()
         return body
@@ -577,7 +607,9 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> IngestionConfig:
         """Deserializes the IngestionConfig from a dictionary."""
-        return cls(schema=_from_dict(d, 'schema', SchemaSpec), table=_from_dict(d, 'table', TableSpec))
+        return cls(report=_from_dict(d, 'report', ReportSpec),
+                   schema=_from_dict(d, 'schema', SchemaSpec),
+                   table=_from_dict(d, 'table', TableSpec))
 
 
 @dataclass
@@ -590,7 +622,7 @@ class IngestionGatewayPipelineDefinition:
     """Required, Immutable. The name of the catalog for the gateway pipeline's storage location."""
 
     gateway_storage_name: Optional[str] = None
-    """Required. The Unity Catalog-compatible naming for the gateway storage location. This is the
+    """Optional. The Unity Catalog-compatible name for the gateway storage location. This is the
     destination to use for the data that is extracted by the gateway. Delta Live Tables system will
     automatically create the storage location under the catalog and schema."""
 
@@ -617,6 +649,41 @@ def from_dict(cls, d: Dict[str, any]) -> IngestionGatewayPipelineDefinition:
                    gateway_storage_schema=d.get('gateway_storage_schema', None))
 
 
+@dataclass
+class IngestionPipelineDefinition:
+    connection_name: Optional[str] = None
+    """Immutable. The Unity Catalog connection this ingestion pipeline uses to communicate with the
+    source. Specify either ingestion_gateway_id or connection_name."""
+
+    ingestion_gateway_id: Optional[str] = None
+    """Immutable. Identifier for the ingestion gateway used by this ingestion pipeline to communicate
+    with the source. Specify either ingestion_gateway_id or connection_name."""
+
+    objects: Optional[List[IngestionConfig]] = None
+    """Required. Settings specifying tables to replicate and the destination for the replicated tables."""
+
+    table_configuration: Optional[TableSpecificConfig] = None
+    """Configuration settings to control the ingestion of tables. These settings are applied to all
+    tables in the pipeline."""
+
+    def as_dict(self) -> dict:
+        """Serializes the IngestionPipelineDefinition into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.connection_name is not None: body['connection_name'] = self.connection_name
+        if self.ingestion_gateway_id is not None: body['ingestion_gateway_id'] = self.ingestion_gateway_id
+        if self.objects: body['objects'] = [v.as_dict() for v in self.objects]
+        if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> IngestionPipelineDefinition:
+        """Deserializes the IngestionPipelineDefinition from a dictionary."""
+        return cls(connection_name=d.get('connection_name', None),
+                   ingestion_gateway_id=d.get('ingestion_gateway_id', None),
+                   objects=_repeated_dict(d, 'objects', IngestionConfig),
+                   table_configuration=_from_dict(d, 'table_configuration', TableSpecificConfig))
+
+
 @dataclass
 class ListPipelineEventsResponse:
     events: Optional[List[PipelineEvent]] = None
@@ -693,41 +760,6 @@ def from_dict(cls, d: Dict[str, any]) -> ListUpdatesResponse:
                    updates=_repeated_dict(d, 'updates', UpdateInfo))
 
 
-@dataclass
-class ManagedIngestionPipelineDefinition:
-    connection_name: Optional[str] = None
-    """Immutable. The Unity Catalog connection this ingestion pipeline uses to communicate with the
-    source. Specify either ingestion_gateway_id or connection_name."""
-
-    ingestion_gateway_id: Optional[str] = None
-    """Immutable. Identifier for the ingestion gateway used by this ingestion pipeline to communicate
-    with the source. Specify either ingestion_gateway_id or connection_name."""
-
-    objects: Optional[List[IngestionConfig]] = None
-    """Required. Settings specifying tables to replicate and the destination for the replicated tables."""
-
-    table_configuration: Optional[TableSpecificConfig] = None
-    """Configuration settings to control the ingestion of tables. These settings are applied to all
-    tables in the pipeline."""
-
-    def as_dict(self) -> dict:
-        """Serializes the ManagedIngestionPipelineDefinition into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.connection_name is not None: body['connection_name'] = self.connection_name
-        if self.ingestion_gateway_id is not None: body['ingestion_gateway_id'] = self.ingestion_gateway_id
-        if self.objects: body['objects'] = [v.as_dict() for v in self.objects]
-        if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict()
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> ManagedIngestionPipelineDefinition:
-        """Deserializes the ManagedIngestionPipelineDefinition from a dictionary."""
-        return cls(connection_name=d.get('connection_name', None),
-                   ingestion_gateway_id=d.get('ingestion_gateway_id', None),
-                   objects=_repeated_dict(d, 'objects', IngestionConfig),
-                   table_configuration=_from_dict(d, 'table_configuration', TableSpecificConfig))
-
-
 @dataclass
 class ManualTrigger:
 
@@ -1003,6 +1035,9 @@ class PipelineCluster:
     """The node type of the Spark driver. Note that this field is optional; if unset, the driver node
     type will be set as the same value as `node_type_id` defined above."""
 
+    enable_local_disk_encryption: Optional[bool] = None
+    """Whether to enable local disk encryption for the cluster."""
+
     gcp_attributes: Optional[compute.GcpAttributes] = None
     """Attributes related to clusters running on Google Cloud Platform. If not specified at cluster
     creation, a set of default values will be used."""
@@ -1074,6 +1109,8 @@ def as_dict(self) -> dict:
         if self.driver_instance_pool_id is not None:
             body['driver_instance_pool_id'] = self.driver_instance_pool_id
         if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
+        if self.enable_local_disk_encryption is not None:
+            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
         if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
         if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts]
         if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
@@ -1097,6 +1134,7 @@ def from_dict(cls, d: Dict[str, any]) -> PipelineCluster:
                    custom_tags=d.get('custom_tags', None),
                    driver_instance_pool_id=d.get('driver_instance_pool_id', None),
                    driver_node_type_id=d.get('driver_node_type_id', None),
+                   enable_local_disk_encryption=d.get('enable_local_disk_encryption', None),
                    gcp_attributes=_from_dict(d, 'gcp_attributes', compute.GcpAttributes),
                    init_scripts=_repeated_dict(d, 'init_scripts', compute.InitScriptInfo),
                    instance_pool_id=d.get('instance_pool_id', None),
@@ -1244,6 +1282,9 @@ class PipelineLibrary:
     notebook: Optional[NotebookLibrary] = None
     """The path to a notebook that defines a pipeline and is stored in the Databricks workspace."""
 
+    whl: Optional[str] = None
+    """URI of the whl to be installed."""
+
     def as_dict(self) -> dict:
         """Serializes the PipelineLibrary into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -1251,6 +1292,7 @@ def as_dict(self) -> dict:
         if self.jar is not None: body['jar'] = self.jar
         if self.maven: body['maven'] = self.maven.as_dict()
         if self.notebook: body['notebook'] = self.notebook.as_dict()
+        if self.whl is not None: body['whl'] = self.whl
         return body
 
     @classmethod
@@ -1259,7 +1301,8 @@ def from_dict(cls, d: Dict[str, any]) -> PipelineLibrary:
         return cls(file=_from_dict(d, 'file', FileLibrary),
                    jar=d.get('jar', None),
                    maven=_from_dict(d, 'maven', compute.MavenLibrary),
-                   notebook=_from_dict(d, 'notebook', NotebookLibrary))
+                   notebook=_from_dict(d, 'notebook', NotebookLibrary),
+                   whl=d.get('whl', None))
 
 
 @dataclass
@@ -1367,6 +1410,9 @@ def from_dict(cls, d: Dict[str, any]) -> PipelinePermissionsRequest:
 
 @dataclass
 class PipelineSpec:
+    budget_policy_id: Optional[str] = None
+    """Budget policy of this pipeline."""
+
     catalog: Optional[str] = None
     """A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified,
     tables in this pipeline are published to a `target` schema inside `catalog` (for example,
@@ -1403,7 +1449,7 @@ class PipelineSpec:
     id: Optional[str] = None
     """Unique identifier for this pipeline."""
 
-    ingestion_definition: Optional[ManagedIngestionPipelineDefinition] = None
+    ingestion_definition: Optional[IngestionPipelineDefinition] = None
     """The configuration for a managed ingestion pipeline. These settings cannot be used with the
     'libraries', 'target' or 'catalog' settings."""
 
@@ -1419,6 +1465,10 @@ class PipelineSpec:
     photon: Optional[bool] = None
     """Whether Photon is enabled for this pipeline."""
 
+    schema: Optional[str] = None
+    """The default schema (database) where tables are read from or published to. The presence of this
+    field implies that the pipeline is in direct publishing mode."""
+
     serverless: Optional[bool] = None
     """Whether serverless compute is enabled for this pipeline."""
 
@@ -1436,6 +1486,7 @@ class PipelineSpec:
     def as_dict(self) -> dict:
         """Serializes the PipelineSpec into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
         if self.catalog is not None: body['catalog'] = self.catalog
         if self.channel is not None: body['channel'] = self.channel
         if self.clusters: body['clusters'] = [v.as_dict() for v in self.clusters]
@@ -1452,6 +1503,7 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications]
         if self.photon is not None: body['photon'] = self.photon
+        if self.schema is not None: body['schema'] = self.schema
         if self.serverless is not None: body['serverless'] = self.serverless
         if self.storage is not None: body['storage'] = self.storage
         if self.target is not None: body['target'] = self.target
@@ -1461,7 +1513,8 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineSpec:
         """Deserializes the PipelineSpec from a dictionary."""
-        return cls(catalog=d.get('catalog', None),
+        return cls(budget_policy_id=d.get('budget_policy_id', None),
+                   catalog=d.get('catalog', None),
                    channel=d.get('channel', None),
                    clusters=_repeated_dict(d, 'clusters', PipelineCluster),
                    configuration=d.get('configuration', None),
@@ -1472,12 +1525,12 @@ def from_dict(cls, d: Dict[str, any]) -> PipelineSpec:
                    filters=_from_dict(d, 'filters', Filters),
                    gateway_definition=_from_dict(d, 'gateway_definition', IngestionGatewayPipelineDefinition),
                    id=d.get('id', None),
-                   ingestion_definition=_from_dict(d, 'ingestion_definition',
-                                                   ManagedIngestionPipelineDefinition),
+                   ingestion_definition=_from_dict(d, 'ingestion_definition', IngestionPipelineDefinition),
                    libraries=_repeated_dict(d, 'libraries', PipelineLibrary),
                    name=d.get('name', None),
                    notifications=_repeated_dict(d, 'notifications', Notifications),
                    photon=d.get('photon', None),
+                   schema=d.get('schema', None),
                    serverless=d.get('serverless', None),
                    storage=d.get('storage', None),
                    target=d.get('target', None),
@@ -1506,6 +1559,9 @@ class PipelineStateInfo:
     creator_user_name: Optional[str] = None
     """The username of the pipeline creator."""
 
+    health: Optional[PipelineStateInfoHealth] = None
+    """The health of a pipeline."""
+
     latest_updates: Optional[List[UpdateStateInfo]] = None
     """Status of the latest updates for the pipeline. Ordered with the newest update first."""
 
@@ -1527,6 +1583,7 @@ def as_dict(self) -> dict:
         body = {}
         if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
         if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.health is not None: body['health'] = self.health.value
         if self.latest_updates: body['latest_updates'] = [v.as_dict() for v in self.latest_updates]
         if self.name is not None: body['name'] = self.name
         if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
@@ -1539,6 +1596,7 @@ def from_dict(cls, d: Dict[str, any]) -> PipelineStateInfo:
         """Deserializes the PipelineStateInfo from a dictionary."""
         return cls(cluster_id=d.get('cluster_id', None),
                    creator_user_name=d.get('creator_user_name', None),
+                   health=_enum(d, 'health', PipelineStateInfoHealth),
                    latest_updates=_repeated_dict(d, 'latest_updates', UpdateStateInfo),
                    name=d.get('name', None),
                    pipeline_id=d.get('pipeline_id', None),
@@ -1546,6 +1604,13 @@ def from_dict(cls, d: Dict[str, any]) -> PipelineStateInfo:
                    state=_enum(d, 'state', PipelineState))
 
 
+class PipelineStateInfoHealth(Enum):
+    """The health of a pipeline."""
+
+    HEALTHY = 'HEALTHY'
+    UNHEALTHY = 'UNHEALTHY'
+
+
 @dataclass
 class PipelineTrigger:
     cron: Optional[CronTrigger] = None
@@ -1565,6 +1630,44 @@ def from_dict(cls, d: Dict[str, any]) -> PipelineTrigger:
         return cls(cron=_from_dict(d, 'cron', CronTrigger), manual=_from_dict(d, 'manual', ManualTrigger))
 
 
+@dataclass
+class ReportSpec:
+    destination_catalog: Optional[str] = None
+    """Required. Destination catalog to store table."""
+
+    destination_schema: Optional[str] = None
+    """Required. Destination schema to store table."""
+
+    destination_table: Optional[str] = None
+    """Required. Destination table name. The pipeline fails if a table with that name already exists."""
+
+    source_url: Optional[str] = None
+    """Required. Report URL in the source system."""
+
+    table_configuration: Optional[TableSpecificConfig] = None
+    """Configuration settings to control the ingestion of tables. These settings override the
+    table_configuration defined in the IngestionPipelineDefinition object."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ReportSpec into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.destination_catalog is not None: body['destination_catalog'] = self.destination_catalog
+        if self.destination_schema is not None: body['destination_schema'] = self.destination_schema
+        if self.destination_table is not None: body['destination_table'] = self.destination_table
+        if self.source_url is not None: body['source_url'] = self.source_url
+        if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ReportSpec:
+        """Deserializes the ReportSpec from a dictionary."""
+        return cls(destination_catalog=d.get('destination_catalog', None),
+                   destination_schema=d.get('destination_schema', None),
+                   destination_table=d.get('destination_table', None),
+                   source_url=d.get('source_url', None),
+                   table_configuration=_from_dict(d, 'table_configuration', TableSpecificConfig))
+
+
 @dataclass
 class SchemaSpec:
     destination_catalog: Optional[str] = None
@@ -1584,7 +1687,7 @@ class SchemaSpec:
     table_configuration: Optional[TableSpecificConfig] = None
     """Configuration settings to control the ingestion of tables. These settings are applied to all
     tables in this schema and override the table_configuration defined in the
-    ManagedIngestionPipelineDefinition object."""
+    IngestionPipelineDefinition object."""
 
     def as_dict(self) -> dict:
         """Serializes the SchemaSpec into a dictionary suitable for use as a JSON request body."""
@@ -1782,7 +1885,7 @@ class TableSpec:
     """Required. Destination schema to store table."""
 
     destination_table: Optional[str] = None
-    """Optional. Destination table name. The pipeline fails If a table with that name already exists.
+    """Optional. Destination table name. The pipeline fails if a table with that name already exists.
     If not set, the source table name is used."""
 
     source_catalog: Optional[str] = None
@@ -1796,7 +1899,7 @@ class TableSpec:
 
     table_configuration: Optional[TableSpecificConfig] = None
     """Configuration settings to control the ingestion of tables. These settings override the
-    table_configuration defined in the ManagedIngestionPipelineDefinition object and the SchemaSpec."""
+    table_configuration defined in the IngestionPipelineDefinition object and the SchemaSpec."""
 
     def as_dict(self) -> dict:
         """Serializes the TableSpec into a dictionary suitable for use as a JSON request body."""
@@ -1834,6 +1937,10 @@ class TableSpecificConfig:
     scd_type: Optional[TableSpecificConfigScdType] = None
     """The SCD type to use to ingest the table."""
 
+    sequence_by: Optional[List[str]] = None
+    """The column names specifying the logical order of events in the source data. Delta Live Tables
+    uses this sequencing to handle change events that arrive out of order."""
+
     def as_dict(self) -> dict:
         """Serializes the TableSpecificConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -1841,6 +1948,7 @@ def as_dict(self) -> dict:
         if self.salesforce_include_formula_fields is not None:
             body['salesforce_include_formula_fields'] = self.salesforce_include_formula_fields
         if self.scd_type is not None: body['scd_type'] = self.scd_type.value
+        if self.sequence_by: body['sequence_by'] = [v for v in self.sequence_by]
         return body
 
     @classmethod
@@ -1848,7 +1956,8 @@ def from_dict(cls, d: Dict[str, any]) -> TableSpecificConfig:
         """Deserializes the TableSpecificConfig from a dictionary."""
         return cls(primary_keys=d.get('primary_keys', None),
                    salesforce_include_formula_fields=d.get('salesforce_include_formula_fields', None),
-                   scd_type=_enum(d, 'scd_type', TableSpecificConfigScdType))
+                   scd_type=_enum(d, 'scd_type', TableSpecificConfigScdType),
+                   sequence_by=d.get('sequence_by', None))
 
 
 class TableSpecificConfigScdType(Enum):
@@ -2078,6 +2187,7 @@ def wait_get_pipeline_running(
     def create(self,
                *,
                allow_duplicate_names: Optional[bool] = None,
+               budget_policy_id: Optional[str] = None,
                catalog: Optional[str] = None,
                channel: Optional[str] = None,
                clusters: Optional[List[PipelineCluster]] = None,
@@ -2090,11 +2200,12 @@ def create(self,
                filters: Optional[Filters] = None,
                gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None,
                id: Optional[str] = None,
-               ingestion_definition: Optional[ManagedIngestionPipelineDefinition] = None,
+               ingestion_definition: Optional[IngestionPipelineDefinition] = None,
                libraries: Optional[List[PipelineLibrary]] = None,
                name: Optional[str] = None,
                notifications: Optional[List[Notifications]] = None,
                photon: Optional[bool] = None,
+               schema: Optional[str] = None,
                serverless: Optional[bool] = None,
                storage: Optional[str] = None,
                target: Optional[str] = None,
@@ -2106,6 +2217,8 @@ def create(self,
         
         :param allow_duplicate_names: bool (optional)
           If false, deployment will fail if name conflicts with that of another pipeline.
+        :param budget_policy_id: str (optional)
+          Budget policy of this pipeline.
         :param catalog: str (optional)
           A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified, tables
           in this pipeline are published to a `target` schema inside `catalog` (for example,
@@ -2131,7 +2244,7 @@ def create(self,
           The definition of a gateway pipeline to support CDC.
         :param id: str (optional)
           Unique identifier for this pipeline.
-        :param ingestion_definition: :class:`ManagedIngestionPipelineDefinition` (optional)
+        :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional)
           The configuration for a managed ingestion pipeline. These settings cannot be used with the
           'libraries', 'target' or 'catalog' settings.
         :param libraries: List[:class:`PipelineLibrary`] (optional)
@@ -2142,6 +2255,9 @@ def create(self,
           List of notification settings for this pipeline.
         :param photon: bool (optional)
           Whether Photon is enabled for this pipeline.
+        :param schema: str (optional)
+          The default schema (database) where tables are read from or published to. The presence of this field
+          implies that the pipeline is in direct publishing mode.
         :param serverless: bool (optional)
           Whether serverless compute is enabled for this pipeline.
         :param storage: str (optional)
@@ -2156,6 +2272,7 @@ def create(self,
         """
         body = {}
         if allow_duplicate_names is not None: body['allow_duplicate_names'] = allow_duplicate_names
+        if budget_policy_id is not None: body['budget_policy_id'] = budget_policy_id
         if catalog is not None: body['catalog'] = catalog
         if channel is not None: body['channel'] = channel
         if clusters is not None: body['clusters'] = [v.as_dict() for v in clusters]
@@ -2173,6 +2290,7 @@ def create(self,
         if name is not None: body['name'] = name
         if notifications is not None: body['notifications'] = [v.as_dict() for v in notifications]
         if photon is not None: body['photon'] = photon
+        if schema is not None: body['schema'] = schema
         if serverless is not None: body['serverless'] = serverless
         if storage is not None: body['storage'] = storage
         if target is not None: body['target'] = target
@@ -2486,6 +2604,7 @@ def update(self,
                pipeline_id: str,
                *,
                allow_duplicate_names: Optional[bool] = None,
+               budget_policy_id: Optional[str] = None,
                catalog: Optional[str] = None,
                channel: Optional[str] = None,
                clusters: Optional[List[PipelineCluster]] = None,
@@ -2498,11 +2617,12 @@ def update(self,
                filters: Optional[Filters] = None,
                gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None,
                id: Optional[str] = None,
-               ingestion_definition: Optional[ManagedIngestionPipelineDefinition] = None,
+               ingestion_definition: Optional[IngestionPipelineDefinition] = None,
                libraries: Optional[List[PipelineLibrary]] = None,
                name: Optional[str] = None,
                notifications: Optional[List[Notifications]] = None,
                photon: Optional[bool] = None,
+               schema: Optional[str] = None,
                serverless: Optional[bool] = None,
                storage: Optional[str] = None,
                target: Optional[str] = None,
@@ -2515,6 +2635,8 @@ def update(self,
           Unique identifier for this pipeline.
         :param allow_duplicate_names: bool (optional)
           If false, deployment will fail if name has changed and conflicts the name of another pipeline.
+        :param budget_policy_id: str (optional)
+          Budget policy of this pipeline.
         :param catalog: str (optional)
           A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified, tables
           in this pipeline are published to a `target` schema inside `catalog` (for example,
@@ -2542,7 +2664,7 @@ def update(self,
           The definition of a gateway pipeline to support CDC.
         :param id: str (optional)
           Unique identifier for this pipeline.
-        :param ingestion_definition: :class:`ManagedIngestionPipelineDefinition` (optional)
+        :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional)
           The configuration for a managed ingestion pipeline. These settings cannot be used with the
           'libraries', 'target' or 'catalog' settings.
         :param libraries: List[:class:`PipelineLibrary`] (optional)
@@ -2553,6 +2675,9 @@ def update(self,
           List of notification settings for this pipeline.
         :param photon: bool (optional)
           Whether Photon is enabled for this pipeline.
+        :param schema: str (optional)
+          The default schema (database) where tables are read from or published to. The presence of this field
+          implies that the pipeline is in direct publishing mode.
         :param serverless: bool (optional)
           Whether serverless compute is enabled for this pipeline.
         :param storage: str (optional)
@@ -2567,6 +2692,7 @@ def update(self,
         """
         body = {}
         if allow_duplicate_names is not None: body['allow_duplicate_names'] = allow_duplicate_names
+        if budget_policy_id is not None: body['budget_policy_id'] = budget_policy_id
         if catalog is not None: body['catalog'] = catalog
         if channel is not None: body['channel'] = channel
         if clusters is not None: body['clusters'] = [v.as_dict() for v in clusters]
@@ -2584,6 +2710,7 @@ def update(self,
         if name is not None: body['name'] = name
         if notifications is not None: body['notifications'] = [v.as_dict() for v in notifications]
         if photon is not None: body['photon'] = photon
+        if schema is not None: body['schema'] = schema
         if serverless is not None: body['serverless'] = serverless
         if storage is not None: body['storage'] = storage
         if target is not None: body['target'] = target
diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py
index 6c39c598d..7639d96fb 100755
--- a/databricks/sdk/service/serving.py
+++ b/databricks/sdk/service/serving.py
@@ -10,6 +10,9 @@
 from enum import Enum
 from typing import Any, BinaryIO, Callable, Dict, Iterator, List, Optional
 
+import requests
+
+from ..data_plane import DataPlaneService
 from ..errors import OperationFailed
 from ._internal import Wait, _enum, _from_dict, _repeated_dict
 
@@ -22,311 +25,339 @@
 
 @dataclass
 class Ai21LabsConfig:
-    ai21labs_api_key: str
-    """The Databricks secret key reference for an AI21Labs API key."""
+    ai21labs_api_key: Optional[str] = None
+    """The Databricks secret key reference for an AI21 Labs API key. If you prefer to paste your API
+    key directly, see `ai21labs_api_key_plaintext`. You must provide an API key using one of the
+    following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`."""
+
+    ai21labs_api_key_plaintext: Optional[str] = None
+    """An AI21 Labs API key provided as a plaintext string. If you prefer to reference your key using
+    Databricks Secrets, see `ai21labs_api_key`. You must provide an API key using one of the
+    following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`."""
 
     def as_dict(self) -> dict:
         """Serializes the Ai21LabsConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.ai21labs_api_key is not None: body['ai21labs_api_key'] = self.ai21labs_api_key
+        if self.ai21labs_api_key_plaintext is not None:
+            body['ai21labs_api_key_plaintext'] = self.ai21labs_api_key_plaintext
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Ai21LabsConfig:
         """Deserializes the Ai21LabsConfig from a dictionary."""
-        return cls(ai21labs_api_key=d.get('ai21labs_api_key', None))
+        return cls(ai21labs_api_key=d.get('ai21labs_api_key', None),
+                   ai21labs_api_key_plaintext=d.get('ai21labs_api_key_plaintext', None))
 
 
 @dataclass
-class AmazonBedrockConfig:
-    aws_region: str
-    """The AWS region to use. Bedrock has to be enabled there."""
+class AiGatewayConfig:
+    guardrails: Optional[AiGatewayGuardrails] = None
+    """Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and
+    responses."""
 
-    aws_access_key_id: str
-    """The Databricks secret key reference for an AWS Access Key ID with permissions to interact with
-    Bedrock services."""
+    inference_table_config: Optional[AiGatewayInferenceTableConfig] = None
+    """Configuration for payload logging using inference tables. Use these tables to monitor and audit
+    data being sent to and received from model APIs and to improve model quality."""
 
-    aws_secret_access_key: str
-    """The Databricks secret key reference for an AWS Secret Access Key paired with the access key ID,
-    with permissions to interact with Bedrock services."""
+    rate_limits: Optional[List[AiGatewayRateLimit]] = None
+    """Configuration for rate limits which can be set to limit endpoint traffic."""
 
-    bedrock_provider: AmazonBedrockConfigBedrockProvider
-    """The underlying provider in Amazon Bedrock. Supported values (case insensitive) include:
-    Anthropic, Cohere, AI21Labs, Amazon."""
+    usage_tracking_config: Optional[AiGatewayUsageTrackingConfig] = None
+    """Configuration to enable usage tracking using system tables. These tables allow you to monitor
+    operational usage on endpoints and their associated costs."""
 
     def as_dict(self) -> dict:
-        """Serializes the AmazonBedrockConfig into a dictionary suitable for use as a JSON request body."""
+        """Serializes the AiGatewayConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_access_key_id is not None: body['aws_access_key_id'] = self.aws_access_key_id
-        if self.aws_region is not None: body['aws_region'] = self.aws_region
-        if self.aws_secret_access_key is not None: body['aws_secret_access_key'] = self.aws_secret_access_key
-        if self.bedrock_provider is not None: body['bedrock_provider'] = self.bedrock_provider.value
+        if self.guardrails: body['guardrails'] = self.guardrails.as_dict()
+        if self.inference_table_config: body['inference_table_config'] = self.inference_table_config.as_dict()
+        if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits]
+        if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config.as_dict()
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> AmazonBedrockConfig:
-        """Deserializes the AmazonBedrockConfig from a dictionary."""
-        return cls(aws_access_key_id=d.get('aws_access_key_id', None),
-                   aws_region=d.get('aws_region', None),
-                   aws_secret_access_key=d.get('aws_secret_access_key', None),
-                   bedrock_provider=_enum(d, 'bedrock_provider', AmazonBedrockConfigBedrockProvider))
+    def from_dict(cls, d: Dict[str, any]) -> AiGatewayConfig:
+        """Deserializes the AiGatewayConfig from a dictionary."""
+        return cls(guardrails=_from_dict(d, 'guardrails', AiGatewayGuardrails),
+                   inference_table_config=_from_dict(d, 'inference_table_config',
+                                                     AiGatewayInferenceTableConfig),
+                   rate_limits=_repeated_dict(d, 'rate_limits', AiGatewayRateLimit),
+                   usage_tracking_config=_from_dict(d, 'usage_tracking_config', AiGatewayUsageTrackingConfig))
 
 
-class AmazonBedrockConfigBedrockProvider(Enum):
-    """The underlying provider in Amazon Bedrock. Supported values (case insensitive) include:
-    Anthropic, Cohere, AI21Labs, Amazon."""
+@dataclass
+class AiGatewayGuardrailParameters:
+    invalid_keywords: Optional[List[str]] = None
+    """List of invalid keywords. AI guardrail uses keyword or string matching to decide if the keyword
+    exists in the request or response content."""
 
-    AI21LABS = 'ai21labs'
-    AMAZON = 'amazon'
-    ANTHROPIC = 'anthropic'
-    COHERE = 'cohere'
+    pii: Optional[AiGatewayGuardrailPiiBehavior] = None
+    """Configuration for guardrail PII filter."""
 
+    safety: Optional[bool] = None
+    """Indicates whether the safety filter is enabled."""
 
-@dataclass
-class AnthropicConfig:
-    anthropic_api_key: str
-    """The Databricks secret key reference for an Anthropic API key."""
+    valid_topics: Optional[List[str]] = None
+    """The list of allowed topics. Given a chat request, this guardrail flags the request if its topic
+    is not in the allowed topics."""
 
     def as_dict(self) -> dict:
-        """Serializes the AnthropicConfig into a dictionary suitable for use as a JSON request body."""
+        """Serializes the AiGatewayGuardrailParameters into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.anthropic_api_key is not None: body['anthropic_api_key'] = self.anthropic_api_key
+        if self.invalid_keywords: body['invalid_keywords'] = [v for v in self.invalid_keywords]
+        if self.pii: body['pii'] = self.pii.as_dict()
+        if self.safety is not None: body['safety'] = self.safety
+        if self.valid_topics: body['valid_topics'] = [v for v in self.valid_topics]
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> AnthropicConfig:
-        """Deserializes the AnthropicConfig from a dictionary."""
-        return cls(anthropic_api_key=d.get('anthropic_api_key', None))
+    def from_dict(cls, d: Dict[str, any]) -> AiGatewayGuardrailParameters:
+        """Deserializes the AiGatewayGuardrailParameters from a dictionary."""
+        return cls(invalid_keywords=d.get('invalid_keywords', None),
+                   pii=_from_dict(d, 'pii', AiGatewayGuardrailPiiBehavior),
+                   safety=d.get('safety', None),
+                   valid_topics=d.get('valid_topics', None))
 
 
 @dataclass
-class App:
-    name: str
-    """The name of the app. The name must contain only lowercase alphanumeric characters and hyphens.
-    It must be unique within the workspace."""
-
-    active_deployment: Optional[AppDeployment] = None
-    """The active deployment of the app."""
+class AiGatewayGuardrailPiiBehavior:
+    behavior: AiGatewayGuardrailPiiBehaviorBehavior
+    """Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input
+    guardrail and the request contains PII, the request is not sent to the model server and 400
+    status code is returned; if 'BLOCK' is set for the output guardrail and the model response
+    contains PII, the PII info in the response is redacted and 400 status code is returned."""
 
-    create_time: Optional[str] = None
-    """The creation time of the app. Formatted timestamp in ISO 6801."""
-
-    creator: Optional[str] = None
-    """The email of the user that created the app."""
-
-    description: Optional[str] = None
-    """The description of the app."""
+    def as_dict(self) -> dict:
+        """Serializes the AiGatewayGuardrailPiiBehavior into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.behavior is not None: body['behavior'] = self.behavior.value
+        return body
 
-    pending_deployment: Optional[AppDeployment] = None
-    """The pending deployment of the app."""
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AiGatewayGuardrailPiiBehavior:
+        """Deserializes the AiGatewayGuardrailPiiBehavior from a dictionary."""
+        return cls(behavior=_enum(d, 'behavior', AiGatewayGuardrailPiiBehaviorBehavior))
 
-    service_principal_id: Optional[int] = None
 
-    service_principal_name: Optional[str] = None
+class AiGatewayGuardrailPiiBehaviorBehavior(Enum):
+    """Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input
+    guardrail and the request contains PII, the request is not sent to the model server and 400
+    status code is returned; if 'BLOCK' is set for the output guardrail and the model response
+    contains PII, the PII info in the response is redacted and 400 status code is returned."""
 
-    status: Optional[AppStatus] = None
+    BLOCK = 'BLOCK'
+    NONE = 'NONE'
 
-    update_time: Optional[str] = None
-    """The update time of the app. Formatted timestamp in ISO 6801."""
 
-    updater: Optional[str] = None
-    """The email of the user that last updated the app."""
+@dataclass
+class AiGatewayGuardrails:
+    input: Optional[AiGatewayGuardrailParameters] = None
+    """Configuration for input guardrail filters."""
 
-    url: Optional[str] = None
-    """The URL of the app once it is deployed."""
+    output: Optional[AiGatewayGuardrailParameters] = None
+    """Configuration for output guardrail filters."""
 
     def as_dict(self) -> dict:
-        """Serializes the App into a dictionary suitable for use as a JSON request body."""
+        """Serializes the AiGatewayGuardrails into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.active_deployment: body['active_deployment'] = self.active_deployment.as_dict()
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.creator is not None: body['creator'] = self.creator
-        if self.description is not None: body['description'] = self.description
-        if self.name is not None: body['name'] = self.name
-        if self.pending_deployment: body['pending_deployment'] = self.pending_deployment.as_dict()
-        if self.service_principal_id is not None: body['service_principal_id'] = self.service_principal_id
-        if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.status: body['status'] = self.status.as_dict()
-        if self.update_time is not None: body['update_time'] = self.update_time
-        if self.updater is not None: body['updater'] = self.updater
-        if self.url is not None: body['url'] = self.url
+        if self.input: body['input'] = self.input.as_dict()
+        if self.output: body['output'] = self.output.as_dict()
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> App:
-        """Deserializes the App from a dictionary."""
-        return cls(active_deployment=_from_dict(d, 'active_deployment', AppDeployment),
-                   create_time=d.get('create_time', None),
-                   creator=d.get('creator', None),
-                   description=d.get('description', None),
-                   name=d.get('name', None),
-                   pending_deployment=_from_dict(d, 'pending_deployment', AppDeployment),
-                   service_principal_id=d.get('service_principal_id', None),
-                   service_principal_name=d.get('service_principal_name', None),
-                   status=_from_dict(d, 'status', AppStatus),
-                   update_time=d.get('update_time', None),
-                   updater=d.get('updater', None),
-                   url=d.get('url', None))
+    def from_dict(cls, d: Dict[str, any]) -> AiGatewayGuardrails:
+        """Deserializes the AiGatewayGuardrails from a dictionary."""
+        return cls(input=_from_dict(d, 'input', AiGatewayGuardrailParameters),
+                   output=_from_dict(d, 'output', AiGatewayGuardrailParameters))
 
 
 @dataclass
-class AppDeployment:
-    source_code_path: str
-    """The workspace file system path of the source code used to create the app deployment. This is
-    different from `deployment_artifacts.source_code_path`, which is the path used by the deployed
-    app. The former refers to the original source code location of the app in the workspace during
-    deployment creation, whereas the latter provides a system generated stable snapshotted source
-    code path used by the deployment."""
-
-    mode: AppDeploymentMode
-    """The mode of which the deployment will manage the source code."""
-
-    create_time: Optional[str] = None
-    """The creation time of the deployment. Formatted timestamp in ISO 6801."""
-
-    creator: Optional[str] = None
-    """The email of the user creates the deployment."""
-
-    deployment_artifacts: Optional[AppDeploymentArtifacts] = None
-    """The deployment artifacts for an app."""
+class AiGatewayInferenceTableConfig:
+    catalog_name: Optional[str] = None
+    """The name of the catalog in Unity Catalog. Required when enabling inference tables. NOTE: On
+    update, you have to disable inference table first in order to change the catalog name."""
 
-    deployment_id: Optional[str] = None
-    """The unique id of the deployment."""
+    enabled: Optional[bool] = None
+    """Indicates whether the inference table is enabled."""
 
-    status: Optional[AppDeploymentStatus] = None
-    """Status and status message of the deployment"""
+    schema_name: Optional[str] = None
+    """The name of the schema in Unity Catalog. Required when enabling inference tables. NOTE: On
+    update, you have to disable inference table first in order to change the schema name."""
 
-    update_time: Optional[str] = None
-    """The update time of the deployment. Formatted timestamp in ISO 6801."""
+    table_name_prefix: Optional[str] = None
+    """The prefix of the table in Unity Catalog. NOTE: On update, you have to disable inference table
+    first in order to change the prefix name."""
 
     def as_dict(self) -> dict:
-        """Serializes the AppDeployment into a dictionary suitable for use as a JSON request body."""
+        """Serializes the AiGatewayInferenceTableConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.creator is not None: body['creator'] = self.creator
-        if self.deployment_artifacts: body['deployment_artifacts'] = self.deployment_artifacts.as_dict()
-        if self.deployment_id is not None: body['deployment_id'] = self.deployment_id
-        if self.mode is not None: body['mode'] = self.mode.value
-        if self.source_code_path is not None: body['source_code_path'] = self.source_code_path
-        if self.status: body['status'] = self.status.as_dict()
-        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> AppDeployment:
-        """Deserializes the AppDeployment from a dictionary."""
-        return cls(create_time=d.get('create_time', None),
-                   creator=d.get('creator', None),
-                   deployment_artifacts=_from_dict(d, 'deployment_artifacts', AppDeploymentArtifacts),
-                   deployment_id=d.get('deployment_id', None),
-                   mode=_enum(d, 'mode', AppDeploymentMode),
-                   source_code_path=d.get('source_code_path', None),
-                   status=_from_dict(d, 'status', AppDeploymentStatus),
-                   update_time=d.get('update_time', None))
+    def from_dict(cls, d: Dict[str, any]) -> AiGatewayInferenceTableConfig:
+        """Deserializes the AiGatewayInferenceTableConfig from a dictionary."""
+        return cls(catalog_name=d.get('catalog_name', None),
+                   enabled=d.get('enabled', None),
+                   schema_name=d.get('schema_name', None),
+                   table_name_prefix=d.get('table_name_prefix', None))
 
 
 @dataclass
-class AppDeploymentArtifacts:
-    source_code_path: Optional[str] = None
-    """The snapshotted workspace file system path of the source code loaded by the deployed app."""
+class AiGatewayRateLimit:
+    calls: int
+    """Used to specify how many calls are allowed for a key within the renewal_period."""
+
+    renewal_period: AiGatewayRateLimitRenewalPeriod
+    """Renewal period field for a rate limit. Currently, only 'minute' is supported."""
+
+    key: Optional[AiGatewayRateLimitKey] = None
+    """Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint'
+    being the default if not specified."""
 
     def as_dict(self) -> dict:
-        """Serializes the AppDeploymentArtifacts into a dictionary suitable for use as a JSON request body."""
+        """Serializes the AiGatewayRateLimit into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.source_code_path is not None: body['source_code_path'] = self.source_code_path
+        if self.calls is not None: body['calls'] = self.calls
+        if self.key is not None: body['key'] = self.key.value
+        if self.renewal_period is not None: body['renewal_period'] = self.renewal_period.value
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> AppDeploymentArtifacts:
-        """Deserializes the AppDeploymentArtifacts from a dictionary."""
-        return cls(source_code_path=d.get('source_code_path', None))
+    def from_dict(cls, d: Dict[str, any]) -> AiGatewayRateLimit:
+        """Deserializes the AiGatewayRateLimit from a dictionary."""
+        return cls(calls=d.get('calls', None),
+                   key=_enum(d, 'key', AiGatewayRateLimitKey),
+                   renewal_period=_enum(d, 'renewal_period', AiGatewayRateLimitRenewalPeriod))
 
 
-class AppDeploymentMode(Enum):
+class AiGatewayRateLimitKey(Enum):
+    """Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint'
+    being the default if not specified."""
 
-    AUTO_SYNC = 'AUTO_SYNC'
-    MODE_UNSPECIFIED = 'MODE_UNSPECIFIED'
-    SNAPSHOT = 'SNAPSHOT'
+    ENDPOINT = 'endpoint'
+    USER = 'user'
 
 
-class AppDeploymentState(Enum):
+class AiGatewayRateLimitRenewalPeriod(Enum):
+    """Renewal period field for a rate limit. Currently, only 'minute' is supported."""
 
-    FAILED = 'FAILED'
-    IN_PROGRESS = 'IN_PROGRESS'
-    STATE_UNSPECIFIED = 'STATE_UNSPECIFIED'
-    STOPPED = 'STOPPED'
-    SUCCEEDED = 'SUCCEEDED'
+    MINUTE = 'minute'
 
 
 @dataclass
-class AppDeploymentStatus:
-    message: Optional[str] = None
-    """Message corresponding with the deployment state."""
-
-    state: Optional[AppDeploymentState] = None
-    """State of the deployment."""
+class AiGatewayUsageTrackingConfig:
+    enabled: Optional[bool] = None
+    """Whether to enable usage tracking."""
 
     def as_dict(self) -> dict:
-        """Serializes the AppDeploymentStatus into a dictionary suitable for use as a JSON request body."""
+        """Serializes the AiGatewayUsageTrackingConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.message is not None: body['message'] = self.message
-        if self.state is not None: body['state'] = self.state.value
+        if self.enabled is not None: body['enabled'] = self.enabled
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> AppDeploymentStatus:
-        """Deserializes the AppDeploymentStatus from a dictionary."""
-        return cls(message=d.get('message', None), state=_enum(d, 'state', AppDeploymentState))
+    def from_dict(cls, d: Dict[str, any]) -> AiGatewayUsageTrackingConfig:
+        """Deserializes the AiGatewayUsageTrackingConfig from a dictionary."""
+        return cls(enabled=d.get('enabled', None))
 
 
 @dataclass
-class AppEnvironment:
-    env: Optional[List[EnvVariable]] = None
+class AmazonBedrockConfig:
+    aws_region: str
+    """The AWS region to use. Bedrock has to be enabled there."""
+
+    bedrock_provider: AmazonBedrockConfigBedrockProvider
+    """The underlying provider in Amazon Bedrock. Supported values (case insensitive) include:
+    Anthropic, Cohere, AI21Labs, Amazon."""
+
+    aws_access_key_id: Optional[str] = None
+    """The Databricks secret key reference for an AWS access key ID with permissions to interact with
+    Bedrock services. If you prefer to paste your API key directly, see `aws_access_key_id_plaintext`.
+    You must provide an API key using one of the following fields: `aws_access_key_id` or
+    `aws_access_key_id_plaintext`."""
+
+    aws_access_key_id_plaintext: Optional[str] = None
+    """An AWS access key ID with permissions to interact with Bedrock services provided as a plaintext
+    string. If you prefer to reference your key using Databricks Secrets, see `aws_access_key_id`.
+    You must provide an API key using one of the following fields: `aws_access_key_id` or
+    `aws_access_key_id_plaintext`."""
+
+    aws_secret_access_key: Optional[str] = None
+    """The Databricks secret key reference for an AWS secret access key paired with the access key ID,
+    with permissions to interact with Bedrock services. If you prefer to paste your API key
+    directly, see `aws_secret_access_key_plaintext`. You must provide an API key using one of the
+    following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`."""
+
+    aws_secret_access_key_plaintext: Optional[str] = None
+    """An AWS secret access key paired with the access key ID, with permissions to interact with
+    Bedrock services provided as a plaintext string. If you prefer to reference your key using
+    Databricks Secrets, see `aws_secret_access_key`. You must provide an API key using one of the
+    following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`."""
 
     def as_dict(self) -> dict:
-        """Serializes the AppEnvironment into a dictionary suitable for use as a JSON request body."""
+        """Serializes the AmazonBedrockConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.env: body['env'] = [v.as_dict() for v in self.env]
+        if self.aws_access_key_id is not None: body['aws_access_key_id'] = self.aws_access_key_id
+        if self.aws_access_key_id_plaintext is not None:
+            body['aws_access_key_id_plaintext'] = self.aws_access_key_id_plaintext
+        if self.aws_region is not None: body['aws_region'] = self.aws_region
+        if self.aws_secret_access_key is not None: body['aws_secret_access_key'] = self.aws_secret_access_key
+        if self.aws_secret_access_key_plaintext is not None:
+            body['aws_secret_access_key_plaintext'] = self.aws_secret_access_key_plaintext
+        if self.bedrock_provider is not None: body['bedrock_provider'] = self.bedrock_provider.value
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> AppEnvironment:
-        """Deserializes the AppEnvironment from a dictionary."""
-        return cls(env=_repeated_dict(d, 'env', EnvVariable))
+    def from_dict(cls, d: Dict[str, any]) -> AmazonBedrockConfig:
+        """Deserializes the AmazonBedrockConfig from a dictionary."""
+        return cls(aws_access_key_id=d.get('aws_access_key_id', None),
+                   aws_access_key_id_plaintext=d.get('aws_access_key_id_plaintext', None),
+                   aws_region=d.get('aws_region', None),
+                   aws_secret_access_key=d.get('aws_secret_access_key', None),
+                   aws_secret_access_key_plaintext=d.get('aws_secret_access_key_plaintext', None),
+                   bedrock_provider=_enum(d, 'bedrock_provider', AmazonBedrockConfigBedrockProvider))
 
 
-class AppState(Enum):
+class AmazonBedrockConfigBedrockProvider(Enum):
+    """The underlying provider in Amazon Bedrock. Supported values (case insensitive) include:
+    Anthropic, Cohere, AI21Labs, Amazon."""
 
-    CREATING = 'CREATING'
-    DELETED = 'DELETED'
-    DELETING = 'DELETING'
-    ERROR = 'ERROR'
-    IDLE = 'IDLE'
-    RUNNING = 'RUNNING'
-    STARTING = 'STARTING'
-    STATE_UNSPECIFIED = 'STATE_UNSPECIFIED'
+    AI21LABS = 'ai21labs'
+    AMAZON = 'amazon'
+    ANTHROPIC = 'anthropic'
+    COHERE = 'cohere'
 
 
 @dataclass
-class AppStatus:
-    message: Optional[str] = None
-    """Message corresponding with the app state."""
+class AnthropicConfig:
+    anthropic_api_key: Optional[str] = None
+    """The Databricks secret key reference for an Anthropic API key. If you prefer to paste your API
+    key directly, see `anthropic_api_key_plaintext`. You must provide an API key using one of the
+    following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`."""
 
-    state: Optional[AppState] = None
-    """State of the app."""
+    anthropic_api_key_plaintext: Optional[str] = None
+    """The Anthropic API key provided as a plaintext string. If you prefer to reference your key using
+    Databricks Secrets, see `anthropic_api_key`. You must provide an API key using one of the
+    following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`."""
 
     def as_dict(self) -> dict:
-        """Serializes the AppStatus into a dictionary suitable for use as a JSON request body."""
+        """Serializes the AnthropicConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.message is not None: body['message'] = self.message
-        if self.state is not None: body['state'] = self.state.value
+        if self.anthropic_api_key is not None: body['anthropic_api_key'] = self.anthropic_api_key
+        if self.anthropic_api_key_plaintext is not None:
+            body['anthropic_api_key_plaintext'] = self.anthropic_api_key_plaintext
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> AppStatus:
-        """Deserializes the AppStatus from a dictionary."""
-        return cls(message=d.get('message', None), state=_enum(d, 'state', AppState))
+    def from_dict(cls, d: Dict[str, any]) -> AnthropicConfig:
+        """Deserializes the AnthropicConfig from a dictionary."""
+        return cls(anthropic_api_key=d.get('anthropic_api_key', None),
+                   anthropic_api_key_plaintext=d.get('anthropic_api_key_plaintext', None))
 
 
 @dataclass
@@ -464,72 +495,35 @@ class ChatMessageRole(Enum):
 
 @dataclass
 class CohereConfig:
-    cohere_api_key: str
-    """The Databricks secret key reference for a Cohere API key."""
+    cohere_api_base: Optional[str] = None
+    """This is an optional field to provide a customized base URL for the Cohere API. If left
+    unspecified, the standard Cohere base URL is used."""
+
+    cohere_api_key: Optional[str] = None
+    """The Databricks secret key reference for a Cohere API key. If you prefer to paste your API key
+    directly, see `cohere_api_key_plaintext`. You must provide an API key using one of the following
+    fields: `cohere_api_key` or `cohere_api_key_plaintext`."""
+
+    cohere_api_key_plaintext: Optional[str] = None
+    """The Cohere API key provided as a plaintext string. If you prefer to reference your key using
+    Databricks Secrets, see `cohere_api_key`. You must provide an API key using one of the following
+    fields: `cohere_api_key` or `cohere_api_key_plaintext`."""
 
     def as_dict(self) -> dict:
         """Serializes the CohereConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.cohere_api_base is not None: body['cohere_api_base'] = self.cohere_api_base
         if self.cohere_api_key is not None: body['cohere_api_key'] = self.cohere_api_key
+        if self.cohere_api_key_plaintext is not None:
+            body['cohere_api_key_plaintext'] = self.cohere_api_key_plaintext
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CohereConfig:
         """Deserializes the CohereConfig from a dictionary."""
-        return cls(cohere_api_key=d.get('cohere_api_key', None))
-
-
-@dataclass
-class CreateAppDeploymentRequest:
-    source_code_path: str
-    """The workspace file system path of the source code used to create the app deployment. This is
-    different from `deployment_artifacts.source_code_path`, which is the path used by the deployed
-    app. The former refers to the original source code location of the app in the workspace during
-    deployment creation, whereas the latter provides a system generated stable snapshotted source
-    code path used by the deployment."""
-
-    mode: AppDeploymentMode
-    """The mode of which the deployment will manage the source code."""
-
-    app_name: Optional[str] = None
-    """The name of the app."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CreateAppDeploymentRequest into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.app_name is not None: body['app_name'] = self.app_name
-        if self.mode is not None: body['mode'] = self.mode.value
-        if self.source_code_path is not None: body['source_code_path'] = self.source_code_path
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CreateAppDeploymentRequest:
-        """Deserializes the CreateAppDeploymentRequest from a dictionary."""
-        return cls(app_name=d.get('app_name', None),
-                   mode=_enum(d, 'mode', AppDeploymentMode),
-                   source_code_path=d.get('source_code_path', None))
-
-
-@dataclass
-class CreateAppRequest:
-    name: str
-    """The name of the app. The name must contain only lowercase alphanumeric characters and hyphens.
-    It must be unique within the workspace."""
-
-    description: Optional[str] = None
-    """The description of the app."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CreateAppRequest into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.name is not None: body['name'] = self.name
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CreateAppRequest:
-        """Deserializes the CreateAppRequest from a dictionary."""
-        return cls(description=d.get('description', None), name=d.get('name', None))
+        return cls(cohere_api_base=d.get('cohere_api_base', None),
+                   cohere_api_key=d.get('cohere_api_key', None),
+                   cohere_api_key_plaintext=d.get('cohere_api_key_plaintext', None))
 
 
 @dataclass
@@ -541,9 +535,13 @@ class CreateServingEndpoint:
     config: EndpointCoreConfigInput
     """The core config of the serving endpoint."""
 
+    ai_gateway: Optional[AiGatewayConfig] = None
+    """The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are
+    supported as of now."""
+
     rate_limits: Optional[List[RateLimit]] = None
-    """Rate limits to be applied to the serving endpoint. NOTE: only external and foundation model
-    endpoints are supported as of now."""
+    """Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI
+    Gateway to manage rate limits."""
 
     route_optimized: Optional[bool] = None
     """Enable route optimization for the serving endpoint."""
@@ -554,6 +552,7 @@ class CreateServingEndpoint:
     def as_dict(self) -> dict:
         """Serializes the CreateServingEndpoint into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.ai_gateway: body['ai_gateway'] = self.ai_gateway.as_dict()
         if self.config: body['config'] = self.config.as_dict()
         if self.name is not None: body['name'] = self.name
         if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits]
@@ -564,7 +563,8 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateServingEndpoint:
         """Deserializes the CreateServingEndpoint from a dictionary."""
-        return cls(config=_from_dict(d, 'config', EndpointCoreConfigInput),
+        return cls(ai_gateway=_from_dict(d, 'ai_gateway', AiGatewayConfig),
+                   config=_from_dict(d, 'config', EndpointCoreConfigInput),
                    name=d.get('name', None),
                    rate_limits=_repeated_dict(d, 'rate_limits', RateLimit),
                    route_optimized=d.get('route_optimized', None),
@@ -573,19 +573,30 @@ def from_dict(cls, d: Dict[str, any]) -> CreateServingEndpoint:
 
 @dataclass
 class DatabricksModelServingConfig:
-    databricks_api_token: str
-    """The Databricks secret key reference for a Databricks API token that corresponds to a user or
-    service principal with Can Query access to the model serving endpoint pointed to by this
-    external model."""
-
     databricks_workspace_url: str
     """The URL of the Databricks workspace containing the model serving endpoint pointed to by this
     external model."""
 
+    databricks_api_token: Optional[str] = None
+    """The Databricks secret key reference for a Databricks API token that corresponds to a user or
+    service principal with Can Query access to the model serving endpoint pointed to by this
+    external model. If you prefer to paste your API key directly, see
+    `databricks_api_token_plaintext`. You must provide an API key using one of the following fields:
+    `databricks_api_token` or `databricks_api_token_plaintext`."""
+
+    databricks_api_token_plaintext: Optional[str] = None
+    """The Databricks API token that corresponds to a user or service principal with Can Query access
+    to the model serving endpoint pointed to by this external model provided as a plaintext string.
+    If you prefer to reference your key using Databricks Secrets, see `databricks_api_token`. You
+    must provide an API key using one of the following fields: `databricks_api_token` or
+    `databricks_api_token_plaintext`."""
+
     def as_dict(self) -> dict:
         """Serializes the DatabricksModelServingConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.databricks_api_token is not None: body['databricks_api_token'] = self.databricks_api_token
+        if self.databricks_api_token_plaintext is not None:
+            body['databricks_api_token_plaintext'] = self.databricks_api_token_plaintext
         if self.databricks_workspace_url is not None:
             body['databricks_workspace_url'] = self.databricks_workspace_url
         return body
@@ -594,6 +605,7 @@ def as_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, any]) -> DatabricksModelServingConfig:
         """Deserializes the DatabricksModelServingConfig from a dictionary."""
         return cls(databricks_api_token=d.get('databricks_api_token', None),
+                   databricks_api_token_plaintext=d.get('databricks_api_token_plaintext', None),
                    databricks_workspace_url=d.get('databricks_workspace_url', None))
 
 
@@ -846,6 +858,7 @@ class EndpointStateConfigUpdate(Enum):
 
     IN_PROGRESS = 'IN_PROGRESS'
     NOT_UPDATING = 'NOT_UPDATING'
+    UPDATE_CANCELED = 'UPDATE_CANCELED'
     UPDATE_FAILED = 'UPDATE_FAILED'
 
 
@@ -879,28 +892,6 @@ def from_dict(cls, d: Dict[str, any]) -> EndpointTag:
         return cls(key=d.get('key', None), value=d.get('value', None))
 
 
-@dataclass
-class EnvVariable:
-    name: Optional[str] = None
-
-    value: Optional[str] = None
-
-    value_from: Optional[str] = None
-
-    def as_dict(self) -> dict:
-        """Serializes the EnvVariable into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.value is not None: body['value'] = self.value
-        if self.value_from is not None: body['value_from'] = self.value_from
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> EnvVariable:
-        """Deserializes the EnvVariable from a dictionary."""
-        return cls(name=d.get('name', None), value=d.get('value', None), value_from=d.get('value_from', None))
-
-
 @dataclass
 class ExportMetricsResponse:
     contents: Optional[BinaryIO] = None
@@ -921,8 +912,8 @@ def from_dict(cls, d: Dict[str, any]) -> ExportMetricsResponse:
 class ExternalModel:
     provider: ExternalModelProvider
     """The name of the provider for the external model. Currently, the supported providers are
-    'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'openai', and
-    'palm'.","""
+    'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving',
+    'google-cloud-vertex-ai', 'openai', and 'palm'."""
 
     name: str
     """The name of the external model."""
@@ -945,6 +936,9 @@ class ExternalModel:
     databricks_model_serving_config: Optional[DatabricksModelServingConfig] = None
     """Databricks Model Serving Config. Only required if the provider is 'databricks-model-serving'."""
 
+    google_cloud_vertex_ai_config: Optional[GoogleCloudVertexAiConfig] = None
+    """Google Cloud Vertex AI Config. Only required if the provider is 'google-cloud-vertex-ai'."""
+
     openai_config: Optional[OpenAiConfig] = None
     """OpenAI Config. Only required if the provider is 'openai'."""
 
@@ -960,6 +954,8 @@ def as_dict(self) -> dict:
         if self.cohere_config: body['cohere_config'] = self.cohere_config.as_dict()
         if self.databricks_model_serving_config:
             body['databricks_model_serving_config'] = self.databricks_model_serving_config.as_dict()
+        if self.google_cloud_vertex_ai_config:
+            body['google_cloud_vertex_ai_config'] = self.google_cloud_vertex_ai_config.as_dict()
         if self.name is not None: body['name'] = self.name
         if self.openai_config: body['openai_config'] = self.openai_config.as_dict()
         if self.palm_config: body['palm_config'] = self.palm_config.as_dict()
@@ -976,6 +972,8 @@ def from_dict(cls, d: Dict[str, any]) -> ExternalModel:
                    cohere_config=_from_dict(d, 'cohere_config', CohereConfig),
                    databricks_model_serving_config=_from_dict(d, 'databricks_model_serving_config',
                                                               DatabricksModelServingConfig),
+                   google_cloud_vertex_ai_config=_from_dict(d, 'google_cloud_vertex_ai_config',
+                                                            GoogleCloudVertexAiConfig),
                    name=d.get('name', None),
                    openai_config=_from_dict(d, 'openai_config', OpenAiConfig),
                    palm_config=_from_dict(d, 'palm_config', PaLmConfig),
@@ -985,14 +983,15 @@ def from_dict(cls, d: Dict[str, any]) -> ExternalModel:
 
 class ExternalModelProvider(Enum):
     """The name of the provider for the external model. Currently, the supported providers are
-    'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'openai', and
-    'palm'.","""
+    'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving',
+    'google-cloud-vertex-ai', 'openai', and 'palm'.","""
 
     AI21LABS = 'ai21labs'
     AMAZON_BEDROCK = 'amazon-bedrock'
     ANTHROPIC = 'anthropic'
     COHERE = 'cohere'
     DATABRICKS_MODEL_SERVING = 'databricks-model-serving'
+    GOOGLE_CLOUD_VERTEX_AI = 'google-cloud-vertex-ai'
     OPENAI = 'openai'
     PALM = 'palm'
 
@@ -1091,45 +1090,48 @@ def from_dict(cls, d: Dict[str, any]) -> GetServingEndpointPermissionLevelsRespo
 
 
 @dataclass
-class ListAppDeploymentsResponse:
-    app_deployments: Optional[List[AppDeployment]] = None
-    """Deployment history of the app."""
-
-    next_page_token: Optional[str] = None
-    """Pagination token to request the next page of apps."""
-
-    def as_dict(self) -> dict:
-        """Serializes the ListAppDeploymentsResponse into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.app_deployments: body['app_deployments'] = [v.as_dict() for v in self.app_deployments]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> ListAppDeploymentsResponse:
-        """Deserializes the ListAppDeploymentsResponse from a dictionary."""
-        return cls(app_deployments=_repeated_dict(d, 'app_deployments', AppDeployment),
-                   next_page_token=d.get('next_page_token', None))
+class GoogleCloudVertexAiConfig:
+    private_key: Optional[str] = None
+    """The Databricks secret key reference for a private key for the service account which has access
+    to the Google Cloud Vertex AI Service. See [Best practices for managing service account keys].
+    If you prefer to paste your API key directly, see `private_key_plaintext`. You must provide an
+    API key using one of the following fields: `private_key` or `private_key_plaintext`
+    
+    [Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys"""
 
+    private_key_plaintext: Optional[str] = None
+    """The private key for the service account which has access to the Google Cloud Vertex AI Service
+    provided as a plaintext secret. See [Best practices for managing service account keys]. If you
+    prefer to reference your key using Databricks Secrets, see `private_key`. You must provide an
+    API key using one of the following fields: `private_key` or `private_key_plaintext`.
+    
+    [Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys"""
 
-@dataclass
-class ListAppsResponse:
-    apps: Optional[List[App]] = None
+    project_id: Optional[str] = None
+    """This is the Google Cloud project id that the service account is associated with."""
 
-    next_page_token: Optional[str] = None
-    """Pagination token to request the next page of apps."""
+    region: Optional[str] = None
+    """This is the region for the Google Cloud Vertex AI Service. See [supported regions] for more
+    details. Some models are only available in specific regions.
+    
+    [supported regions]: https://cloud.google.com/vertex-ai/docs/general/locations"""
 
     def as_dict(self) -> dict:
-        """Serializes the ListAppsResponse into a dictionary suitable for use as a JSON request body."""
+        """Serializes the GoogleCloudVertexAiConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.apps: body['apps'] = [v.as_dict() for v in self.apps]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.private_key is not None: body['private_key'] = self.private_key
+        if self.private_key_plaintext is not None: body['private_key_plaintext'] = self.private_key_plaintext
+        if self.project_id is not None: body['project_id'] = self.project_id
+        if self.region is not None: body['region'] = self.region
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> ListAppsResponse:
-        """Deserializes the ListAppsResponse from a dictionary."""
-        return cls(apps=_repeated_dict(d, 'apps', App), next_page_token=d.get('next_page_token', None))
+    def from_dict(cls, d: Dict[str, any]) -> GoogleCloudVertexAiConfig:
+        """Deserializes the GoogleCloudVertexAiConfig from a dictionary."""
+        return cls(private_key=d.get('private_key', None),
+                   private_key_plaintext=d.get('private_key_plaintext', None),
+                   project_id=d.get('project_id', None),
+                   region=d.get('region', None))
 
 
 @dataclass
@@ -1172,19 +1174,35 @@ class OpenAiConfig:
     """This field is only required for Azure AD OpenAI and is the Microsoft Entra Client ID."""
 
     microsoft_entra_client_secret: Optional[str] = None
-    """The Databricks secret key reference for the Microsoft Entra Client Secret that is only required
-    for Azure AD OpenAI."""
+    """The Databricks secret key reference for a client secret used for Microsoft Entra ID
+    authentication. If you prefer to paste your client secret directly, see
+    `microsoft_entra_client_secret_plaintext`. You must provide an API key using one of the
+    following fields: `microsoft_entra_client_secret` or `microsoft_entra_client_secret_plaintext`."""
+
+    microsoft_entra_client_secret_plaintext: Optional[str] = None
+    """The client secret used for Microsoft Entra ID authentication provided as a plaintext string. If
+    you prefer to reference your key using Databricks Secrets, see `microsoft_entra_client_secret`.
+    You must provide an API key using one of the following fields: `microsoft_entra_client_secret`
+    or `microsoft_entra_client_secret_plaintext`."""
 
     microsoft_entra_tenant_id: Optional[str] = None
     """This field is only required for Azure AD OpenAI and is the Microsoft Entra Tenant ID."""
 
     openai_api_base: Optional[str] = None
-    """This is the base URL for the OpenAI API (default: "https://api.openai.com/v1"). For Azure
-    OpenAI, this field is required, and is the base URL for the Azure OpenAI API service provided by
-    Azure."""
+    """This is a field to provide a customized base URl for the OpenAI API. For Azure OpenAI, this
+    field is required, and is the base URL for the Azure OpenAI API service provided by Azure. For
+    other OpenAI API types, this field is optional, and if left unspecified, the standard OpenAI
+    base URL is used."""
 
     openai_api_key: Optional[str] = None
-    """The Databricks secret key reference for an OpenAI or Azure OpenAI API key."""
+    """The Databricks secret key reference for an OpenAI API key using the OpenAI or Azure service. If
+    you prefer to paste your API key directly, see `openai_api_key_plaintext`. You must provide an
+    API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`."""
+
+    openai_api_key_plaintext: Optional[str] = None
+    """The OpenAI API key using the OpenAI or Azure service provided as a plaintext string. If you
+    prefer to reference your key using Databricks Secrets, see `openai_api_key`. You must provide an
+    API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`."""
 
     openai_api_type: Optional[str] = None
     """This is an optional field to specify the type of OpenAI API to use. For Azure OpenAI, this field
@@ -1210,10 +1228,14 @@ def as_dict(self) -> dict:
             body['microsoft_entra_client_id'] = self.microsoft_entra_client_id
         if self.microsoft_entra_client_secret is not None:
             body['microsoft_entra_client_secret'] = self.microsoft_entra_client_secret
+        if self.microsoft_entra_client_secret_plaintext is not None:
+            body['microsoft_entra_client_secret_plaintext'] = self.microsoft_entra_client_secret_plaintext
         if self.microsoft_entra_tenant_id is not None:
             body['microsoft_entra_tenant_id'] = self.microsoft_entra_tenant_id
         if self.openai_api_base is not None: body['openai_api_base'] = self.openai_api_base
         if self.openai_api_key is not None: body['openai_api_key'] = self.openai_api_key
+        if self.openai_api_key_plaintext is not None:
+            body['openai_api_key_plaintext'] = self.openai_api_key_plaintext
         if self.openai_api_type is not None: body['openai_api_type'] = self.openai_api_type
         if self.openai_api_version is not None: body['openai_api_version'] = self.openai_api_version
         if self.openai_deployment_name is not None:
@@ -1226,9 +1248,12 @@ def from_dict(cls, d: Dict[str, any]) -> OpenAiConfig:
         """Deserializes the OpenAiConfig from a dictionary."""
         return cls(microsoft_entra_client_id=d.get('microsoft_entra_client_id', None),
                    microsoft_entra_client_secret=d.get('microsoft_entra_client_secret', None),
+                   microsoft_entra_client_secret_plaintext=d.get('microsoft_entra_client_secret_plaintext',
+                                                                 None),
                    microsoft_entra_tenant_id=d.get('microsoft_entra_tenant_id', None),
                    openai_api_base=d.get('openai_api_base', None),
                    openai_api_key=d.get('openai_api_key', None),
+                   openai_api_key_plaintext=d.get('openai_api_key_plaintext', None),
                    openai_api_type=d.get('openai_api_type', None),
                    openai_api_version=d.get('openai_api_version', None),
                    openai_deployment_name=d.get('openai_deployment_name', None),
@@ -1237,19 +1262,29 @@ def from_dict(cls, d: Dict[str, any]) -> OpenAiConfig:
 
 @dataclass
 class PaLmConfig:
-    palm_api_key: str
-    """The Databricks secret key reference for a PaLM API key."""
+    palm_api_key: Optional[str] = None
+    """The Databricks secret key reference for a PaLM API key. If you prefer to paste your API key
+    directly, see `palm_api_key_plaintext`. You must provide an API key using one of the following
+    fields: `palm_api_key` or `palm_api_key_plaintext`."""
+
+    palm_api_key_plaintext: Optional[str] = None
+    """The PaLM API key provided as a plaintext string. If you prefer to reference your key using
+    Databricks Secrets, see `palm_api_key`. You must provide an API key using one of the following
+    fields: `palm_api_key` or `palm_api_key_plaintext`."""
 
     def as_dict(self) -> dict:
         """Serializes the PaLmConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.palm_api_key is not None: body['palm_api_key'] = self.palm_api_key
+        if self.palm_api_key_plaintext is not None:
+            body['palm_api_key_plaintext'] = self.palm_api_key_plaintext
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PaLmConfig:
         """Deserializes the PaLmConfig from a dictionary."""
-        return cls(palm_api_key=d.get('palm_api_key', None))
+        return cls(palm_api_key=d.get('palm_api_key', None),
+                   palm_api_key_plaintext=d.get('palm_api_key_plaintext', None))
 
 
 @dataclass
@@ -1306,6 +1341,42 @@ def from_dict(cls, d: Dict[str, any]) -> PayloadTable:
                    status_message=d.get('status_message', None))
 
 
+@dataclass
+class PutAiGatewayResponse:
+    guardrails: Optional[AiGatewayGuardrails] = None
+    """Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and
+    responses."""
+
+    inference_table_config: Optional[AiGatewayInferenceTableConfig] = None
+    """Configuration for payload logging using inference tables. Use these tables to monitor and audit
+    data being sent to and received from model APIs and to improve model quality ."""
+
+    rate_limits: Optional[List[AiGatewayRateLimit]] = None
+    """Configuration for rate limits which can be set to limit endpoint traffic."""
+
+    usage_tracking_config: Optional[AiGatewayUsageTrackingConfig] = None
+    """Configuration to enable usage tracking using system tables. These tables allow you to monitor
+    operational usage on endpoints and their associated costs."""
+
+    def as_dict(self) -> dict:
+        """Serializes the PutAiGatewayResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.guardrails: body['guardrails'] = self.guardrails.as_dict()
+        if self.inference_table_config: body['inference_table_config'] = self.inference_table_config.as_dict()
+        if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits]
+        if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> PutAiGatewayResponse:
+        """Deserializes the PutAiGatewayResponse from a dictionary."""
+        return cls(guardrails=_from_dict(d, 'guardrails', AiGatewayGuardrails),
+                   inference_table_config=_from_dict(d, 'inference_table_config',
+                                                     AiGatewayInferenceTableConfig),
+                   rate_limits=_repeated_dict(d, 'rate_limits', AiGatewayRateLimit),
+                   usage_tracking_config=_from_dict(d, 'usage_tracking_config', AiGatewayUsageTrackingConfig))
+
+
 @dataclass
 class PutResponse:
     rate_limits: Optional[List[RateLimit]] = None
@@ -1581,11 +1652,10 @@ class ServedEntityInput:
     external_model: Optional[ExternalModel] = None
     """The external model to be served. NOTE: Only one of external_model and (entity_name,
     entity_version, workload_size, workload_type, and scale_to_zero_enabled) can be specified with
-    the latter set being used for custom model serving for a Databricks registered model. When an
-    external_model is present, the served entities list can only have one served_entity object. For
-    an existing endpoint with external_model, it can not be updated to an endpoint without
+    the latter set being used for custom model serving for a Databricks registered model. For an
+    existing endpoint with external_model, it cannot be updated to an endpoint without
     external_model. If the endpoint is created without external_model, users cannot update it to add
-    external_model later."""
+    external_model later. The task type of all external models within an endpoint must be the same."""
 
     instance_profile_arn: Optional[str] = None
     """ARN of the instance profile that the served entity uses to access AWS resources."""
@@ -1817,14 +1887,6 @@ class ServedModelInput:
     model_version: str
     """The version of the model in Databricks Model Registry or Unity Catalog to be served."""
 
-    workload_size: ServedModelInputWorkloadSize
-    """The workload size of the served model. The workload size corresponds to a range of provisioned
-    concurrency that the compute will autoscale between. A single unit of provisioned concurrency
-    can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned
-    concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned
-    concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for
-    each workload size will be 0."""
-
     scale_to_zero_enabled: bool
     """Whether the compute resources for the served model should scale down to zero."""
 
@@ -1837,11 +1899,25 @@ class ServedModelInput:
     instance_profile_arn: Optional[str] = None
     """ARN of the instance profile that the served model will use to access AWS resources."""
 
+    max_provisioned_throughput: Optional[int] = None
+    """The maximum tokens per second that the endpoint can scale up to."""
+
+    min_provisioned_throughput: Optional[int] = None
+    """The minimum tokens per second that the endpoint can scale down to."""
+
     name: Optional[str] = None
     """The name of a served model. It must be unique across an endpoint. If not specified, this field
     will default to -. A served model name can consist of alphanumeric
     characters, dashes, and underscores."""
 
+    workload_size: Optional[ServedModelInputWorkloadSize] = None
+    """The workload size of the served model. The workload size corresponds to a range of provisioned
+    concurrency that the compute will autoscale between. A single unit of provisioned concurrency
+    can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned
+    concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned
+    concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for
+    each workload size will be 0."""
+
     workload_type: Optional[ServedModelInputWorkloadType] = None
     """The workload type of the served model. The workload type selects which type of compute to use in
     the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU
@@ -1855,6 +1931,10 @@ def as_dict(self) -> dict:
         body = {}
         if self.environment_vars: body['environment_vars'] = self.environment_vars
         if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.max_provisioned_throughput is not None:
+            body['max_provisioned_throughput'] = self.max_provisioned_throughput
+        if self.min_provisioned_throughput is not None:
+            body['min_provisioned_throughput'] = self.min_provisioned_throughput
         if self.model_name is not None: body['model_name'] = self.model_name
         if self.model_version is not None: body['model_version'] = self.model_version
         if self.name is not None: body['name'] = self.name
@@ -1868,6 +1948,8 @@ def from_dict(cls, d: Dict[str, any]) -> ServedModelInput:
         """Deserializes the ServedModelInput from a dictionary."""
         return cls(environment_vars=d.get('environment_vars', None),
                    instance_profile_arn=d.get('instance_profile_arn', None),
+                   max_provisioned_throughput=d.get('max_provisioned_throughput', None),
+                   min_provisioned_throughput=d.get('min_provisioned_throughput', None),
                    model_name=d.get('model_name', None),
                    model_version=d.get('model_version', None),
                    name=d.get('name', None),
@@ -2081,6 +2163,10 @@ def from_dict(cls, d: Dict[str, any]) -> ServerLogsResponse:
 
 @dataclass
 class ServingEndpoint:
+    ai_gateway: Optional[AiGatewayConfig] = None
+    """The AI Gateway configuration for the serving endpoint. NOTE: Only external model endpoints are
+    currently supported."""
+
     config: Optional[EndpointCoreConfigSummary] = None
     """The config that is currently being served by the endpoint."""
 
@@ -2112,6 +2198,7 @@ class ServingEndpoint:
     def as_dict(self) -> dict:
         """Serializes the ServingEndpoint into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.ai_gateway: body['ai_gateway'] = self.ai_gateway.as_dict()
         if self.config: body['config'] = self.config.as_dict()
         if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
         if self.creator is not None: body['creator'] = self.creator
@@ -2127,7 +2214,8 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServingEndpoint:
         """Deserializes the ServingEndpoint from a dictionary."""
-        return cls(config=_from_dict(d, 'config', EndpointCoreConfigSummary),
+        return cls(ai_gateway=_from_dict(d, 'ai_gateway', AiGatewayConfig),
+                   config=_from_dict(d, 'config', EndpointCoreConfigSummary),
                    creation_timestamp=d.get('creation_timestamp', None),
                    creator=d.get('creator', None),
                    id=d.get('id', None),
@@ -2211,6 +2299,10 @@ def from_dict(cls, d: Dict[str, any]) -> ServingEndpointAccessControlResponse:
 
 @dataclass
 class ServingEndpointDetailed:
+    ai_gateway: Optional[AiGatewayConfig] = None
+    """The AI Gateway configuration for the serving endpoint. NOTE: Only external model endpoints are
+    currently supported."""
+
     config: Optional[EndpointCoreConfigOutput] = None
     """The config that is currently being served by the endpoint."""
 
@@ -2257,6 +2349,7 @@ class ServingEndpointDetailed:
     def as_dict(self) -> dict:
         """Serializes the ServingEndpointDetailed into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.ai_gateway: body['ai_gateway'] = self.ai_gateway.as_dict()
         if self.config: body['config'] = self.config.as_dict()
         if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
         if self.creator is not None: body['creator'] = self.creator
@@ -2277,7 +2370,8 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServingEndpointDetailed:
         """Deserializes the ServingEndpointDetailed from a dictionary."""
-        return cls(config=_from_dict(d, 'config', EndpointCoreConfigOutput),
+        return cls(ai_gateway=_from_dict(d, 'ai_gateway', AiGatewayConfig),
+                   config=_from_dict(d, 'config', EndpointCoreConfigOutput),
                    creation_timestamp=d.get('creation_timestamp', None),
                    creator=d.get('creator', None),
                    data_plane_info=_from_dict(d, 'data_plane_info', ModelDataPlaneInfo),
@@ -2404,32 +2498,6 @@ def from_dict(cls, d: Dict[str, any]) -> ServingEndpointPermissionsRequest:
                    serving_endpoint_id=d.get('serving_endpoint_id', None))
 
 
-@dataclass
-class StartAppRequest:
-    name: Optional[str] = None
-    """The name of the app."""
-
-
-@dataclass
-class StopAppRequest:
-    name: Optional[str] = None
-    """The name of the app."""
-
-
-@dataclass
-class StopAppResponse:
-
-    def as_dict(self) -> dict:
-        """Serializes the StopAppResponse into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> StopAppResponse:
-        """Deserializes the StopAppResponse from a dictionary."""
-        return cls()
-
-
 @dataclass
 class TrafficConfig:
     routes: Optional[List[Route]] = None
@@ -2448,34 +2516,12 @@ def from_dict(cls, d: Dict[str, any]) -> TrafficConfig:
 
 
 @dataclass
-class UpdateAppRequest:
-    name: str
-    """The name of the app. The name must contain only lowercase alphanumeric characters and hyphens.
-    It must be unique within the workspace."""
+class V1ResponseChoiceElement:
+    finish_reason: Optional[str] = None
+    """The finish reason returned by the endpoint."""
 
-    description: Optional[str] = None
-    """The description of the app."""
-
-    def as_dict(self) -> dict:
-        """Serializes the UpdateAppRequest into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.name is not None: body['name'] = self.name
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> UpdateAppRequest:
-        """Deserializes the UpdateAppRequest from a dictionary."""
-        return cls(description=d.get('description', None), name=d.get('name', None))
-
-
-@dataclass
-class V1ResponseChoiceElement:
-    finish_reason: Optional[str] = None
-    """The finish reason returned by the endpoint."""
-
-    index: Optional[int] = None
-    """The index of the choice in the __chat or completions__ response."""
+    index: Optional[int] = None
+    """The index of the choice in the __chat or completions__ response."""
 
     logprobs: Optional[int] = None
     """The logprobs returned only by the __completions__ endpoint."""
@@ -2506,333 +2552,6 @@ def from_dict(cls, d: Dict[str, any]) -> V1ResponseChoiceElement:
                    text=d.get('text', None))
 
 
-class AppsAPI:
-    """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend
-    Databricks services, and enable users to interact through single sign-on."""
-
-    def __init__(self, api_client):
-        self._api = api_client
-
-    def wait_get_app_idle(self,
-                          name: str,
-                          timeout=timedelta(minutes=20),
-                          callback: Optional[Callable[[App], None]] = None) -> App:
-        deadline = time.time() + timeout.total_seconds()
-        target_states = (AppState.IDLE, )
-        failure_states = (AppState.ERROR, )
-        status_message = 'polling...'
-        attempt = 1
-        while time.time() < deadline:
-            poll = self.get(name=name)
-            status = poll.status.state
-            status_message = f'current status: {status}'
-            if poll.status:
-                status_message = poll.status.message
-            if status in target_states:
-                return poll
-            if callback:
-                callback(poll)
-            if status in failure_states:
-                msg = f'failed to reach IDLE, got {status}: {status_message}'
-                raise OperationFailed(msg)
-            prefix = f"name={name}"
-            sleep = attempt
-            if sleep > 10:
-                # sleep 10s max per attempt
-                sleep = 10
-            _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
-            time.sleep(sleep + random.random())
-            attempt += 1
-        raise TimeoutError(f'timed out after {timeout}: {status_message}')
-
-    def wait_get_deployment_app_succeeded(
-            self,
-            app_name: str,
-            deployment_id: str,
-            timeout=timedelta(minutes=20),
-            callback: Optional[Callable[[AppDeployment], None]] = None) -> AppDeployment:
-        deadline = time.time() + timeout.total_seconds()
-        target_states = (AppDeploymentState.SUCCEEDED, )
-        failure_states = (AppDeploymentState.FAILED, )
-        status_message = 'polling...'
-        attempt = 1
-        while time.time() < deadline:
-            poll = self.get_deployment(app_name=app_name, deployment_id=deployment_id)
-            status = poll.status.state
-            status_message = f'current status: {status}'
-            if poll.status:
-                status_message = poll.status.message
-            if status in target_states:
-                return poll
-            if callback:
-                callback(poll)
-            if status in failure_states:
-                msg = f'failed to reach SUCCEEDED, got {status}: {status_message}'
-                raise OperationFailed(msg)
-            prefix = f"app_name={app_name}, deployment_id={deployment_id}"
-            sleep = attempt
-            if sleep > 10:
-                # sleep 10s max per attempt
-                sleep = 10
-            _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
-            time.sleep(sleep + random.random())
-            attempt += 1
-        raise TimeoutError(f'timed out after {timeout}: {status_message}')
-
-    def create(self, name: str, *, description: Optional[str] = None) -> Wait[App]:
-        """Create an app.
-        
-        Creates a new app.
-        
-        :param name: str
-          The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It
-          must be unique within the workspace.
-        :param description: str (optional)
-          The description of the app.
-        
-        :returns:
-          Long-running operation waiter for :class:`App`.
-          See :method:wait_get_app_idle for more details.
-        """
-        body = {}
-        if description is not None: body['description'] = description
-        if name is not None: body['name'] = name
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        op_response = self._api.do('POST', '/api/2.0/preview/apps', body=body, headers=headers)
-        return Wait(self.wait_get_app_idle, response=App.from_dict(op_response), name=op_response['name'])
-
-    def create_and_wait(self,
-                        name: str,
-                        *,
-                        description: Optional[str] = None,
-                        timeout=timedelta(minutes=20)) -> App:
-        return self.create(description=description, name=name).result(timeout=timeout)
-
-    def delete(self, name: str):
-        """Delete an app.
-        
-        Deletes an app.
-        
-        :param name: str
-          The name of the app.
-        
-        
-        """
-
-        headers = {'Accept': 'application/json', }
-
-        self._api.do('DELETE', f'/api/2.0/preview/apps/{name}', headers=headers)
-
-    def deploy(self, app_name: str, source_code_path: str, mode: AppDeploymentMode) -> Wait[AppDeployment]:
-        """Create an app deployment.
-        
-        Creates an app deployment for the app with the supplied name.
-        
-        :param app_name: str
-          The name of the app.
-        :param source_code_path: str
-          The workspace file system path of the source code used to create the app deployment. This is
-          different from `deployment_artifacts.source_code_path`, which is the path used by the deployed app.
-          The former refers to the original source code location of the app in the workspace during deployment
-          creation, whereas the latter provides a system generated stable snapshotted source code path used by
-          the deployment.
-        :param mode: :class:`AppDeploymentMode`
-          The mode of which the deployment will manage the source code.
-        
-        :returns:
-          Long-running operation waiter for :class:`AppDeployment`.
-          See :method:wait_get_deployment_app_succeeded for more details.
-        """
-        body = {}
-        if mode is not None: body['mode'] = mode.value
-        if source_code_path is not None: body['source_code_path'] = source_code_path
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        op_response = self._api.do('POST',
-                                   f'/api/2.0/preview/apps/{app_name}/deployments',
-                                   body=body,
-                                   headers=headers)
-        return Wait(self.wait_get_deployment_app_succeeded,
-                    response=AppDeployment.from_dict(op_response),
-                    app_name=app_name,
-                    deployment_id=op_response['deployment_id'])
-
-    def deploy_and_wait(self,
-                        app_name: str,
-                        source_code_path: str,
-                        mode: AppDeploymentMode,
-                        timeout=timedelta(minutes=20)) -> AppDeployment:
-        return self.deploy(app_name=app_name, mode=mode,
-                           source_code_path=source_code_path).result(timeout=timeout)
-
-    def get(self, name: str) -> App:
-        """Get an app.
-        
-        Retrieves information for the app with the supplied name.
-        
-        :param name: str
-          The name of the app.
-        
-        :returns: :class:`App`
-        """
-
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET', f'/api/2.0/preview/apps/{name}', headers=headers)
-        return App.from_dict(res)
-
-    def get_deployment(self, app_name: str, deployment_id: str) -> AppDeployment:
-        """Get an app deployment.
-        
-        Retrieves information for the app deployment with the supplied name and deployment id.
-        
-        :param app_name: str
-          The name of the app.
-        :param deployment_id: str
-          The unique id of the deployment.
-        
-        :returns: :class:`AppDeployment`
-        """
-
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET',
-                           f'/api/2.0/preview/apps/{app_name}/deployments/{deployment_id}',
-                           headers=headers)
-        return AppDeployment.from_dict(res)
-
-    def get_environment(self, name: str) -> AppEnvironment:
-        """Get app environment.
-        
-        Retrieves app environment.
-        
-        :param name: str
-          The name of the app.
-        
-        :returns: :class:`AppEnvironment`
-        """
-
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET', f'/api/2.0/preview/apps/{name}/environment', headers=headers)
-        return AppEnvironment.from_dict(res)
-
-    def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[App]:
-        """List apps.
-        
-        Lists all apps in the workspace.
-        
-        :param page_size: int (optional)
-          Upper bound for items returned.
-        :param page_token: str (optional)
-          Pagination token to go to the next page of apps. Requests first page if absent.
-        
-        :returns: Iterator over :class:`App`
-        """
-
-        query = {}
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
-
-        while True:
-            json = self._api.do('GET', '/api/2.0/preview/apps', query=query, headers=headers)
-            if 'apps' in json:
-                for v in json['apps']:
-                    yield App.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
-                return
-            query['page_token'] = json['next_page_token']
-
-    def list_deployments(self,
-                         app_name: str,
-                         *,
-                         page_size: Optional[int] = None,
-                         page_token: Optional[str] = None) -> Iterator[AppDeployment]:
-        """List app deployments.
-        
-        Lists all app deployments for the app with the supplied name.
-        
-        :param app_name: str
-          The name of the app.
-        :param page_size: int (optional)
-          Upper bound for items returned.
-        :param page_token: str (optional)
-          Pagination token to go to the next page of apps. Requests first page if absent.
-        
-        :returns: Iterator over :class:`AppDeployment`
-        """
-
-        query = {}
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
-
-        while True:
-            json = self._api.do('GET',
-                                f'/api/2.0/preview/apps/{app_name}/deployments',
-                                query=query,
-                                headers=headers)
-            if 'app_deployments' in json:
-                for v in json['app_deployments']:
-                    yield AppDeployment.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
-                return
-            query['page_token'] = json['next_page_token']
-
-    def start(self, name: str) -> AppDeployment:
-        """Start an app.
-        
-        Start the last active deployment of the app in the workspace.
-        
-        :param name: str
-          The name of the app.
-        
-        :returns: :class:`AppDeployment`
-        """
-
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', f'/api/2.0/preview/apps/{name}/start', headers=headers)
-        return AppDeployment.from_dict(res)
-
-    def stop(self, name: str):
-        """Stop an app.
-        
-        Stops the active deployment of the app in the workspace.
-        
-        :param name: str
-          The name of the app.
-        
-        
-        """
-
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        self._api.do('POST', f'/api/2.0/preview/apps/{name}/stop', headers=headers)
-
-    def update(self, name: str, *, description: Optional[str] = None) -> App:
-        """Update an app.
-        
-        Updates the app with the supplied name.
-        
-        :param name: str
-          The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It
-          must be unique within the workspace.
-        :param description: str (optional)
-          The description of the app.
-        
-        :returns: :class:`App`
-        """
-        body = {}
-        if description is not None: body['description'] = description
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PATCH', f'/api/2.0/preview/apps/{name}', body=body, headers=headers)
-        return App.from_dict(res)
-
-
 class ServingEndpointsAPI:
     """The Serving Endpoints API allows you to create, update, and delete model serving endpoints.
     
@@ -2855,7 +2574,8 @@ def wait_get_serving_endpoint_not_updating(
             callback: Optional[Callable[[ServingEndpointDetailed], None]] = None) -> ServingEndpointDetailed:
         deadline = time.time() + timeout.total_seconds()
         target_states = (EndpointStateConfigUpdate.NOT_UPDATING, )
-        failure_states = (EndpointStateConfigUpdate.UPDATE_FAILED, )
+        failure_states = (EndpointStateConfigUpdate.UPDATE_FAILED, EndpointStateConfigUpdate.UPDATE_CANCELED,
+                          )
         status_message = 'polling...'
         attempt = 1
         while time.time() < deadline:
@@ -2903,6 +2623,7 @@ def create(self,
                name: str,
                config: EndpointCoreConfigInput,
                *,
+               ai_gateway: Optional[AiGatewayConfig] = None,
                rate_limits: Optional[List[RateLimit]] = None,
                route_optimized: Optional[bool] = None,
                tags: Optional[List[EndpointTag]] = None) -> Wait[ServingEndpointDetailed]:
@@ -2913,9 +2634,12 @@ def create(self,
           workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores.
         :param config: :class:`EndpointCoreConfigInput`
           The core config of the serving endpoint.
+        :param ai_gateway: :class:`AiGatewayConfig` (optional)
+          The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are
+          supported as of now.
         :param rate_limits: List[:class:`RateLimit`] (optional)
-          Rate limits to be applied to the serving endpoint. NOTE: only external and foundation model
-          endpoints are supported as of now.
+          Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI
+          Gateway to manage rate limits.
         :param route_optimized: bool (optional)
           Enable route optimization for the serving endpoint.
         :param tags: List[:class:`EndpointTag`] (optional)
@@ -2926,6 +2650,7 @@ def create(self,
           See :method:wait_get_serving_endpoint_not_updating for more details.
         """
         body = {}
+        if ai_gateway is not None: body['ai_gateway'] = ai_gateway.as_dict()
         if config is not None: body['config'] = config.as_dict()
         if name is not None: body['name'] = name
         if rate_limits is not None: body['rate_limits'] = [v.as_dict() for v in rate_limits]
@@ -2943,11 +2668,13 @@ def create_and_wait(
         name: str,
         config: EndpointCoreConfigInput,
         *,
+        ai_gateway: Optional[AiGatewayConfig] = None,
         rate_limits: Optional[List[RateLimit]] = None,
         route_optimized: Optional[bool] = None,
         tags: Optional[List[EndpointTag]] = None,
         timeout=timedelta(minutes=20)) -> ServingEndpointDetailed:
-        return self.create(config=config,
+        return self.create(ai_gateway=ai_gateway,
+                           config=config,
                            name=name,
                            rate_limits=rate_limits,
                            route_optimized=route_optimized,
@@ -3113,8 +2840,8 @@ def patch(self,
     def put(self, name: str, *, rate_limits: Optional[List[RateLimit]] = None) -> PutResponse:
         """Update rate limits of a serving endpoint.
         
-        Used to update the rate limits of a serving endpoint. NOTE: only external and foundation model
-        endpoints are supported as of now.
+        Used to update the rate limits of a serving endpoint. NOTE: Only foundation model endpoints are
+        currently supported. For external models, use AI Gateway to manage rate limits.
         
         :param name: str
           The name of the serving endpoint whose rate limits are being updated. This field is required.
@@ -3133,6 +2860,45 @@ def put(self, name: str, *, rate_limits: Optional[List[RateLimit]] = None) -> Pu
                            headers=headers)
         return PutResponse.from_dict(res)
 
+    def put_ai_gateway(
+            self,
+            name: str,
+            *,
+            guardrails: Optional[AiGatewayGuardrails] = None,
+            inference_table_config: Optional[AiGatewayInferenceTableConfig] = None,
+            rate_limits: Optional[List[AiGatewayRateLimit]] = None,
+            usage_tracking_config: Optional[AiGatewayUsageTrackingConfig] = None) -> PutAiGatewayResponse:
+        """Update AI Gateway of a serving endpoint.
+        
+        Used to update the AI Gateway of a serving endpoint. NOTE: Only external model endpoints are currently
+        supported.
+        
+        :param name: str
+          The name of the serving endpoint whose AI Gateway is being updated. This field is required.
+        :param guardrails: :class:`AiGatewayGuardrails` (optional)
+          Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and responses.
+        :param inference_table_config: :class:`AiGatewayInferenceTableConfig` (optional)
+          Configuration for payload logging using inference tables. Use these tables to monitor and audit data
+          being sent to and received from model APIs and to improve model quality.
+        :param rate_limits: List[:class:`AiGatewayRateLimit`] (optional)
+          Configuration for rate limits which can be set to limit endpoint traffic.
+        :param usage_tracking_config: :class:`AiGatewayUsageTrackingConfig` (optional)
+          Configuration to enable usage tracking using system tables. These tables allow you to monitor
+          operational usage on endpoints and their associated costs.
+        
+        :returns: :class:`PutAiGatewayResponse`
+        """
+        body = {}
+        if guardrails is not None: body['guardrails'] = guardrails.as_dict()
+        if inference_table_config is not None:
+            body['inference_table_config'] = inference_table_config.as_dict()
+        if rate_limits is not None: body['rate_limits'] = [v.as_dict() for v in rate_limits]
+        if usage_tracking_config is not None: body['usage_tracking_config'] = usage_tracking_config.as_dict()
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('PUT', f'/api/2.0/serving-endpoints/{name}/ai-gateway', body=body, headers=headers)
+        return PutAiGatewayResponse.from_dict(res)
+
     def query(self,
               name: str,
               *,
@@ -3335,3 +3101,118 @@ def update_permissions(
                            body=body,
                            headers=headers)
         return ServingEndpointPermissions.from_dict(res)
+
+
+class ServingEndpointsDataPlaneAPI:
+    """Serving endpoints DataPlane provides a set of operations to interact with data plane endpoints for Serving
+    endpoints service."""
+
+    def __init__(self, api_client, control_plane):
+        self._api = api_client
+        self._control_plane = control_plane
+        self._data_plane_service = DataPlaneService()
+
+    def query(self,
+              name: str,
+              *,
+              dataframe_records: Optional[List[Any]] = None,
+              dataframe_split: Optional[DataframeSplitInput] = None,
+              extra_params: Optional[Dict[str, str]] = None,
+              input: Optional[Any] = None,
+              inputs: Optional[Any] = None,
+              instances: Optional[List[Any]] = None,
+              max_tokens: Optional[int] = None,
+              messages: Optional[List[ChatMessage]] = None,
+              n: Optional[int] = None,
+              prompt: Optional[Any] = None,
+              stop: Optional[List[str]] = None,
+              stream: Optional[bool] = None,
+              temperature: Optional[float] = None) -> QueryEndpointResponse:
+        """Query a serving endpoint.
+        
+        :param name: str
+          The name of the serving endpoint. This field is required.
+        :param dataframe_records: List[Any] (optional)
+          Pandas Dataframe input in the records orientation.
+        :param dataframe_split: :class:`DataframeSplitInput` (optional)
+          Pandas Dataframe input in the split orientation.
+        :param extra_params: Dict[str,str] (optional)
+          The extra parameters field used ONLY for __completions, chat,__ and __embeddings external &
+          foundation model__ serving endpoints. This is a map of strings and should only be used with other
+          external/foundation model query fields.
+        :param input: Any (optional)
+          The input string (or array of strings) field used ONLY for __embeddings external & foundation
+          model__ serving endpoints and is the only field (along with extra_params if needed) used by
+          embeddings queries.
+        :param inputs: Any (optional)
+          Tensor-based input in columnar format.
+        :param instances: List[Any] (optional)
+          Tensor-based input in row format.
+        :param max_tokens: int (optional)
+          The max tokens field used ONLY for __completions__ and __chat external & foundation model__ serving
+          endpoints. This is an integer and should only be used with other chat/completions query fields.
+        :param messages: List[:class:`ChatMessage`] (optional)
+          The messages field used ONLY for __chat external & foundation model__ serving endpoints. This is a
+          map of strings and should only be used with other chat query fields.
+        :param n: int (optional)
+          The n (number of candidates) field used ONLY for __completions__ and __chat external & foundation
+          model__ serving endpoints. This is an integer between 1 and 5 with a default of 1 and should only be
+          used with other chat/completions query fields.
+        :param prompt: Any (optional)
+          The prompt string (or array of strings) field used ONLY for __completions external & foundation
+          model__ serving endpoints and should only be used with other completions query fields.
+        :param stop: List[str] (optional)
+          The stop sequences field used ONLY for __completions__ and __chat external & foundation model__
+          serving endpoints. This is a list of strings and should only be used with other chat/completions
+          query fields.
+        :param stream: bool (optional)
+          The stream field used ONLY for __completions__ and __chat external & foundation model__ serving
+          endpoints. This is a boolean defaulting to false and should only be used with other chat/completions
+          query fields.
+        :param temperature: float (optional)
+          The temperature field used ONLY for __completions__ and __chat external & foundation model__ serving
+          endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be used with
+          other chat/completions query fields.
+        
+        :returns: :class:`QueryEndpointResponse`
+        """
+        body = {}
+        if dataframe_records is not None: body['dataframe_records'] = [v for v in dataframe_records]
+        if dataframe_split is not None: body['dataframe_split'] = dataframe_split.as_dict()
+        if extra_params is not None: body['extra_params'] = extra_params
+        if input is not None: body['input'] = input
+        if inputs is not None: body['inputs'] = inputs
+        if instances is not None: body['instances'] = [v for v in instances]
+        if max_tokens is not None: body['max_tokens'] = max_tokens
+        if messages is not None: body['messages'] = [v.as_dict() for v in messages]
+        if n is not None: body['n'] = n
+        if prompt is not None: body['prompt'] = prompt
+        if stop is not None: body['stop'] = [v for v in stop]
+        if stream is not None: body['stream'] = stream
+        if temperature is not None: body['temperature'] = temperature
+
+        def info_getter():
+            response = self._control_plane.get(name=name, )
+            if response.data_plane_info is None:
+                raise Exception("Resource does not support direct Data Plane access")
+            return response.data_plane_info.query_info
+
+        get_params = [name, ]
+        data_plane_details = self._data_plane_service.get_data_plane_details('query', get_params, info_getter,
+                                                                             self._api.get_oauth_token)
+        token = data_plane_details.token
+
+        def auth(r: requests.PreparedRequest) -> requests.PreparedRequest:
+            authorization = f"{token.token_type} {token.access_token}"
+            r.headers["Authorization"] = authorization
+            return r
+
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        response_headers = ['served-model-name', ]
+        res = self._api.do('POST',
+                           url=data_plane_details.endpoint_url,
+                           body=body,
+                           headers=headers,
+                           response_headers=response_headers,
+                           auth=auth)
+        return QueryEndpointResponse.from_dict(res)
diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py
index b02323848..a6a235158 100755
--- a/databricks/sdk/service/settings.py
+++ b/databricks/sdk/service/settings.py
@@ -50,6 +50,22 @@ def from_dict(cls, d: Dict[str, any]) -> AutomaticClusterUpdateSetting:
                    setting_name=d.get('setting_name', None))
 
 
+@dataclass
+class BooleanMessage:
+    value: Optional[bool] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the BooleanMessage into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.value is not None: body['value'] = self.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> BooleanMessage:
+        """Deserializes the BooleanMessage from a dictionary."""
+        return cls(value=d.get('value', None))
+
+
 @dataclass
 class ClusterAutoRestartMessage:
     can_toggle: Optional[bool] = None
@@ -147,7 +163,6 @@ def from_dict(cls, d: Dict[str, any]) -> ClusterAutoRestartMessageMaintenanceWin
 
 class ClusterAutoRestartMessageMaintenanceWindowDayOfWeek(Enum):
 
-    DAY_OF_WEEK_UNSPECIFIED = 'DAY_OF_WEEK_UNSPECIFIED'
     FRIDAY = 'FRIDAY'
     MONDAY = 'MONDAY'
     SATURDAY = 'SATURDAY'
@@ -192,7 +207,6 @@ class ClusterAutoRestartMessageMaintenanceWindowWeekDayFrequency(Enum):
     SECOND_AND_FOURTH_OF_MONTH = 'SECOND_AND_FOURTH_OF_MONTH'
     SECOND_OF_MONTH = 'SECOND_OF_MONTH'
     THIRD_OF_MONTH = 'THIRD_OF_MONTH'
-    WEEK_DAY_FREQUENCY_UNSPECIFIED = 'WEEK_DAY_FREQUENCY_UNSPECIFIED'
 
 
 @dataclass
@@ -281,7 +295,7 @@ def from_dict(cls, d: Dict[str, any]) -> ComplianceSecurityProfileSetting:
 class ComplianceStandard(Enum):
     """Compliance stardard for SHIELD customers"""
 
-    COMPLIANCE_STANDARD_UNSPECIFIED = 'COMPLIANCE_STANDARD_UNSPECIFIED'
+    CANADA_PROTECTED_B = 'CANADA_PROTECTED_B'
     CYBER_ESSENTIAL_PLUS = 'CYBER_ESSENTIAL_PLUS'
     FEDRAMP_HIGH = 'FEDRAMP_HIGH'
     FEDRAMP_IL5 = 'FEDRAMP_IL5'
@@ -293,6 +307,38 @@ class ComplianceStandard(Enum):
     PCI_DSS = 'PCI_DSS'
 
 
+@dataclass
+class Config:
+    email: Optional[EmailConfig] = None
+
+    generic_webhook: Optional[GenericWebhookConfig] = None
+
+    microsoft_teams: Optional[MicrosoftTeamsConfig] = None
+
+    pagerduty: Optional[PagerdutyConfig] = None
+
+    slack: Optional[SlackConfig] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the Config into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.email: body['email'] = self.email.as_dict()
+        if self.generic_webhook: body['generic_webhook'] = self.generic_webhook.as_dict()
+        if self.microsoft_teams: body['microsoft_teams'] = self.microsoft_teams.as_dict()
+        if self.pagerduty: body['pagerduty'] = self.pagerduty.as_dict()
+        if self.slack: body['slack'] = self.slack.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> Config:
+        """Deserializes the Config from a dictionary."""
+        return cls(email=_from_dict(d, 'email', EmailConfig),
+                   generic_webhook=_from_dict(d, 'generic_webhook', GenericWebhookConfig),
+                   microsoft_teams=_from_dict(d, 'microsoft_teams', MicrosoftTeamsConfig),
+                   pagerduty=_from_dict(d, 'pagerduty', PagerdutyConfig),
+                   slack=_from_dict(d, 'slack', SlackConfig))
+
+
 @dataclass
 class CreateIpAccessList:
     """Details required to configure a block list or allow list."""
@@ -367,6 +413,27 @@ def from_dict(cls, d: Dict[str, any]) -> CreateNetworkConnectivityConfigRequest:
         return cls(name=d.get('name', None), region=d.get('region', None))
 
 
+@dataclass
+class CreateNotificationDestinationRequest:
+    config: Optional[Config] = None
+    """The configuration for the notification destination. Must wrap EXACTLY one of the nested configs."""
+
+    display_name: Optional[str] = None
+    """The display name for the notification destination."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CreateNotificationDestinationRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.config: body['config'] = self.config.as_dict()
+        if self.display_name is not None: body['display_name'] = self.display_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CreateNotificationDestinationRequest:
+        """Deserializes the CreateNotificationDestinationRequest from a dictionary."""
+        return cls(config=_from_dict(d, 'config', Config), display_name=d.get('display_name', None))
+
+
 @dataclass
 class CreateOboTokenRequest:
     """Configuration details for creating on-behalf tokens."""
@@ -629,6 +696,78 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteDefaultNamespaceSettingResponse:
         return cls(etag=d.get('etag', None))
 
 
+@dataclass
+class DeleteDisableLegacyAccessResponse:
+    """The etag is returned."""
+
+    etag: str
+    """etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+    for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+    overwriting each other. It is strongly suggested that systems make use of the etag in the read
+    -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+    an etag from a GET request, and pass it with the DELETE request to identify the rule set version
+    you are deleting."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DeleteDisableLegacyAccessResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DeleteDisableLegacyAccessResponse:
+        """Deserializes the DeleteDisableLegacyAccessResponse from a dictionary."""
+        return cls(etag=d.get('etag', None))
+
+
+@dataclass
+class DeleteDisableLegacyDbfsResponse:
+    """The etag is returned."""
+
+    etag: str
+    """etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+    for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+    overwriting each other. It is strongly suggested that systems make use of the etag in the read
+    -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+    an etag from a GET request, and pass it with the DELETE request to identify the rule set version
+    you are deleting."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DeleteDisableLegacyDbfsResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DeleteDisableLegacyDbfsResponse:
+        """Deserializes the DeleteDisableLegacyDbfsResponse from a dictionary."""
+        return cls(etag=d.get('etag', None))
+
+
+@dataclass
+class DeleteDisableLegacyFeaturesResponse:
+    """The etag is returned."""
+
+    etag: str
+    """etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+    for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+    overwriting each other. It is strongly suggested that systems make use of the etag in the read
+    -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+    an etag from a GET request, and pass it with the DELETE request to identify the rule set version
+    you are deleting."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DeleteDisableLegacyFeaturesResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DeleteDisableLegacyFeaturesResponse:
+        """Deserializes the DeleteDisableLegacyFeaturesResponse from a dictionary."""
+        return cls(etag=d.get('etag', None))
+
+
 @dataclass
 class DeleteNetworkConnectivityConfigurationResponse:
 
@@ -705,6 +844,149 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteRestrictWorkspaceAdminsSettingRes
         return cls(etag=d.get('etag', None))
 
 
+class DestinationType(Enum):
+
+    EMAIL = 'EMAIL'
+    MICROSOFT_TEAMS = 'MICROSOFT_TEAMS'
+    PAGERDUTY = 'PAGERDUTY'
+    SLACK = 'SLACK'
+    WEBHOOK = 'WEBHOOK'
+
+
+@dataclass
+class DisableLegacyAccess:
+    disable_legacy_access: BooleanMessage
+
+    etag: Optional[str] = None
+    """etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+    for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+    overwriting each other. It is strongly suggested that systems make use of the etag in the read
+    -> update pattern to perform setting updates in order to avoid race conditions. That is, get an
+    etag from a GET request, and pass it with the PATCH request to identify the setting version you
+    are updating."""
+
+    setting_name: Optional[str] = None
+    """Name of the corresponding setting. This field is populated in the response, but it will not be
+    respected even if it's set in the request body. The setting name in the path parameter will be
+    respected instead. Setting name is required to be 'default' if the setting only has one instance
+    per workspace."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DisableLegacyAccess into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.disable_legacy_access: body['disable_legacy_access'] = self.disable_legacy_access.as_dict()
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DisableLegacyAccess:
+        """Deserializes the DisableLegacyAccess from a dictionary."""
+        return cls(disable_legacy_access=_from_dict(d, 'disable_legacy_access', BooleanMessage),
+                   etag=d.get('etag', None),
+                   setting_name=d.get('setting_name', None))
+
+
+@dataclass
+class DisableLegacyDbfs:
+    disable_legacy_dbfs: BooleanMessage
+
+    etag: Optional[str] = None
+    """etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+    for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+    overwriting each other. It is strongly suggested that systems make use of the etag in the read
+    -> update pattern to perform setting updates in order to avoid race conditions. That is, get an
+    etag from a GET request, and pass it with the PATCH request to identify the setting version you
+    are updating."""
+
+    setting_name: Optional[str] = None
+    """Name of the corresponding setting. This field is populated in the response, but it will not be
+    respected even if it's set in the request body. The setting name in the path parameter will be
+    respected instead. Setting name is required to be 'default' if the setting only has one instance
+    per workspace."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DisableLegacyDbfs into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.disable_legacy_dbfs: body['disable_legacy_dbfs'] = self.disable_legacy_dbfs.as_dict()
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DisableLegacyDbfs:
+        """Deserializes the DisableLegacyDbfs from a dictionary."""
+        return cls(disable_legacy_dbfs=_from_dict(d, 'disable_legacy_dbfs', BooleanMessage),
+                   etag=d.get('etag', None),
+                   setting_name=d.get('setting_name', None))
+
+
+@dataclass
+class DisableLegacyFeatures:
+    disable_legacy_features: BooleanMessage
+
+    etag: Optional[str] = None
+    """etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+    for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+    overwriting each other. It is strongly suggested that systems make use of the etag in the read
+    -> update pattern to perform setting updates in order to avoid race conditions. That is, get an
+    etag from a GET request, and pass it with the PATCH request to identify the setting version you
+    are updating."""
+
+    setting_name: Optional[str] = None
+    """Name of the corresponding setting. This field is populated in the response, but it will not be
+    respected even if it's set in the request body. The setting name in the path parameter will be
+    respected instead. Setting name is required to be 'default' if the setting only has one instance
+    per workspace."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DisableLegacyFeatures into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.disable_legacy_features:
+            body['disable_legacy_features'] = self.disable_legacy_features.as_dict()
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DisableLegacyFeatures:
+        """Deserializes the DisableLegacyFeatures from a dictionary."""
+        return cls(disable_legacy_features=_from_dict(d, 'disable_legacy_features', BooleanMessage),
+                   etag=d.get('etag', None),
+                   setting_name=d.get('setting_name', None))
+
+
+@dataclass
+class EmailConfig:
+    addresses: Optional[List[str]] = None
+    """Email addresses to notify."""
+
+    def as_dict(self) -> dict:
+        """Serializes the EmailConfig into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.addresses: body['addresses'] = [v for v in self.addresses]
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> EmailConfig:
+        """Deserializes the EmailConfig from a dictionary."""
+        return cls(addresses=d.get('addresses', None))
+
+
+@dataclass
+class Empty:
+
+    def as_dict(self) -> dict:
+        """Serializes the Empty into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> Empty:
+        """Deserializes the Empty from a dictionary."""
+        return cls()
+
+
 @dataclass
 class EnhancedSecurityMonitoring:
     """SHIELD feature: ESM"""
@@ -920,6 +1202,48 @@ def from_dict(cls, d: Dict[str, any]) -> FetchIpAccessListResponse:
         return cls(ip_access_list=_from_dict(d, 'ip_access_list', IpAccessListInfo))
 
 
+@dataclass
+class GenericWebhookConfig:
+    password: Optional[str] = None
+    """[Input-Only][Optional] Password for webhook."""
+
+    password_set: Optional[bool] = None
+    """[Output-Only] Whether password is set."""
+
+    url: Optional[str] = None
+    """[Input-Only] URL for webhook."""
+
+    url_set: Optional[bool] = None
+    """[Output-Only] Whether URL is set."""
+
+    username: Optional[str] = None
+    """[Input-Only][Optional] Username for webhook."""
+
+    username_set: Optional[bool] = None
+    """[Output-Only] Whether username is set."""
+
+    def as_dict(self) -> dict:
+        """Serializes the GenericWebhookConfig into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.password is not None: body['password'] = self.password
+        if self.password_set is not None: body['password_set'] = self.password_set
+        if self.url is not None: body['url'] = self.url
+        if self.url_set is not None: body['url_set'] = self.url_set
+        if self.username is not None: body['username'] = self.username
+        if self.username_set is not None: body['username_set'] = self.username_set
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GenericWebhookConfig:
+        """Deserializes the GenericWebhookConfig from a dictionary."""
+        return cls(password=d.get('password', None),
+                   password_set=d.get('password_set', None),
+                   url=d.get('url', None),
+                   url_set=d.get('url_set', None),
+                   username=d.get('username', None),
+                   username_set=d.get('username_set', None))
+
+
 @dataclass
 class GetIpAccessListResponse:
     ip_access_list: Optional[IpAccessListInfo] = None
@@ -1118,6 +1442,54 @@ def from_dict(cls, d: Dict[str, any]) -> ListNetworkConnectivityConfigurationsRe
                    next_page_token=d.get('next_page_token', None))
 
 
+@dataclass
+class ListNotificationDestinationsResponse:
+    next_page_token: Optional[str] = None
+    """Page token for the next page of results."""
+
+    results: Optional[List[ListNotificationDestinationsResult]] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the ListNotificationDestinationsResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.results: body['results'] = [v.as_dict() for v in self.results]
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListNotificationDestinationsResponse:
+        """Deserializes the ListNotificationDestinationsResponse from a dictionary."""
+        return cls(next_page_token=d.get('next_page_token', None),
+                   results=_repeated_dict(d, 'results', ListNotificationDestinationsResult))
+
+
+@dataclass
+class ListNotificationDestinationsResult:
+    destination_type: Optional[DestinationType] = None
+    """[Output-only] The type of the notification destination. The type cannot be changed once set."""
+
+    display_name: Optional[str] = None
+    """The display name for the notification destination."""
+
+    id: Optional[str] = None
+    """UUID identifying notification destination."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListNotificationDestinationsResult into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.destination_type is not None: body['destination_type'] = self.destination_type.value
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.id is not None: body['id'] = self.id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListNotificationDestinationsResult:
+        """Deserializes the ListNotificationDestinationsResult from a dictionary."""
+        return cls(destination_type=_enum(d, 'destination_type', DestinationType),
+                   display_name=d.get('display_name', None),
+                   id=d.get('id', None))
+
+
 @dataclass
 class ListPublicTokensResponse:
     token_infos: Optional[List[PublicTokenInfo]] = None
@@ -1164,6 +1536,27 @@ class ListType(Enum):
     BLOCK = 'BLOCK'
 
 
+@dataclass
+class MicrosoftTeamsConfig:
+    url: Optional[str] = None
+    """[Input-Only] URL for Microsoft Teams."""
+
+    url_set: Optional[bool] = None
+    """[Output-Only] Whether URL is set."""
+
+    def as_dict(self) -> dict:
+        """Serializes the MicrosoftTeamsConfig into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.url is not None: body['url'] = self.url
+        if self.url_set is not None: body['url_set'] = self.url_set
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> MicrosoftTeamsConfig:
+        """Deserializes the MicrosoftTeamsConfig from a dictionary."""
+        return cls(url=d.get('url', None), url_set=d.get('url_set', None))
+
+
 @dataclass
 class NccAwsStableIpRule:
     """The stable AWS IP CIDR blocks. You can use these to configure the firewall of your resources to
@@ -1451,11 +1844,66 @@ def from_dict(cls, d: Dict[str, any]) -> NetworkConnectivityConfiguration:
 
 
 @dataclass
-class PartitionId:
-    """Partition by workspace or account"""
+class NotificationDestination:
+    config: Optional[Config] = None
+    """The configuration for the notification destination. Will be exactly one of the nested configs.
+    Only returns for users with workspace admin permissions."""
 
-    workspace_id: Optional[int] = None
-    """The ID of the workspace."""
+    destination_type: Optional[DestinationType] = None
+    """[Output-only] The type of the notification destination. The type cannot be changed once set."""
+
+    display_name: Optional[str] = None
+    """The display name for the notification destination."""
+
+    id: Optional[str] = None
+    """UUID identifying notification destination."""
+
+    def as_dict(self) -> dict:
+        """Serializes the NotificationDestination into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.config: body['config'] = self.config.as_dict()
+        if self.destination_type is not None: body['destination_type'] = self.destination_type.value
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.id is not None: body['id'] = self.id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> NotificationDestination:
+        """Deserializes the NotificationDestination from a dictionary."""
+        return cls(config=_from_dict(d, 'config', Config),
+                   destination_type=_enum(d, 'destination_type', DestinationType),
+                   display_name=d.get('display_name', None),
+                   id=d.get('id', None))
+
+
+@dataclass
+class PagerdutyConfig:
+    integration_key: Optional[str] = None
+    """[Input-Only] Integration key for PagerDuty."""
+
+    integration_key_set: Optional[bool] = None
+    """[Output-Only] Whether integration key is set."""
+
+    def as_dict(self) -> dict:
+        """Serializes the PagerdutyConfig into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.integration_key is not None: body['integration_key'] = self.integration_key
+        if self.integration_key_set is not None: body['integration_key_set'] = self.integration_key_set
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> PagerdutyConfig:
+        """Deserializes the PagerdutyConfig from a dictionary."""
+        return cls(integration_key=d.get('integration_key', None),
+                   integration_key_set=d.get('integration_key_set', None))
+
+
+@dataclass
+class PartitionId:
+    """Partition by workspace or account"""
+
+    workspace_id: Optional[int] = None
+    """The ID of the workspace."""
 
     def as_dict(self) -> dict:
         """Serializes the PartitionId into a dictionary suitable for use as a JSON request body."""
@@ -1642,7 +2090,6 @@ class RestrictWorkspaceAdminsMessageStatus(Enum):
 
     ALLOW_ALL = 'ALLOW_ALL'
     RESTRICT_TOKENS_AND_JOB_RUN_AS = 'RESTRICT_TOKENS_AND_JOB_RUN_AS'
-    STATUS_UNSPECIFIED = 'STATUS_UNSPECIFIED'
 
 
 @dataclass
@@ -1726,6 +2173,27 @@ def from_dict(cls, d: Dict[str, any]) -> SetStatusResponse:
         return cls()
 
 
+@dataclass
+class SlackConfig:
+    url: Optional[str] = None
+    """[Input-Only] URL for Slack destination."""
+
+    url_set: Optional[bool] = None
+    """[Output-Only] Whether URL is set."""
+
+    def as_dict(self) -> dict:
+        """Serializes the SlackConfig into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.url is not None: body['url'] = self.url
+        if self.url_set is not None: body['url_set'] = self.url_set
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> SlackConfig:
+        """Deserializes the SlackConfig from a dictionary."""
+        return cls(url=d.get('url', None), url_set=d.get('url_set', None))
+
+
 @dataclass
 class StringMessage:
     value: Optional[str] = None
@@ -1837,6 +2305,9 @@ class TokenInfo:
     token_id: Optional[str] = None
     """ID of the token."""
 
+    workspace_id: Optional[int] = None
+    """If applicable, the ID of the workspace that the token was created in."""
+
     def as_dict(self) -> dict:
         """Serializes the TokenInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -1847,6 +2318,7 @@ def as_dict(self) -> dict:
         if self.expiry_time is not None: body['expiry_time'] = self.expiry_time
         if self.owner_id is not None: body['owner_id'] = self.owner_id
         if self.token_id is not None: body['token_id'] = self.token_id
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
         return body
 
     @classmethod
@@ -1858,7 +2330,8 @@ def from_dict(cls, d: Dict[str, any]) -> TokenInfo:
                    creation_time=d.get('creation_time', None),
                    expiry_time=d.get('expiry_time', None),
                    owner_id=d.get('owner_id', None),
-                   token_id=d.get('token_id', None))
+                   token_id=d.get('token_id', None),
+                   workspace_id=d.get('workspace_id', None))
 
 
 @dataclass
@@ -1958,6 +2431,7 @@ def from_dict(cls, d: Dict[str, any]) -> TokenPermissionsRequest:
 class TokenType(Enum):
     """The type of token request. As of now, only `AZURE_ACTIVE_DIRECTORY_TOKEN` is supported."""
 
+    ARCLIGHT_AZURE_EXCHANGE_TOKEN = 'ARCLIGHT_AZURE_EXCHANGE_TOKEN'
     AZURE_ACTIVE_DIRECTORY_TOKEN = 'AZURE_ACTIVE_DIRECTORY_TOKEN'
 
 
@@ -2088,6 +2562,96 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateDefaultNamespaceSettingRequest:
                    setting=_from_dict(d, 'setting', DefaultNamespaceSetting))
 
 
+@dataclass
+class UpdateDisableLegacyAccessRequest:
+    """Details required to update a setting."""
+
+    allow_missing: bool
+    """This should always be set to true for Settings API. Added for AIP compliance."""
+
+    setting: DisableLegacyAccess
+
+    field_mask: str
+    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
+    the setting payload will be updated. The field mask needs to be supplied as single string. To
+    specify multiple fields in the field mask, use comma as the separator (no space)."""
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateDisableLegacyAccessRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateDisableLegacyAccessRequest:
+        """Deserializes the UpdateDisableLegacyAccessRequest from a dictionary."""
+        return cls(allow_missing=d.get('allow_missing', None),
+                   field_mask=d.get('field_mask', None),
+                   setting=_from_dict(d, 'setting', DisableLegacyAccess))
+
+
+@dataclass
+class UpdateDisableLegacyDbfsRequest:
+    """Details required to update a setting."""
+
+    allow_missing: bool
+    """This should always be set to true for Settings API. Added for AIP compliance."""
+
+    setting: DisableLegacyDbfs
+
+    field_mask: str
+    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
+    the setting payload will be updated. The field mask needs to be supplied as single string. To
+    specify multiple fields in the field mask, use comma as the separator (no space)."""
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateDisableLegacyDbfsRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateDisableLegacyDbfsRequest:
+        """Deserializes the UpdateDisableLegacyDbfsRequest from a dictionary."""
+        return cls(allow_missing=d.get('allow_missing', None),
+                   field_mask=d.get('field_mask', None),
+                   setting=_from_dict(d, 'setting', DisableLegacyDbfs))
+
+
+@dataclass
+class UpdateDisableLegacyFeaturesRequest:
+    """Details required to update a setting."""
+
+    allow_missing: bool
+    """This should always be set to true for Settings API. Added for AIP compliance."""
+
+    setting: DisableLegacyFeatures
+
+    field_mask: str
+    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
+    the setting payload will be updated. The field mask needs to be supplied as single string. To
+    specify multiple fields in the field mask, use comma as the separator (no space)."""
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateDisableLegacyFeaturesRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateDisableLegacyFeaturesRequest:
+        """Deserializes the UpdateDisableLegacyFeaturesRequest from a dictionary."""
+        return cls(allow_missing=d.get('allow_missing', None),
+                   field_mask=d.get('field_mask', None),
+                   setting=_from_dict(d, 'setting', DisableLegacyFeatures))
+
+
 @dataclass
 class UpdateEnhancedSecurityMonitoringSettingRequest:
     """Details required to update a setting."""
@@ -2189,6 +2753,32 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateIpAccessList:
                    list_type=_enum(d, 'list_type', ListType))
 
 
+@dataclass
+class UpdateNotificationDestinationRequest:
+    config: Optional[Config] = None
+    """The configuration for the notification destination. Must wrap EXACTLY one of the nested configs."""
+
+    display_name: Optional[str] = None
+    """The display name for the notification destination."""
+
+    id: Optional[str] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateNotificationDestinationRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.config: body['config'] = self.config.as_dict()
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.id is not None: body['id'] = self.id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateNotificationDestinationRequest:
+        """Deserializes the UpdateNotificationDestinationRequest from a dictionary."""
+        return cls(config=_from_dict(d, 'config', Config),
+                   display_name=d.get('display_name', None),
+                   id=d.get('id', None))
+
+
 @dataclass
 class UpdatePersonalComputeSettingRequest:
     """Details required to update a setting."""
@@ -2488,6 +3078,7 @@ def __init__(self, api_client):
         self._api = api_client
 
         self._csp_enablement_account = CspEnablementAccountAPI(self._api)
+        self._disable_legacy_features = DisableLegacyFeaturesAPI(self._api)
         self._esm_enablement_account = EsmEnablementAccountAPI(self._api)
         self._personal_compute = PersonalComputeAPI(self._api)
 
@@ -2496,6 +3087,11 @@ def csp_enablement_account(self) -> CspEnablementAccountAPI:
         """The compliance security profile settings at the account level control whether to enable it for new workspaces."""
         return self._csp_enablement_account
 
+    @property
+    def disable_legacy_features(self) -> DisableLegacyFeaturesAPI:
+        """Disable legacy features for new Databricks workspaces."""
+        return self._disable_legacy_features
+
     @property
     def esm_enablement_account(self) -> EsmEnablementAccountAPI:
         """The enhanced security monitoring setting at the account level controls whether to enable the feature on new workspaces."""
@@ -2849,6 +3445,273 @@ def update(self, allow_missing: bool, setting: DefaultNamespaceSetting,
         return DefaultNamespaceSetting.from_dict(res)
 
 
+class DisableLegacyAccessAPI:
+    """'Disabling legacy access' has the following impacts:
+    
+    1. Disables direct access to the Hive Metastore. However, you can still access Hive Metastore through HMS
+    Federation. 2. Disables Fallback Mode on any External Location access from the workspace. 3.
+    Alters DBFS path access to use External Location permissions in place of legacy credentials. 4. Enforces
+    Unity Catalog access on all path based access."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def delete(self, *, etag: Optional[str] = None) -> DeleteDisableLegacyAccessResponse:
+        """Delete Legacy Access Disablement Status.
+        
+        Deletes legacy access disablement status.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DeleteDisableLegacyAccessResponse`
+        """
+
+        query = {}
+        if etag is not None: query['etag'] = etag
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('DELETE',
+                           '/api/2.0/settings/types/disable_legacy_access/names/default',
+                           query=query,
+                           headers=headers)
+        return DeleteDisableLegacyAccessResponse.from_dict(res)
+
+    def get(self, *, etag: Optional[str] = None) -> DisableLegacyAccess:
+        """Retrieve Legacy Access Disablement Status.
+        
+        Retrieves legacy access disablement Status.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DisableLegacyAccess`
+        """
+
+        query = {}
+        if etag is not None: query['etag'] = etag
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET',
+                           '/api/2.0/settings/types/disable_legacy_access/names/default',
+                           query=query,
+                           headers=headers)
+        return DisableLegacyAccess.from_dict(res)
+
+    def update(self, allow_missing: bool, setting: DisableLegacyAccess,
+               field_mask: str) -> DisableLegacyAccess:
+        """Update Legacy Access Disablement Status.
+        
+        Updates legacy access disablement status.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`DisableLegacyAccess`
+        :param field_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as single string. To specify
+          multiple fields in the field mask, use comma as the separator (no space).
+        
+        :returns: :class:`DisableLegacyAccess`
+        """
+        body = {}
+        if allow_missing is not None: body['allow_missing'] = allow_missing
+        if field_mask is not None: body['field_mask'] = field_mask
+        if setting is not None: body['setting'] = setting.as_dict()
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('PATCH',
+                           '/api/2.0/settings/types/disable_legacy_access/names/default',
+                           body=body,
+                           headers=headers)
+        return DisableLegacyAccess.from_dict(res)
+
+
+class DisableLegacyDbfsAPI:
+    """When this setting is on, access to DBFS root and DBFS mounts is disallowed (as well as creation of new
+    mounts). When the setting is off, all DBFS functionality is enabled"""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def delete(self, *, etag: Optional[str] = None) -> DeleteDisableLegacyDbfsResponse:
+        """Delete the disable legacy DBFS setting.
+        
+        Deletes the disable legacy DBFS setting for a workspace, reverting back to the default.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DeleteDisableLegacyDbfsResponse`
+        """
+
+        query = {}
+        if etag is not None: query['etag'] = etag
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('DELETE',
+                           '/api/2.0/settings/types/disable_legacy_dbfs/names/default',
+                           query=query,
+                           headers=headers)
+        return DeleteDisableLegacyDbfsResponse.from_dict(res)
+
+    def get(self, *, etag: Optional[str] = None) -> DisableLegacyDbfs:
+        """Get the disable legacy DBFS setting.
+        
+        Gets the disable legacy DBFS setting.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DisableLegacyDbfs`
+        """
+
+        query = {}
+        if etag is not None: query['etag'] = etag
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET',
+                           '/api/2.0/settings/types/disable_legacy_dbfs/names/default',
+                           query=query,
+                           headers=headers)
+        return DisableLegacyDbfs.from_dict(res)
+
+    def update(self, allow_missing: bool, setting: DisableLegacyDbfs, field_mask: str) -> DisableLegacyDbfs:
+        """Update the disable legacy DBFS setting.
+        
+        Updates the disable legacy DBFS setting for the workspace.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`DisableLegacyDbfs`
+        :param field_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as single string. To specify
+          multiple fields in the field mask, use comma as the separator (no space).
+        
+        :returns: :class:`DisableLegacyDbfs`
+        """
+        body = {}
+        if allow_missing is not None: body['allow_missing'] = allow_missing
+        if field_mask is not None: body['field_mask'] = field_mask
+        if setting is not None: body['setting'] = setting.as_dict()
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('PATCH',
+                           '/api/2.0/settings/types/disable_legacy_dbfs/names/default',
+                           body=body,
+                           headers=headers)
+        return DisableLegacyDbfs.from_dict(res)
+
+
+class DisableLegacyFeaturesAPI:
+    """Disable legacy features for new Databricks workspaces.
+    
+    For newly created workspaces: 1. Disables the use of DBFS root and mounts. 2. Hive Metastore will not be
+    provisioned. 3. Disables the use of ‘No-isolation clusters’. 4. Disables Databricks Runtime versions
+    prior to 13.3LTS."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def delete(self, *, etag: Optional[str] = None) -> DeleteDisableLegacyFeaturesResponse:
+        """Delete the disable legacy features setting.
+        
+        Deletes the disable legacy features setting.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DeleteDisableLegacyFeaturesResponse`
+        """
+
+        query = {}
+        if etag is not None: query['etag'] = etag
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do(
+            'DELETE',
+            f'/api/2.0/accounts/{self._api.account_id}/settings/types/disable_legacy_features/names/default',
+            query=query,
+            headers=headers)
+        return DeleteDisableLegacyFeaturesResponse.from_dict(res)
+
+    def get(self, *, etag: Optional[str] = None) -> DisableLegacyFeatures:
+        """Get the disable legacy features setting.
+        
+        Gets the value of the disable legacy features setting.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DisableLegacyFeatures`
+        """
+
+        query = {}
+        if etag is not None: query['etag'] = etag
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do(
+            'GET',
+            f'/api/2.0/accounts/{self._api.account_id}/settings/types/disable_legacy_features/names/default',
+            query=query,
+            headers=headers)
+        return DisableLegacyFeatures.from_dict(res)
+
+    def update(self, allow_missing: bool, setting: DisableLegacyFeatures,
+               field_mask: str) -> DisableLegacyFeatures:
+        """Update the disable legacy features setting.
+        
+        Updates the value of the disable legacy features setting.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`DisableLegacyFeatures`
+        :param field_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as single string. To specify
+          multiple fields in the field mask, use comma as the separator (no space).
+        
+        :returns: :class:`DisableLegacyFeatures`
+        """
+        body = {}
+        if allow_missing is not None: body['allow_missing'] = allow_missing
+        if field_mask is not None: body['field_mask'] = field_mask
+        if setting is not None: body['setting'] = setting.as_dict()
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do(
+            'PATCH',
+            f'/api/2.0/accounts/{self._api.account_id}/settings/types/disable_legacy_features/names/default',
+            body=body,
+            headers=headers)
+        return DisableLegacyFeatures.from_dict(res)
+
+
 class EnhancedSecurityMonitoringAPI:
     """Controls whether enhanced security monitoring is enabled for the current workspace. If the compliance
     security profile is enabled, this is automatically enabled. By default, it is disabled. However, if the
@@ -3402,6 +4265,122 @@ def list_private_endpoint_rules(
             query['page_token'] = json['next_page_token']
 
 
+class NotificationDestinationsAPI:
+    """The notification destinations API lets you programmatically manage a workspace's notification
+    destinations. Notification destinations are used to send notifications for query alerts and jobs to
+    destinations outside of Databricks. Only workspace admins can create, update, and delete notification
+    destinations."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def create(self,
+               *,
+               config: Optional[Config] = None,
+               display_name: Optional[str] = None) -> NotificationDestination:
+        """Create a notification destination.
+        
+        Creates a notification destination. Requires workspace admin permissions.
+        
+        :param config: :class:`Config` (optional)
+          The configuration for the notification destination. Must wrap EXACTLY one of the nested configs.
+        :param display_name: str (optional)
+          The display name for the notification destination.
+        
+        :returns: :class:`NotificationDestination`
+        """
+        body = {}
+        if config is not None: body['config'] = config.as_dict()
+        if display_name is not None: body['display_name'] = display_name
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST', '/api/2.0/notification-destinations', body=body, headers=headers)
+        return NotificationDestination.from_dict(res)
+
+    def delete(self, id: str):
+        """Delete a notification destination.
+        
+        Deletes a notification destination. Requires workspace admin permissions.
+        
+        :param id: str
+        
+        
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        self._api.do('DELETE', f'/api/2.0/notification-destinations/{id}', headers=headers)
+
+    def get(self, id: str) -> NotificationDestination:
+        """Get a notification destination.
+        
+        Gets a notification destination.
+        
+        :param id: str
+        
+        :returns: :class:`NotificationDestination`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET', f'/api/2.0/notification-destinations/{id}', headers=headers)
+        return NotificationDestination.from_dict(res)
+
+    def list(self,
+             *,
+             page_size: Optional[int] = None,
+             page_token: Optional[str] = None) -> Iterator[ListNotificationDestinationsResult]:
+        """List notification destinations.
+        
+        Lists notification destinations.
+        
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`ListNotificationDestinationsResult`
+        """
+
+        query = {}
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        headers = {'Accept': 'application/json', }
+
+        while True:
+            json = self._api.do('GET', '/api/2.0/notification-destinations', query=query, headers=headers)
+            if 'results' in json:
+                for v in json['results']:
+                    yield ListNotificationDestinationsResult.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+
+    def update(self,
+               id: str,
+               *,
+               config: Optional[Config] = None,
+               display_name: Optional[str] = None) -> NotificationDestination:
+        """Update a notification destination.
+        
+        Updates a notification destination. Requires workspace admin permissions. At least one field is
+        required in the request body.
+        
+        :param id: str
+        :param config: :class:`Config` (optional)
+          The configuration for the notification destination. Must wrap EXACTLY one of the nested configs.
+        :param display_name: str (optional)
+          The display name for the notification destination.
+        
+        :returns: :class:`NotificationDestination`
+        """
+        body = {}
+        if config is not None: body['config'] = config.as_dict()
+        if display_name is not None: body['display_name'] = display_name
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('PATCH', f'/api/2.0/notification-destinations/{id}', body=body, headers=headers)
+        return NotificationDestination.from_dict(res)
+
+
 class PersonalComputeAPI:
     """The Personal Compute enablement setting lets you control which users can use the Personal Compute default
     policy to create compute resources. By default all users in all workspaces have access (ON), but you can
@@ -3604,6 +4583,8 @@ def __init__(self, api_client):
         self._automatic_cluster_update = AutomaticClusterUpdateAPI(self._api)
         self._compliance_security_profile = ComplianceSecurityProfileAPI(self._api)
         self._default_namespace = DefaultNamespaceAPI(self._api)
+        self._disable_legacy_access = DisableLegacyAccessAPI(self._api)
+        self._disable_legacy_dbfs = DisableLegacyDbfsAPI(self._api)
         self._enhanced_security_monitoring = EnhancedSecurityMonitoringAPI(self._api)
         self._restrict_workspace_admins = RestrictWorkspaceAdminsAPI(self._api)
 
@@ -3622,6 +4603,16 @@ def default_namespace(self) -> DefaultNamespaceAPI:
         """The default namespace setting API allows users to configure the default namespace for a Databricks workspace."""
         return self._default_namespace
 
+    @property
+    def disable_legacy_access(self) -> DisableLegacyAccessAPI:
+        """'Disabling legacy access' has the following impacts; see the Databricks settings API documentation for the full list."""
+        return self._disable_legacy_access
+
+    @property
+    def disable_legacy_dbfs(self) -> DisableLegacyDbfsAPI:
+        """When this setting is on, access to DBFS root and DBFS mounts is disallowed (as well as creation of new mounts)."""
+        return self._disable_legacy_dbfs
+
     @property
     def enhanced_security_monitoring(self) -> EnhancedSecurityMonitoringAPI:
         """Controls whether enhanced security monitoring is enabled for the current workspace."""
diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py
index d716fad93..772bc7aee 100755
--- a/databricks/sdk/service/sharing.py
+++ b/databricks/sdk/service/sharing.py
@@ -483,6 +483,9 @@ class CreateRecipient:
     when the __authentication_type__ is **DATABRICKS**. The identifier is of format
     __cloud__:__region__:__metastore-uuid__."""
 
+    expiration_time: Optional[int] = None
+    """Expiration timestamp of the token, in epoch milliseconds."""
+
     ip_access_list: Optional[IpAccessList] = None
     """IP Access List"""
 
@@ -503,6 +506,7 @@ def as_dict(self) -> dict:
         if self.comment is not None: body['comment'] = self.comment
         if self.data_recipient_global_metastore_id is not None:
             body['data_recipient_global_metastore_id'] = self.data_recipient_global_metastore_id
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
         if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict()
         if self.name is not None: body['name'] = self.name
         if self.owner is not None: body['owner'] = self.owner
@@ -516,6 +520,7 @@ def from_dict(cls, d: Dict[str, any]) -> CreateRecipient:
         return cls(authentication_type=_enum(d, 'authentication_type', AuthenticationType),
                    comment=d.get('comment', None),
                    data_recipient_global_metastore_id=d.get('data_recipient_global_metastore_id', None),
+                   expiration_time=d.get('expiration_time', None),
                    ip_access_list=_from_dict(d, 'ip_access_list', IpAccessList),
                    name=d.get('name', None),
                    owner=d.get('owner', None),
@@ -580,19 +585,25 @@ def from_dict(cls, d: Dict[str, any]) -> GetActivationUrlInfoResponse:
 
 @dataclass
 class GetRecipientSharePermissionsResponse:
+    next_page_token: Optional[str] = None
+    """Opaque token to retrieve the next page of results. Absent if there are no more pages.
+    __page_token__ should be set to this value for the next request (for the next page of results)."""
+
     permissions_out: Optional[List[ShareToPrivilegeAssignment]] = None
     """An array of data share permissions for a recipient."""
 
     def as_dict(self) -> dict:
         """Serializes the GetRecipientSharePermissionsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         if self.permissions_out: body['permissions_out'] = [v.as_dict() for v in self.permissions_out]
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetRecipientSharePermissionsResponse:
         """Deserializes the GetRecipientSharePermissionsResponse from a dictionary."""
-        return cls(permissions_out=_repeated_dict(d, 'permissions_out', ShareToPrivilegeAssignment))
+        return cls(next_page_token=d.get('next_page_token', None),
+                   permissions_out=_repeated_dict(d, 'permissions_out', ShareToPrivilegeAssignment))
 
 
 @dataclass
@@ -637,70 +648,94 @@ def from_dict(cls, d: Dict[str, any]) -> ListCleanRoomsResponse:
 
 @dataclass
 class ListProviderSharesResponse:
+    next_page_token: Optional[str] = None
+    """Opaque token to retrieve the next page of results. Absent if there are no more pages.
+    __page_token__ should be set to this value for the next request (for the next page of results)."""
+
     shares: Optional[List[ProviderShare]] = None
     """An array of provider shares."""
 
     def as_dict(self) -> dict:
         """Serializes the ListProviderSharesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         if self.shares: body['shares'] = [v.as_dict() for v in self.shares]
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListProviderSharesResponse:
         """Deserializes the ListProviderSharesResponse from a dictionary."""
-        return cls(shares=_repeated_dict(d, 'shares', ProviderShare))
+        return cls(next_page_token=d.get('next_page_token', None),
+                   shares=_repeated_dict(d, 'shares', ProviderShare))
 
 
 @dataclass
 class ListProvidersResponse:
+    next_page_token: Optional[str] = None
+    """Opaque token to retrieve the next page of results. Absent if there are no more pages.
+    __page_token__ should be set to this value for the next request (for the next page of results)."""
+
     providers: Optional[List[ProviderInfo]] = None
     """An array of provider information objects."""
 
     def as_dict(self) -> dict:
         """Serializes the ListProvidersResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         if self.providers: body['providers'] = [v.as_dict() for v in self.providers]
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListProvidersResponse:
         """Deserializes the ListProvidersResponse from a dictionary."""
-        return cls(providers=_repeated_dict(d, 'providers', ProviderInfo))
+        return cls(next_page_token=d.get('next_page_token', None),
+                   providers=_repeated_dict(d, 'providers', ProviderInfo))
 
 
 @dataclass
 class ListRecipientsResponse:
+    next_page_token: Optional[str] = None
+    """Opaque token to retrieve the next page of results. Absent if there are no more pages.
+    __page_token__ should be set to this value for the next request (for the next page of results)."""
+
     recipients: Optional[List[RecipientInfo]] = None
     """An array of recipient information objects."""
 
     def as_dict(self) -> dict:
         """Serializes the ListRecipientsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         if self.recipients: body['recipients'] = [v.as_dict() for v in self.recipients]
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListRecipientsResponse:
         """Deserializes the ListRecipientsResponse from a dictionary."""
-        return cls(recipients=_repeated_dict(d, 'recipients', RecipientInfo))
+        return cls(next_page_token=d.get('next_page_token', None),
+                   recipients=_repeated_dict(d, 'recipients', RecipientInfo))
 
 
 @dataclass
 class ListSharesResponse:
+    next_page_token: Optional[str] = None
+    """Opaque token to retrieve the next page of results. Absent if there are no more pages.
+    __page_token__ should be set to this value for the next request (for the next page of results)."""
+
     shares: Optional[List[ShareInfo]] = None
     """An array of data share information objects."""
 
     def as_dict(self) -> dict:
         """Serializes the ListSharesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         if self.shares: body['shares'] = [v.as_dict() for v in self.shares]
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListSharesResponse:
         """Deserializes the ListSharesResponse from a dictionary."""
-        return cls(shares=_repeated_dict(d, 'shares', ShareInfo))
+        return cls(next_page_token=d.get('next_page_token', None),
+                   shares=_repeated_dict(d, 'shares', ShareInfo))
 
 
 @dataclass
@@ -788,6 +823,7 @@ class Privilege(Enum):
     CREATE_VIEW = 'CREATE_VIEW'
     CREATE_VOLUME = 'CREATE_VOLUME'
     EXECUTE = 'EXECUTE'
+    MANAGE = 'MANAGE'
     MANAGE_ALLOWLIST = 'MANAGE_ALLOWLIST'
     MODIFY = 'MODIFY'
     READ_FILES = 'READ_FILES'
@@ -1525,6 +1561,9 @@ class UpdateRecipient:
     comment: Optional[str] = None
     """Description about the recipient."""
 
+    expiration_time: Optional[int] = None
+    """Expiration timestamp of the token, in epoch milliseconds."""
+
     ip_access_list: Optional[IpAccessList] = None
     """IP Access List"""
 
@@ -1546,6 +1585,7 @@ def as_dict(self) -> dict:
         """Serializes the UpdateRecipient into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.comment is not None: body['comment'] = self.comment
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
         if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict()
         if self.name is not None: body['name'] = self.name
         if self.new_name is not None: body['new_name'] = self.new_name
@@ -1557,6 +1597,7 @@ def as_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, any]) -> UpdateRecipient:
         """Deserializes the UpdateRecipient from a dictionary."""
         return cls(comment=d.get('comment', None),
+                   expiration_time=d.get('expiration_time', None),
                    ip_access_list=_from_dict(d, 'ip_access_list', IpAccessList),
                    name=d.get('name', None),
                    new_name=d.get('new_name', None),
@@ -1625,20 +1666,37 @@ class UpdateSharePermissions:
     changes: Optional[List[catalog.PermissionsChange]] = None
     """Array of permission changes."""
 
+    max_results: Optional[int] = None
+    """Maximum number of permissions to return. - when set to 0, the page length is set to a server
+    configured value (recommended); - when set to a value greater than 0, the page length is the
+    minimum of this value and a server configured value; - when set to a value less than 0, an
+    invalid parameter error is returned; - If not set, all valid permissions are returned (not
+    recommended). - Note: The number of returned permissions might be less than the specified
+    max_results size, even zero. The only definitive indication that no further permissions can be
+    fetched is when the next_page_token is unset from the response."""
+
     name: Optional[str] = None
     """The name of the share."""
 
+    page_token: Optional[str] = None
+    """Opaque pagination token to go to next page based on previous query."""
+
     def as_dict(self) -> dict:
         """Serializes the UpdateSharePermissions into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.changes: body['changes'] = [v.as_dict() for v in self.changes]
+        if self.max_results is not None: body['max_results'] = self.max_results
         if self.name is not None: body['name'] = self.name
+        if self.page_token is not None: body['page_token'] = self.page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateSharePermissions:
         """Deserializes the UpdateSharePermissions from a dictionary."""
-        return cls(changes=_repeated_dict(d, 'changes', catalog.PermissionsChange), name=d.get('name', None))
+        return cls(changes=_repeated_dict(d, 'changes', catalog.PermissionsChange),
+                   max_results=d.get('max_results', None),
+                   name=d.get('name', None),
+                   page_token=d.get('page_token', None))
 
 
 class CleanRoomsAPI:
@@ -1864,7 +1922,11 @@ def get(self, name: str) -> ProviderInfo:
         res = self._api.do('GET', f'/api/2.1/unity-catalog/providers/{name}', headers=headers)
         return ProviderInfo.from_dict(res)
 
-    def list(self, *, data_provider_global_metastore_id: Optional[str] = None) -> Iterator[ProviderInfo]:
+    def list(self,
+             *,
+             data_provider_global_metastore_id: Optional[str] = None,
+             max_results: Optional[int] = None,
+             page_token: Optional[str] = None) -> Iterator[ProviderInfo]:
         """List providers.
         
         Gets an array of available authentication providers. The caller must either be a metastore admin or
@@ -1874,6 +1936,16 @@ def list(self, *, data_provider_global_metastore_id: Optional[str] = None) -> It
         :param data_provider_global_metastore_id: str (optional)
           If not provided, all providers will be returned. If no providers exist with this ID, no results will
           be returned.
+        :param max_results: int (optional)
+          Maximum number of providers to return. - when set to 0, the page length is set to a server
+          configured value (recommended); - when set to a value greater than 0, the page length is the minimum
+          of this value and a server configured value; - when set to a value less than 0, an invalid parameter
+          error is returned; - If not set, all valid providers are returned (not recommended). - Note: The
+          number of returned providers might be less than the specified max_results size, even zero. The only
+          definitive indication that no further providers can be fetched is when the next_page_token is unset
+          from the response.
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
         
         :returns: Iterator over :class:`ProviderInfo`
         """
@@ -1881,13 +1953,24 @@ def list(self, *, data_provider_global_metastore_id: Optional[str] = None) -> It
         query = {}
         if data_provider_global_metastore_id is not None:
             query['data_provider_global_metastore_id'] = data_provider_global_metastore_id
+        if max_results is not None: query['max_results'] = max_results
+        if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
-        json = self._api.do('GET', '/api/2.1/unity-catalog/providers', query=query, headers=headers)
-        parsed = ListProvidersResponse.from_dict(json).providers
-        return parsed if parsed is not None else []
+        while True:
+            json = self._api.do('GET', '/api/2.1/unity-catalog/providers', query=query, headers=headers)
+            if 'providers' in json:
+                for v in json['providers']:
+                    yield ProviderInfo.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
 
-    def list_shares(self, name: str) -> Iterator[ProviderShare]:
+    def list_shares(self,
+                    name: str,
+                    *,
+                    max_results: Optional[int] = None,
+                    page_token: Optional[str] = None) -> Iterator[ProviderShare]:
         """List shares by Provider.
         
         Gets an array of a specified provider's shares within the metastore where:
@@ -1896,13 +1979,29 @@ def list_shares(self, name: str) -> Iterator[ProviderShare]:
         
         :param name: str
           Name of the provider in which to list shares.
+        :param max_results: int (optional)
+          Maximum number of shares to return. - when set to 0, the page length is set to a server configured
+          value (recommended); - when set to a value greater than 0, the page length is the minimum of this
+          value and a server configured value; - when set to a value less than 0, an invalid parameter error
+          is returned; - If not set, all valid shares are returned (not recommended). - Note: The number of
+          returned shares might be less than the specified max_results size, even zero. The only definitive
+          indication that no further shares can be fetched is when the next_page_token is unset from the
+          response.
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
         
         :returns: Iterator over :class:`ProviderShare`
         """
 
+        query = {}
+        if max_results is not None: query['max_results'] = max_results
+        if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
-        json = self._api.do('GET', f'/api/2.1/unity-catalog/providers/{name}/shares', headers=headers)
+        json = self._api.do('GET',
+                            f'/api/2.1/unity-catalog/providers/{name}/shares',
+                            query=query,
+                            headers=headers)
         parsed = ListProviderSharesResponse.from_dict(json).shares
         return parsed if parsed is not None else []
 
@@ -2015,6 +2114,7 @@ def create(self,
                *,
                comment: Optional[str] = None,
                data_recipient_global_metastore_id: Optional[str] = None,
+               expiration_time: Optional[int] = None,
                ip_access_list: Optional[IpAccessList] = None,
                owner: Optional[str] = None,
                properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None,
@@ -2034,6 +2134,8 @@ def create(self,
           The global Unity Catalog metastore id provided by the data recipient. This field is required when
           the __authentication_type__ is **DATABRICKS**. The identifier is of format
           __cloud__:__region__:__metastore-uuid__.
+        :param expiration_time: int (optional)
+          Expiration timestamp of the token, in epoch milliseconds.
         :param ip_access_list: :class:`IpAccessList` (optional)
           IP Access List
         :param owner: str (optional)
@@ -2051,6 +2153,7 @@ def create(self,
         if comment is not None: body['comment'] = comment
         if data_recipient_global_metastore_id is not None:
             body['data_recipient_global_metastore_id'] = data_recipient_global_metastore_id
+        if expiration_time is not None: body['expiration_time'] = expiration_time
         if ip_access_list is not None: body['ip_access_list'] = ip_access_list.as_dict()
         if name is not None: body['name'] = name
         if owner is not None: body['owner'] = owner
@@ -2094,7 +2197,11 @@ def get(self, name: str) -> RecipientInfo:
         res = self._api.do('GET', f'/api/2.1/unity-catalog/recipients/{name}', headers=headers)
         return RecipientInfo.from_dict(res)
 
-    def list(self, *, data_recipient_global_metastore_id: Optional[str] = None) -> Iterator[RecipientInfo]:
+    def list(self,
+             *,
+             data_recipient_global_metastore_id: Optional[str] = None,
+             max_results: Optional[int] = None,
+             page_token: Optional[str] = None) -> Iterator[RecipientInfo]:
         """List share recipients.
         
         Gets an array of all share recipients within the current metastore where:
@@ -2105,6 +2212,16 @@ def list(self, *, data_recipient_global_metastore_id: Optional[str] = None) -> I
         :param data_recipient_global_metastore_id: str (optional)
           If not provided, all recipients will be returned. If no recipients exist with this ID, no results
           will be returned.
+        :param max_results: int (optional)
+          Maximum number of recipients to return. - when set to 0, the page length is set to a server
+          configured value (recommended); - when set to a value greater than 0, the page length is the minimum
+          of this value and a server configured value; - when set to a value less than 0, an invalid parameter
+          error is returned; - If not set, all valid recipients are returned (not recommended). - Note: The
+          number of returned recipients might be less than the specified max_results size, even zero. The only
+          definitive indication that no further recipients can be fetched is when the next_page_token is unset
+          from the response.
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
         
         :returns: Iterator over :class:`RecipientInfo`
         """
@@ -2112,11 +2229,18 @@ def list(self, *, data_recipient_global_metastore_id: Optional[str] = None) -> I
         query = {}
         if data_recipient_global_metastore_id is not None:
             query['data_recipient_global_metastore_id'] = data_recipient_global_metastore_id
+        if max_results is not None: query['max_results'] = max_results
+        if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
-        json = self._api.do('GET', '/api/2.1/unity-catalog/recipients', query=query, headers=headers)
-        parsed = ListRecipientsResponse.from_dict(json).recipients
-        return parsed if parsed is not None else []
+        while True:
+            json = self._api.do('GET', '/api/2.1/unity-catalog/recipients', query=query, headers=headers)
+            if 'recipients' in json:
+                for v in json['recipients']:
+                    yield RecipientInfo.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
 
     def rotate_token(self, name: str, existing_token_expire_in_seconds: int) -> RecipientInfo:
         """Rotate a token.
@@ -2144,7 +2268,11 @@ def rotate_token(self, name: str, existing_token_expire_in_seconds: int) -> Reci
                            headers=headers)
         return RecipientInfo.from_dict(res)
 
-    def share_permissions(self, name: str) -> GetRecipientSharePermissionsResponse:
+    def share_permissions(self,
+                          name: str,
+                          *,
+                          max_results: Optional[int] = None,
+                          page_token: Optional[str] = None) -> GetRecipientSharePermissionsResponse:
         """Get recipient share permissions.
         
         Gets the share permissions for the specified Recipient. The caller must be a metastore admin or the
@@ -2152,14 +2280,28 @@ def share_permissions(self, name: str) -> GetRecipientSharePermissionsResponse:
         
         :param name: str
           The name of the Recipient.
+        :param max_results: int (optional)
+          Maximum number of permissions to return. - when set to 0, the page length is set to a server
+          configured value (recommended); - when set to a value greater than 0, the page length is the minimum
+          of this value and a server configured value; - when set to a value less than 0, an invalid parameter
+          error is returned; - If not set, all valid permissions are returned (not recommended). - Note: The
+          number of returned permissions might be less than the specified max_results size, even zero. The
+          only definitive indication that no further permissions can be fetched is when the next_page_token is
+          unset from the response.
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
         
         :returns: :class:`GetRecipientSharePermissionsResponse`
         """
 
+        query = {}
+        if max_results is not None: query['max_results'] = max_results
+        if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
         res = self._api.do('GET',
                            f'/api/2.1/unity-catalog/recipients/{name}/share-permissions',
+                           query=query,
                            headers=headers)
         return GetRecipientSharePermissionsResponse.from_dict(res)
 
@@ -2167,6 +2309,7 @@ def update(self,
                name: str,
                *,
                comment: Optional[str] = None,
+               expiration_time: Optional[int] = None,
                ip_access_list: Optional[IpAccessList] = None,
                new_name: Optional[str] = None,
                owner: Optional[str] = None,
@@ -2181,6 +2324,8 @@ def update(self,
           Name of the recipient.
         :param comment: str (optional)
           Description about the recipient.
+        :param expiration_time: int (optional)
+          Expiration timestamp of the token, in epoch milliseconds.
         :param ip_access_list: :class:`IpAccessList` (optional)
           IP Access List
         :param new_name: str (optional)
@@ -2196,6 +2341,7 @@ def update(self,
         """
         body = {}
         if comment is not None: body['comment'] = comment
+        if expiration_time is not None: body['expiration_time'] = expiration_time
         if ip_access_list is not None: body['ip_access_list'] = ip_access_list.as_dict()
         if new_name is not None: body['new_name'] = new_name
         if owner is not None: body['owner'] = owner
@@ -2278,22 +2424,48 @@ def get(self, name: str, *, include_shared_data: Optional[bool] = None) -> Share
         res = self._api.do('GET', f'/api/2.1/unity-catalog/shares/{name}', query=query, headers=headers)
         return ShareInfo.from_dict(res)
 
-    def list(self) -> Iterator[ShareInfo]:
+    def list(self,
+             *,
+             max_results: Optional[int] = None,
+             page_token: Optional[str] = None) -> Iterator[ShareInfo]:
         """List shares.
         
         Gets an array of data object shares from the metastore. The caller must be a metastore admin or the
         owner of the share. There is no guarantee of a specific ordering of the elements in the array.
         
+        :param max_results: int (optional)
+          Maximum number of shares to return. - when set to 0, the page length is set to a server configured
+          value (recommended); - when set to a value greater than 0, the page length is the minimum of this
+          value and a server configured value; - when set to a value less than 0, an invalid parameter error
+          is returned; - If not set, all valid shares are returned (not recommended). - Note: The number of
+          returned shares might be less than the specified max_results size, even zero. The only definitive
+          indication that no further shares can be fetched is when the next_page_token is unset from the
+          response.
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
+        
         :returns: Iterator over :class:`ShareInfo`
         """
 
+        query = {}
+        if max_results is not None: query['max_results'] = max_results
+        if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
-        json = self._api.do('GET', '/api/2.1/unity-catalog/shares', headers=headers)
-        parsed = ListSharesResponse.from_dict(json).shares
-        return parsed if parsed is not None else []
+        while True:
+            json = self._api.do('GET', '/api/2.1/unity-catalog/shares', query=query, headers=headers)
+            if 'shares' in json:
+                for v in json['shares']:
+                    yield ShareInfo.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
 
-    def share_permissions(self, name: str) -> catalog.PermissionsList:
+    def share_permissions(self,
+                          name: str,
+                          *,
+                          max_results: Optional[int] = None,
+                          page_token: Optional[str] = None) -> catalog.PermissionsList:
         """Get permissions.
         
         Gets the permissions for a data share from the metastore. The caller must be a metastore admin or the
@@ -2301,14 +2473,30 @@ def share_permissions(self, name: str) -> catalog.PermissionsList:
         
         :param name: str
           The name of the share.
+        :param max_results: int (optional)
+          Maximum number of permissions to return. - when set to 0, the page length is set to a server
+          configured value (recommended); - when set to a value greater than 0, the page length is the minimum
+          of this value and a server configured value; - when set to a value less than 0, an invalid parameter
+          error is returned; - If not set, all valid permissions are returned (not recommended). - Note: The
+          number of returned permissions might be less than the specified max_results size, even zero. The
+          only definitive indication that no further permissions can be fetched is when the next_page_token is
+          unset from the response.
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
         
         :returns: :class:`PermissionsList`
         """
 
+        query = {}
+        if max_results is not None: query['max_results'] = max_results
+        if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
-        res = self._api.do('GET', f'/api/2.1/unity-catalog/shares/{name}/permissions', headers=headers)
-        return PermissionsList.from_dict(res)
+        res = self._api.do('GET',
+                           f'/api/2.1/unity-catalog/shares/{name}/permissions',
+                           query=query,
+                           headers=headers)
+        return catalog.PermissionsList.from_dict(res)
 
     def update(self,
                name: str,
@@ -2362,7 +2550,12 @@ def update(self,
         res = self._api.do('PATCH', f'/api/2.1/unity-catalog/shares/{name}', body=body, headers=headers)
         return ShareInfo.from_dict(res)
 
-    def update_permissions(self, name: str, *, changes: Optional[List[catalog.PermissionsChange]] = None):
+    def update_permissions(self,
+                           name: str,
+                           *,
+                           changes: Optional[List[catalog.PermissionsChange]] = None,
+                           max_results: Optional[int] = None,
+                           page_token: Optional[str] = None):
         """Update permissions.
         
         Updates the permissions for a data share in the metastore. The caller must be a metastore admin or an
@@ -2375,11 +2568,28 @@ def update_permissions(self, name: str, *, changes: Optional[List[catalog.Permis
           The name of the share.
         :param changes: List[:class:`PermissionsChange`] (optional)
           Array of permission changes.
+        :param max_results: int (optional)
+          Maximum number of permissions to return. - when set to 0, the page length is set to a server
+          configured value (recommended); - when set to a value greater than 0, the page length is the minimum
+          of this value and a server configured value; - when set to a value less than 0, an invalid parameter
+          error is returned; - If not set, all valid permissions are returned (not recommended). - Note: The
+          number of returned permissions might be less than the specified max_results size, even zero. The
+          only definitive indication that no further permissions can be fetched is when the next_page_token is
+          unset from the response.
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
         
         
         """
         body = {}
+        query = {}
         if changes is not None: body['changes'] = [v.as_dict() for v in changes]
+        if max_results is not None: query['max_results'] = max_results
+        if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-        self._api.do('PATCH', f'/api/2.1/unity-catalog/shares/{name}/permissions', body=body, headers=headers)
+        self._api.do('PATCH',
+                     f'/api/2.1/unity-catalog/shares/{name}/permissions',
+                     query=query,
+                     body=body,
+                     headers=headers)
diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py
index fa7f93f6e..7a224feeb 100755
--- a/databricks/sdk/service/sql.py
+++ b/databricks/sdk/service/sql.py
@@ -46,69 +46,211 @@ def from_dict(cls, d: Dict[str, any]) -> AccessControl:
 
 @dataclass
 class Alert:
-    created_at: Optional[str] = None
-    """Timestamp when the alert was created."""
+    condition: Optional[AlertCondition] = None
+    """Trigger conditions of the alert."""
+
+    create_time: Optional[str] = None
+    """The timestamp indicating when the alert was created."""
+
+    custom_body: Optional[str] = None
+    """Custom body of alert notification, if it exists. See [here] for custom templating instructions.
+    
+    [here]: https://docs.databricks.com/sql/user/alerts/index.html"""
+
+    custom_subject: Optional[str] = None
+    """Custom subject of alert notification, if it exists. This can include email subject entries and
+    Slack notification headers, for example. See [here] for custom templating instructions.
+    
+    [here]: https://docs.databricks.com/sql/user/alerts/index.html"""
+
+    display_name: Optional[str] = None
+    """The display name of the alert."""
 
     id: Optional[str] = None
-    """Alert ID."""
+    """UUID identifying the alert."""
 
-    last_triggered_at: Optional[str] = None
-    """Timestamp when the alert was last triggered."""
+    lifecycle_state: Optional[LifecycleState] = None
+    """The workspace state of the alert. Used for tracking trashed status."""
 
-    name: Optional[str] = None
-    """Name of the alert."""
+    notify_on_ok: Optional[bool] = None
+    """Whether to notify alert subscribers when alert returns back to normal."""
 
-    options: Optional[AlertOptions] = None
-    """Alert configuration options."""
+    owner_user_name: Optional[str] = None
+    """The owner's username. This field is set to "Unavailable" if the user has been deleted."""
 
-    parent: Optional[str] = None
-    """The identifier of the workspace folder containing the object."""
+    parent_path: Optional[str] = None
+    """The workspace path of the folder containing the alert."""
 
-    query: Optional[AlertQuery] = None
+    query_id: Optional[str] = None
+    """UUID of the query attached to the alert."""
 
-    rearm: Optional[int] = None
-    """Number of seconds after being triggered before the alert rearms itself and can be triggered
-    again. If `null`, alert will never be triggered again."""
+    seconds_to_retrigger: Optional[int] = None
+    """Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it
+    can be triggered again. If 0 or not specified, the alert will not be triggered again."""
 
     state: Optional[AlertState] = None
-    """State of the alert. Possible values are: `unknown` (yet to be evaluated), `triggered` (evaluated
-    and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions)."""
+    """Current state of the alert's trigger status. This field is set to UNKNOWN if the alert has not
+    yet been evaluated or ran into an error during the last evaluation."""
 
-    updated_at: Optional[str] = None
-    """Timestamp when the alert was last updated."""
+    trigger_time: Optional[str] = None
+    """Timestamp when the alert was last triggered, if the alert has been triggered before."""
 
-    user: Optional[User] = None
+    update_time: Optional[str] = None
+    """The timestamp indicating when the alert was updated."""
 
     def as_dict(self) -> dict:
         """Serializes the Alert into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.condition: body['condition'] = self.condition.as_dict()
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.custom_body is not None: body['custom_body'] = self.custom_body
+        if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
+        if self.display_name is not None: body['display_name'] = self.display_name
         if self.id is not None: body['id'] = self.id
-        if self.last_triggered_at is not None: body['last_triggered_at'] = self.last_triggered_at
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options.as_dict()
-        if self.parent is not None: body['parent'] = self.parent
-        if self.query: body['query'] = self.query.as_dict()
-        if self.rearm is not None: body['rearm'] = self.rearm
+        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value
+        if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok
+        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
+        if self.parent_path is not None: body['parent_path'] = self.parent_path
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
         if self.state is not None: body['state'] = self.state.value
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.user: body['user'] = self.user.as_dict()
+        if self.trigger_time is not None: body['trigger_time'] = self.trigger_time
+        if self.update_time is not None: body['update_time'] = self.update_time
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Alert:
         """Deserializes the Alert from a dictionary."""
-        return cls(created_at=d.get('created_at', None),
+        return cls(condition=_from_dict(d, 'condition', AlertCondition),
+                   create_time=d.get('create_time', None),
+                   custom_body=d.get('custom_body', None),
+                   custom_subject=d.get('custom_subject', None),
+                   display_name=d.get('display_name', None),
                    id=d.get('id', None),
-                   last_triggered_at=d.get('last_triggered_at', None),
-                   name=d.get('name', None),
-                   options=_from_dict(d, 'options', AlertOptions),
-                   parent=d.get('parent', None),
-                   query=_from_dict(d, 'query', AlertQuery),
-                   rearm=d.get('rearm', None),
+                   lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState),
+                   notify_on_ok=d.get('notify_on_ok', None),
+                   owner_user_name=d.get('owner_user_name', None),
+                   parent_path=d.get('parent_path', None),
+                   query_id=d.get('query_id', None),
+                   seconds_to_retrigger=d.get('seconds_to_retrigger', None),
                    state=_enum(d, 'state', AlertState),
-                   updated_at=d.get('updated_at', None),
-                   user=_from_dict(d, 'user', User))
+                   trigger_time=d.get('trigger_time', None),
+                   update_time=d.get('update_time', None))
+
+
+@dataclass
+class AlertCondition:
+    empty_result_state: Optional[AlertState] = None
+    """Alert state if result is empty."""
+
+    op: Optional[AlertOperator] = None
+    """Operator used for comparison in alert evaluation."""
+
+    operand: Optional[AlertConditionOperand] = None
+    """Name of the column from the query result to use for comparison in alert evaluation."""
+
+    threshold: Optional[AlertConditionThreshold] = None
+    """Threshold value used for comparison in alert evaluation."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AlertCondition into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.empty_result_state is not None: body['empty_result_state'] = self.empty_result_state.value
+        if self.op is not None: body['op'] = self.op.value
+        if self.operand: body['operand'] = self.operand.as_dict()
+        if self.threshold: body['threshold'] = self.threshold.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AlertCondition:
+        """Deserializes the AlertCondition from a dictionary."""
+        return cls(empty_result_state=_enum(d, 'empty_result_state', AlertState),
+                   op=_enum(d, 'op', AlertOperator),
+                   operand=_from_dict(d, 'operand', AlertConditionOperand),
+                   threshold=_from_dict(d, 'threshold', AlertConditionThreshold))
+
+
+@dataclass
+class AlertConditionOperand:
+    column: Optional[AlertOperandColumn] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the AlertConditionOperand into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.column: body['column'] = self.column.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AlertConditionOperand:
+        """Deserializes the AlertConditionOperand from a dictionary."""
+        return cls(column=_from_dict(d, 'column', AlertOperandColumn))
+
+
+@dataclass
+class AlertConditionThreshold:
+    value: Optional[AlertOperandValue] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the AlertConditionThreshold into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.value: body['value'] = self.value.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AlertConditionThreshold:
+        """Deserializes the AlertConditionThreshold from a dictionary."""
+        return cls(value=_from_dict(d, 'value', AlertOperandValue))
+
+
+@dataclass
+class AlertOperandColumn:
+    name: Optional[str] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the AlertOperandColumn into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AlertOperandColumn:
+        """Deserializes the AlertOperandColumn from a dictionary."""
+        return cls(name=d.get('name', None))
+
+
+@dataclass
+class AlertOperandValue:
+    bool_value: Optional[bool] = None
+
+    double_value: Optional[float] = None
+
+    string_value: Optional[str] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the AlertOperandValue into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.bool_value is not None: body['bool_value'] = self.bool_value
+        if self.double_value is not None: body['double_value'] = self.double_value
+        if self.string_value is not None: body['string_value'] = self.string_value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AlertOperandValue:
+        """Deserializes the AlertOperandValue from a dictionary."""
+        return cls(bool_value=d.get('bool_value', None),
+                   double_value=d.get('double_value', None),
+                   string_value=d.get('string_value', None))
+
+
+class AlertOperator(Enum):
+
+    EQUAL = 'EQUAL'
+    GREATER_THAN = 'GREATER_THAN'
+    GREATER_THAN_OR_EQUAL = 'GREATER_THAN_OR_EQUAL'
+    IS_NULL = 'IS_NULL'
+    LESS_THAN = 'LESS_THAN'
+    LESS_THAN_OR_EQUAL = 'LESS_THAN_OR_EQUAL'
+    NOT_EQUAL = 'NOT_EQUAL'
 
 
 @dataclass
@@ -259,12 +401,10 @@ def from_dict(cls, d: Dict[str, any]) -> AlertQuery:
 
 
 class AlertState(Enum):
-    """State of the alert. Possible values are: `unknown` (yet to be evaluated), `triggered` (evaluated
-    and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions)."""
 
-    OK = 'ok'
-    TRIGGERED = 'triggered'
-    UNKNOWN = 'unknown'
+    OK = 'OK'
+    TRIGGERED = 'TRIGGERED'
+    UNKNOWN = 'UNKNOWN'
 
 
 @dataclass
@@ -319,6 +459,9 @@ def from_dict(cls, d: Dict[str, any]) -> CancelExecutionResponse:
 
 @dataclass
 class Channel:
+    """Configures the channel name and DBSQL version of the warehouse. CHANNEL_NAME_CUSTOM should be
+    chosen only when `dbsql_version` is specified."""
+
     dbsql_version: Optional[str] = None
 
     name: Optional[ChannelName] = None
@@ -338,10 +481,10 @@ def from_dict(cls, d: Dict[str, any]) -> Channel:
 
 @dataclass
 class ChannelInfo:
-    """Channel information for the SQL warehouse at the time of query execution"""
+    """Details about a Channel."""
 
     dbsql_version: Optional[str] = None
-    """DBSQL Version the channel is mapped to"""
+    """DB SQL Version the Channel is mapped to."""
 
     name: Optional[ChannelName] = None
     """Name of the channel"""
@@ -364,7 +507,6 @@ class ChannelName(Enum):
     CHANNEL_NAME_CURRENT = 'CHANNEL_NAME_CURRENT'
     CHANNEL_NAME_CUSTOM = 'CHANNEL_NAME_CUSTOM'
     CHANNEL_NAME_PREVIEW = 'CHANNEL_NAME_PREVIEW'
-    CHANNEL_NAME_PREVIOUS = 'CHANNEL_NAME_PREVIOUS'
     CHANNEL_NAME_UNSPECIFIED = 'CHANNEL_NAME_UNSPECIFIED'
 
 
@@ -480,13 +622,225 @@ def from_dict(cls, d: Dict[str, any]) -> CreateAlert:
                    rearm=d.get('rearm', None))
 
 
+@dataclass
+class CreateAlertRequest:
+    alert: Optional[CreateAlertRequestAlert] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the CreateAlertRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.alert: body['alert'] = self.alert.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CreateAlertRequest:
+        """Deserializes the CreateAlertRequest from a dictionary."""
+        return cls(alert=_from_dict(d, 'alert', CreateAlertRequestAlert))
+
+
+@dataclass
+class CreateAlertRequestAlert:
+    condition: Optional[AlertCondition] = None
+    """Trigger conditions of the alert."""
+
+    custom_body: Optional[str] = None
+    """Custom body of alert notification, if it exists. See [here] for custom templating instructions.
+    
+    [here]: https://docs.databricks.com/sql/user/alerts/index.html"""
+
+    custom_subject: Optional[str] = None
+    """Custom subject of alert notification, if it exists. This can include email subject entries and
+    Slack notification headers, for example. See [here] for custom templating instructions.
+    
+    [here]: https://docs.databricks.com/sql/user/alerts/index.html"""
+
+    display_name: Optional[str] = None
+    """The display name of the alert."""
+
+    notify_on_ok: Optional[bool] = None
+    """Whether to notify alert subscribers when alert returns back to normal."""
+
+    parent_path: Optional[str] = None
+    """The workspace path of the folder containing the alert."""
+
+    query_id: Optional[str] = None
+    """UUID of the query attached to the alert."""
+
+    seconds_to_retrigger: Optional[int] = None
+    """Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it
+    can be triggered again. If 0 or not specified, the alert will not be triggered again."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CreateAlertRequestAlert into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.condition: body['condition'] = self.condition.as_dict()
+        if self.custom_body is not None: body['custom_body'] = self.custom_body
+        if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok
+        if self.parent_path is not None: body['parent_path'] = self.parent_path
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CreateAlertRequestAlert:
+        """Deserializes the CreateAlertRequestAlert from a dictionary."""
+        return cls(condition=_from_dict(d, 'condition', AlertCondition),
+                   custom_body=d.get('custom_body', None),
+                   custom_subject=d.get('custom_subject', None),
+                   display_name=d.get('display_name', None),
+                   notify_on_ok=d.get('notify_on_ok', None),
+                   parent_path=d.get('parent_path', None),
+                   query_id=d.get('query_id', None),
+                   seconds_to_retrigger=d.get('seconds_to_retrigger', None))
+
+
+@dataclass
+class CreateQueryRequest:
+    query: Optional[CreateQueryRequestQuery] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the CreateQueryRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.query: body['query'] = self.query.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CreateQueryRequest:
+        """Deserializes the CreateQueryRequest from a dictionary."""
+        return cls(query=_from_dict(d, 'query', CreateQueryRequestQuery))
+
+
+@dataclass
+class CreateQueryRequestQuery:
+    apply_auto_limit: Optional[bool] = None
+    """Whether to apply a 1000 row limit to the query result."""
+
+    catalog: Optional[str] = None
+    """Name of the catalog where this query will be executed."""
+
+    description: Optional[str] = None
+    """General description that conveys additional information about this query such as usage notes."""
+
+    display_name: Optional[str] = None
+    """Display name of the query that appears in list views, widget headings, and on the query page."""
+
+    parameters: Optional[List[QueryParameter]] = None
+    """List of query parameter definitions."""
+
+    parent_path: Optional[str] = None
+    """Workspace path of the workspace folder containing the object."""
+
+    query_text: Optional[str] = None
+    """Text of the query to be run."""
+
+    run_as_mode: Optional[RunAsMode] = None
+    """Sets the "Run as" role for the object."""
+
+    schema: Optional[str] = None
+    """Name of the schema where this query will be executed."""
+
+    tags: Optional[List[str]] = None
+
+    warehouse_id: Optional[str] = None
+    """ID of the SQL warehouse attached to the query."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CreateQueryRequestQuery into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.description is not None: body['description'] = self.description
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
+        if self.parent_path is not None: body['parent_path'] = self.parent_path
+        if self.query_text is not None: body['query_text'] = self.query_text
+        if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value
+        if self.schema is not None: body['schema'] = self.schema
+        if self.tags: body['tags'] = [v for v in self.tags]
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CreateQueryRequestQuery:
+        """Deserializes the CreateQueryRequestQuery from a dictionary."""
+        return cls(apply_auto_limit=d.get('apply_auto_limit', None),
+                   catalog=d.get('catalog', None),
+                   description=d.get('description', None),
+                   display_name=d.get('display_name', None),
+                   parameters=_repeated_dict(d, 'parameters', QueryParameter),
+                   parent_path=d.get('parent_path', None),
+                   query_text=d.get('query_text', None),
+                   run_as_mode=_enum(d, 'run_as_mode', RunAsMode),
+                   schema=d.get('schema', None),
+                   tags=d.get('tags', None),
+                   warehouse_id=d.get('warehouse_id', None))
+
+
+@dataclass
+class CreateVisualizationRequest:
+    visualization: Optional[CreateVisualizationRequestVisualization] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the CreateVisualizationRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.visualization: body['visualization'] = self.visualization.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CreateVisualizationRequest:
+        """Deserializes the CreateVisualizationRequest from a dictionary."""
+        return cls(visualization=_from_dict(d, 'visualization', CreateVisualizationRequestVisualization))
+
+
+@dataclass
+class CreateVisualizationRequestVisualization:
+    display_name: Optional[str] = None
+    """The display name of the visualization."""
+
+    query_id: Optional[str] = None
+    """UUID of the query that the visualization is attached to."""
+
+    serialized_options: Optional[str] = None
+    """The visualization options varies widely from one visualization type to the next and is
+    unsupported. Databricks does not recommend modifying visualization options directly."""
+
+    serialized_query_plan: Optional[str] = None
+    """The visualization query plan varies widely from one visualization type to the next and is
+    unsupported. Databricks does not recommend modifying the visualization query plan directly."""
+
+    type: Optional[str] = None
+    """The type of visualization: counter, table, funnel, and so on."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CreateVisualizationRequestVisualization into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.serialized_options is not None: body['serialized_options'] = self.serialized_options
+        if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan
+        if self.type is not None: body['type'] = self.type
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CreateVisualizationRequestVisualization:
+        """Deserializes the CreateVisualizationRequestVisualization from a dictionary."""
+        return cls(display_name=d.get('display_name', None),
+                   query_id=d.get('query_id', None),
+                   serialized_options=d.get('serialized_options', None),
+                   serialized_query_plan=d.get('serialized_query_plan', None),
+                   type=d.get('type', None))
+
+
 @dataclass
 class CreateWarehouseRequest:
     auto_stop_mins: Optional[int] = None
     """The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries)
     before it is automatically stopped.
     
-    Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop.
+    Supported values: - Must be >= 0 mins for serverless warehouses - Must be == 0 or >= 10 mins for
+    non-serverless warehouses - 0 indicates no autostop.
     
     Defaults to 120 mins"""
 
@@ -913,18 +1267,133 @@ def from_dict(cls, d: Dict[str, any]) -> DataSource:
                    warehouse_id=d.get('warehouse_id', None))
 
 
class DatePrecision(Enum):
    """Date-time precision used to format a date parameter value when the query is run.

    Per the fields that reference it, DAY_PRECISION corresponds to a YYYY-MM-DD format and is
    the default; MINUTE_PRECISION and SECOND_PRECISION add finer time components.
    """

    DAY_PRECISION = 'DAY_PRECISION'
    MINUTE_PRECISION = 'MINUTE_PRECISION'
    SECOND_PRECISION = 'SECOND_PRECISION'
+
+
 @dataclass
-class DeleteResponse:
+class DateRange:
+    start: str
+
+    end: str
 
     def as_dict(self) -> dict:
-        """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body."""
+        """Serializes the DateRange into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.end is not None: body['end'] = self.end
+        if self.start is not None: body['start'] = self.start
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
-        """Deserializes the DeleteResponse from a dictionary."""
-        return cls()
+    def from_dict(cls, d: Dict[str, any]) -> DateRange:
+        """Deserializes the DateRange from a dictionary."""
+        return cls(end=d.get('end', None), start=d.get('start', None))
+
+
@dataclass
class DateRangeValue:
    date_range_value: Optional[DateRange] = None
    """Manually specified date-time range value."""

    dynamic_date_range_value: Optional[DateRangeValueDynamicDateRange] = None
    """Dynamic date-time range value based on current date-time."""

    precision: Optional[DatePrecision] = None
    """Date-time precision to format the value into when the query is run. Defaults to DAY_PRECISION
    (YYYY-MM-DD)."""

    start_day_of_week: Optional[int] = None

    def as_dict(self) -> dict:
        """Serializes the DateRangeValue into a dictionary suitable for use as a JSON request body."""
        serialized = {}
        if self.date_range_value:
            serialized['date_range_value'] = self.date_range_value.as_dict()
        if self.dynamic_date_range_value is not None:
            serialized['dynamic_date_range_value'] = self.dynamic_date_range_value.value
        if self.precision is not None:
            serialized['precision'] = self.precision.value
        if self.start_day_of_week is not None:
            serialized['start_day_of_week'] = self.start_day_of_week
        return serialized

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> DateRangeValue:
        """Deserializes the DateRangeValue from a dictionary."""
        return cls(
            date_range_value=_from_dict(d, 'date_range_value', DateRange),
            dynamic_date_range_value=_enum(d, 'dynamic_date_range_value', DateRangeValueDynamicDateRange),
            precision=_enum(d, 'precision', DatePrecision),
            start_day_of_week=d.get('start_day_of_week', None),
        )
+
+
class DateRangeValueDynamicDateRange(Enum):
    """Dynamic date-time range value computed relative to the current date-time, used by
    DateRangeValue.dynamic_date_range_value as an alternative to a manually specified range."""

    LAST_12_MONTHS = 'LAST_12_MONTHS'
    LAST_14_DAYS = 'LAST_14_DAYS'
    LAST_24_HOURS = 'LAST_24_HOURS'
    LAST_30_DAYS = 'LAST_30_DAYS'
    LAST_60_DAYS = 'LAST_60_DAYS'
    LAST_7_DAYS = 'LAST_7_DAYS'
    LAST_8_HOURS = 'LAST_8_HOURS'
    LAST_90_DAYS = 'LAST_90_DAYS'
    LAST_HOUR = 'LAST_HOUR'
    LAST_MONTH = 'LAST_MONTH'
    LAST_WEEK = 'LAST_WEEK'
    LAST_YEAR = 'LAST_YEAR'
    THIS_MONTH = 'THIS_MONTH'
    THIS_WEEK = 'THIS_WEEK'
    THIS_YEAR = 'THIS_YEAR'
    TODAY = 'TODAY'
    YESTERDAY = 'YESTERDAY'
+
+
@dataclass
class DateValue:
    date_value: Optional[str] = None
    """Manually specified date-time value."""

    dynamic_date_value: Optional[DateValueDynamicDate] = None
    """Dynamic date-time value based on current date-time."""

    precision: Optional[DatePrecision] = None
    """Date-time precision to format the value into when the query is run. Defaults to DAY_PRECISION
    (YYYY-MM-DD)."""

    def as_dict(self) -> dict:
        """Serializes the DateValue into a dictionary suitable for use as a JSON request body."""
        serialized = {}
        if self.date_value is not None:
            serialized['date_value'] = self.date_value
        if self.dynamic_date_value is not None:
            serialized['dynamic_date_value'] = self.dynamic_date_value.value
        if self.precision is not None:
            serialized['precision'] = self.precision.value
        return serialized

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> DateValue:
        """Deserializes the DateValue from a dictionary."""
        return cls(
            date_value=d.get('date_value', None),
            dynamic_date_value=_enum(d, 'dynamic_date_value', DateValueDynamicDate),
            precision=_enum(d, 'precision', DatePrecision),
        )
+
+
class DateValueDynamicDate(Enum):
    """Dynamic date-time value computed relative to the current date-time, used by
    DateValue.dynamic_date_value as an alternative to a manually specified value."""

    NOW = 'NOW'
    YESTERDAY = 'YESTERDAY'
+
+
@dataclass
class DeleteResponse:

    def as_dict(self) -> dict:
        """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body."""
        return {}

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
        """Deserializes the DeleteResponse from a dictionary."""
        return cls()
 
 
 @dataclass
@@ -942,26 +1411,6 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteWarehouseResponse:
 
 
 class Disposition(Enum):
-    """The fetch disposition provides two modes of fetching results: `INLINE` and `EXTERNAL_LINKS`.
-    
-    Statements executed with `INLINE` disposition will return result data inline, in `JSON_ARRAY`
-    format, in a series of chunks. If a given statement produces a result set with a size larger
-    than 25 MiB, that statement execution is aborted, and no result set will be available.
-    
-    **NOTE** Byte limits are computed based upon internal representations of the result set data,
-    and might not match the sizes visible in JSON responses.
-    
-    Statements executed with `EXTERNAL_LINKS` disposition will return result data as external links:
-    URLs that point to cloud storage internal to the workspace. Using `EXTERNAL_LINKS` disposition
-    allows statements to generate arbitrarily sized result sets for fetching up to 100 GiB. The
-    resulting links have two important properties:
-    
-    1. They point to resources _external_ to the Databricks compute; therefore any associated
-    authentication information (typically a personal access token, OAuth token, or similar) _must be
-    removed_ when fetching from these links.
-    
-    2. These are presigned URLs with a specific expiration, indicated in the response. The behavior
-    when attempting to use an expired link is cloud specific."""
 
     EXTERNAL_LINKS = 'EXTERNAL_LINKS'
     INLINE = 'INLINE'
@@ -1140,6 +1589,22 @@ def from_dict(cls, d: Dict[str, any]) -> EditWarehouseResponse:
         return cls()
 
 
@dataclass
class Empty:
    """Represents an empty message, similar to google.protobuf.Empty, which is not available in the
    firm right now."""

    def as_dict(self) -> dict:
        """Serializes the Empty into a dictionary suitable for use as a JSON request body."""
        return {}

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> Empty:
        """Deserializes the Empty from a dictionary."""
        return cls()
+
+
 @dataclass
 class EndpointConfPair:
     key: Optional[str] = None
@@ -1384,6 +1849,33 @@ def from_dict(cls, d: Dict[str, any]) -> EndpointTags:
         return cls(custom_tags=_repeated_dict(d, 'custom_tags', EndpointTagPair))
 
 
@dataclass
class EnumValue:
    enum_options: Optional[str] = None
    """List of valid query parameter values, newline delimited."""

    multi_values_options: Optional[MultiValuesOptions] = None
    """If specified, allows multiple values to be selected for this parameter."""

    values: Optional[List[str]] = None
    """List of selected query parameter values."""

    def as_dict(self) -> dict:
        """Serializes the EnumValue into a dictionary suitable for use as a JSON request body."""
        serialized = {}
        if self.enum_options is not None:
            serialized['enum_options'] = self.enum_options
        if self.multi_values_options:
            serialized['multi_values_options'] = self.multi_values_options.as_dict()
        if self.values:
            serialized['values'] = list(self.values)
        return serialized

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> EnumValue:
        """Deserializes the EnumValue from a dictionary."""
        return cls(
            enum_options=d.get('enum_options', None),
            multi_values_options=_from_dict(d, 'multi_values_options', MultiValuesOptions),
            values=d.get('values', None),
        )
+
+
 @dataclass
 class ExecuteStatementRequest:
     statement: str
@@ -1407,26 +1899,6 @@ class ExecuteStatementRequest:
     [`USE CATALOG`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-catalog.html"""
 
     disposition: Optional[Disposition] = None
-    """The fetch disposition provides two modes of fetching results: `INLINE` and `EXTERNAL_LINKS`.
-    
-    Statements executed with `INLINE` disposition will return result data inline, in `JSON_ARRAY`
-    format, in a series of chunks. If a given statement produces a result set with a size larger
-    than 25 MiB, that statement execution is aborted, and no result set will be available.
-    
-    **NOTE** Byte limits are computed based upon internal representations of the result set data,
-    and might not match the sizes visible in JSON responses.
-    
-    Statements executed with `EXTERNAL_LINKS` disposition will return result data as external links:
-    URLs that point to cloud storage internal to the workspace. Using `EXTERNAL_LINKS` disposition
-    allows statements to generate arbitrarily sized result sets for fetching up to 100 GiB. The
-    resulting links have two important properties:
-    
-    1. They point to resources _external_ to the Databricks compute; therefore any associated
-    authentication information (typically a personal access token, OAuth token, or similar) _must be
-    removed_ when fetching from these links.
-    
-    2. These are presigned URLs with a specific expiration, indicated in the response. The behavior
-    when attempting to use an expired link is cloud specific."""
 
     format: Optional[Format] = None
     """Statement execution supports three result formats: `JSON_ARRAY` (default), `ARROW_STREAM`, and
@@ -1565,43 +2037,6 @@ class ExecuteStatementRequestOnWaitTimeout(Enum):
     CONTINUE = 'CONTINUE'
 
 
-@dataclass
-class ExecuteStatementResponse:
-    manifest: Optional[ResultManifest] = None
-    """The result manifest provides schema and metadata for the result set."""
-
-    result: Optional[ResultData] = None
-    """Contains the result data of a single chunk when using `INLINE` disposition. When using
-    `EXTERNAL_LINKS` disposition, the array `external_links` is used instead to provide presigned
-    URLs to the result data in cloud storage. Exactly one of these alternatives is used. (While the
-    `external_links` array prepares the API to return multiple links in a single response. Currently
-    only a single link is returned.)"""
-
-    statement_id: Optional[str] = None
-    """The statement ID is returned upon successfully submitting a SQL statement, and is a required
-    reference for all subsequent calls."""
-
-    status: Optional[StatementStatus] = None
-    """The status response includes execution state and if relevant, error information."""
-
-    def as_dict(self) -> dict:
-        """Serializes the ExecuteStatementResponse into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.manifest: body['manifest'] = self.manifest.as_dict()
-        if self.result: body['result'] = self.result.as_dict()
-        if self.statement_id is not None: body['statement_id'] = self.statement_id
-        if self.status: body['status'] = self.status.as_dict()
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> ExecuteStatementResponse:
-        """Deserializes the ExecuteStatementResponse from a dictionary."""
-        return cls(manifest=_from_dict(d, 'manifest', ResultManifest),
-                   result=_from_dict(d, 'result', ResultData),
-                   statement_id=d.get('statement_id', None),
-                   status=_from_dict(d, 'status', StatementStatus))
-
-
 @dataclass
 class ExternalLink:
     byte_count: Optional[int] = None
@@ -1616,9 +2051,6 @@ class ExternalLink:
     which point a new `external_link` must be requested."""
 
     external_link: Optional[str] = None
-    """A presigned URL pointing to a chunk of result data, hosted by an external service, with a short
-    expiration time (<= 15 minutes). As this URL contains a temporary credential, it should be
-    considered sensitive and the client should not expose this URL in a log."""
 
     http_headers: Optional[Dict[str, str]] = None
     """HTTP headers that must be included with a GET request to the `external_link`. Each header is
@@ -1705,43 +2137,6 @@ def from_dict(cls, d: Dict[str, any]) -> GetResponse:
                    object_type=_enum(d, 'object_type', ObjectType))
 
 
-@dataclass
-class GetStatementResponse:
-    manifest: Optional[ResultManifest] = None
-    """The result manifest provides schema and metadata for the result set."""
-
-    result: Optional[ResultData] = None
-    """Contains the result data of a single chunk when using `INLINE` disposition. When using
-    `EXTERNAL_LINKS` disposition, the array `external_links` is used instead to provide presigned
-    URLs to the result data in cloud storage. Exactly one of these alternatives is used. (While the
-    `external_links` array prepares the API to return multiple links in a single response. Currently
-    only a single link is returned.)"""
-
-    statement_id: Optional[str] = None
-    """The statement ID is returned upon successfully submitting a SQL statement, and is a required
-    reference for all subsequent calls."""
-
-    status: Optional[StatementStatus] = None
-    """The status response includes execution state and if relevant, error information."""
-
-    def as_dict(self) -> dict:
-        """Serializes the GetStatementResponse into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.manifest: body['manifest'] = self.manifest.as_dict()
-        if self.result: body['result'] = self.result.as_dict()
-        if self.statement_id is not None: body['statement_id'] = self.statement_id
-        if self.status: body['status'] = self.status.as_dict()
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> GetStatementResponse:
-        """Deserializes the GetStatementResponse from a dictionary."""
-        return cls(manifest=_from_dict(d, 'manifest', ResultManifest),
-                   result=_from_dict(d, 'result', ResultData),
-                   statement_id=d.get('statement_id', None),
-                   status=_from_dict(d, 'status', StatementStatus))
-
-
 @dataclass
 class GetWarehousePermissionLevelsResponse:
     permission_levels: Optional[List[WarehousePermissionsDescription]] = None
@@ -1987,100 +2382,614 @@ class GetWorkspaceWarehouseConfigResponseSecurityPolicy(Enum):
     PASSTHROUGH = 'PASSTHROUGH'
 
 
-class ListOrder(Enum):
-
-    CREATED_AT = 'created_at'
-    NAME = 'name'
-
-
 @dataclass
-class ListQueriesResponse:
-    has_next_page: Optional[bool] = None
-    """Whether there is another page of results."""
+class LegacyAlert:
+    created_at: Optional[str] = None
+    """Timestamp when the alert was created."""
 
-    next_page_token: Optional[str] = None
-    """A token that can be used to get the next page of results."""
+    id: Optional[str] = None
+    """Alert ID."""
 
-    res: Optional[List[QueryInfo]] = None
+    last_triggered_at: Optional[str] = None
+    """Timestamp when the alert was last triggered."""
 
-    def as_dict(self) -> dict:
-        """Serializes the ListQueriesResponse into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.has_next_page is not None: body['has_next_page'] = self.has_next_page
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.res: body['res'] = [v.as_dict() for v in self.res]
-        return body
+    name: Optional[str] = None
+    """Name of the alert."""
 
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> ListQueriesResponse:
-        """Deserializes the ListQueriesResponse from a dictionary."""
-        return cls(has_next_page=d.get('has_next_page', None),
-                   next_page_token=d.get('next_page_token', None),
-                   res=_repeated_dict(d, 'res', QueryInfo))
+    options: Optional[AlertOptions] = None
+    """Alert configuration options."""
 
+    parent: Optional[str] = None
+    """The identifier of the workspace folder containing the object."""
 
-@dataclass
-class ListResponse:
-    count: Optional[int] = None
-    """The total number of dashboards."""
+    query: Optional[AlertQuery] = None
 
-    page: Optional[int] = None
-    """The current page being displayed."""
+    rearm: Optional[int] = None
+    """Number of seconds after being triggered before the alert rearms itself and can be triggered
+    again. If `null`, alert will never be triggered again."""
 
-    page_size: Optional[int] = None
-    """The number of dashboards per page."""
+    state: Optional[LegacyAlertState] = None
+    """State of the alert. Possible values are: `unknown` (yet to be evaluated), `triggered` (evaluated
+    and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions)."""
 
-    results: Optional[List[Dashboard]] = None
-    """List of dashboards returned."""
+    updated_at: Optional[str] = None
+    """Timestamp when the alert was last updated."""
+
+    user: Optional[User] = None
 
     def as_dict(self) -> dict:
-        """Serializes the ListResponse into a dictionary suitable for use as a JSON request body."""
+        """Serializes the LegacyAlert into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.count is not None: body['count'] = self.count
-        if self.page is not None: body['page'] = self.page
-        if self.page_size is not None: body['page_size'] = self.page_size
-        if self.results: body['results'] = [v.as_dict() for v in self.results]
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.id is not None: body['id'] = self.id
+        if self.last_triggered_at is not None: body['last_triggered_at'] = self.last_triggered_at
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options.as_dict()
+        if self.parent is not None: body['parent'] = self.parent
+        if self.query: body['query'] = self.query.as_dict()
+        if self.rearm is not None: body['rearm'] = self.rearm
+        if self.state is not None: body['state'] = self.state.value
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.user: body['user'] = self.user.as_dict()
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> ListResponse:
-        """Deserializes the ListResponse from a dictionary."""
-        return cls(count=d.get('count', None),
-                   page=d.get('page', None),
-                   page_size=d.get('page_size', None),
-                   results=_repeated_dict(d, 'results', Dashboard))
-
+    def from_dict(cls, d: Dict[str, any]) -> LegacyAlert:
+        """Deserializes the LegacyAlert from a dictionary."""
+        return cls(created_at=d.get('created_at', None),
+                   id=d.get('id', None),
+                   last_triggered_at=d.get('last_triggered_at', None),
+                   name=d.get('name', None),
+                   options=_from_dict(d, 'options', AlertOptions),
+                   parent=d.get('parent', None),
+                   query=_from_dict(d, 'query', AlertQuery),
+                   rearm=d.get('rearm', None),
+                   state=_enum(d, 'state', LegacyAlertState),
+                   updated_at=d.get('updated_at', None),
+                   user=_from_dict(d, 'user', User))
 
-@dataclass
-class ListWarehousesResponse:
-    warehouses: Optional[List[EndpointInfo]] = None
-    """A list of warehouses and their configurations."""
 
-    def as_dict(self) -> dict:
-        """Serializes the ListWarehousesResponse into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.warehouses: body['warehouses'] = [v.as_dict() for v in self.warehouses]
-        return body
+class LegacyAlertState(Enum):
+    """State of the alert. Possible values are: `unknown` (yet to be evaluated), `triggered` (evaluated
+    and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions)."""
 
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> ListWarehousesResponse:
-        """Deserializes the ListWarehousesResponse from a dictionary."""
-        return cls(warehouses=_repeated_dict(d, 'warehouses', EndpointInfo))
+    OK = 'ok'
+    TRIGGERED = 'triggered'
+    UNKNOWN = 'unknown'
 
 
 @dataclass
-class MultiValuesOptions:
-    """If specified, allows multiple values to be selected for this parameter. Only applies to dropdown
-    list and query-based dropdown list parameters."""
-
-    prefix: Optional[str] = None
-    """Character that prefixes each selected parameter value."""
+class LegacyQuery:
+    can_edit: Optional[bool] = None
+    """Describes whether the authenticated user is allowed to edit the definition of this query."""
 
-    separator: Optional[str] = None
-    """Character that separates each selected parameter value. Defaults to a comma."""
+    created_at: Optional[str] = None
+    """The timestamp when this query was created."""
 
-    suffix: Optional[str] = None
-    """Character that suffixes each selected parameter value."""
+    data_source_id: Optional[str] = None
+    """Data source ID maps to the ID of the data source used by the resource and is distinct from the
+    warehouse ID. [Learn more]
+    
+    [Learn more]: https://docs.databricks.com/api/workspace/datasources/list"""
+
+    description: Optional[str] = None
+    """General description that conveys additional information about this query such as usage notes."""
+
+    id: Optional[str] = None
+    """Query ID."""
+
+    is_archived: Optional[bool] = None
+    """Indicates whether the query is trashed. Trashed queries can't be used in dashboards, or appear
+    in search results. If this boolean is `true`, the `options` property for this query includes a
+    `moved_to_trash_at` timestamp. Trashed queries are permanently deleted after 30 days."""
+
+    is_draft: Optional[bool] = None
+    """Whether the query is a draft. Draft queries only appear in list views for their owners.
+    Visualizations from draft queries cannot appear on dashboards."""
+
+    is_favorite: Optional[bool] = None
+    """Whether this query object appears in the current user's favorites list. This flag determines
+    whether the star icon for favorites is selected."""
+
+    is_safe: Optional[bool] = None
+    """Text parameter types are not safe from SQL injection for all types of data source. Set this
+    Boolean parameter to `true` if a query either does not use any text type parameters or uses a
+    data source type where text type parameters are handled safely."""
+
+    last_modified_by: Optional[User] = None
+
+    last_modified_by_id: Optional[int] = None
+    """The ID of the user who last saved changes to this query."""
+
+    latest_query_data_id: Optional[str] = None
+    """If there is a cached result for this query and user, this field includes the query result ID. If
+    this query uses parameters, this field is always null."""
+
+    name: Optional[str] = None
+    """The title of this query that appears in list views, widget headings, and on the query page."""
+
+    options: Optional[QueryOptions] = None
+
+    parent: Optional[str] = None
+    """The identifier of the workspace folder containing the object."""
+
+    permission_tier: Optional[PermissionLevel] = None
+    """* `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_EDIT`: Can edit the query
+    * `CAN_MANAGE`: Can manage the query"""
+
+    query: Optional[str] = None
+    """The text of the query to be run."""
+
+    query_hash: Optional[str] = None
+    """A SHA-256 hash of the query text along with the authenticated user ID."""
+
+    run_as_role: Optional[RunAsRole] = None
+    """Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
+    viewer" behavior) or `"owner"` (signifying "run as owner" behavior)"""
+
+    tags: Optional[List[str]] = None
+
+    updated_at: Optional[str] = None
+    """The timestamp at which this query was last updated."""
+
+    user: Optional[User] = None
+
+    user_id: Optional[int] = None
+    """The ID of the user who owns the query."""
+
+    visualizations: Optional[List[LegacyVisualization]] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the LegacyQuery into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.can_edit is not None: body['can_edit'] = self.can_edit
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.data_source_id is not None: body['data_source_id'] = self.data_source_id
+        if self.description is not None: body['description'] = self.description
+        if self.id is not None: body['id'] = self.id
+        if self.is_archived is not None: body['is_archived'] = self.is_archived
+        if self.is_draft is not None: body['is_draft'] = self.is_draft
+        if self.is_favorite is not None: body['is_favorite'] = self.is_favorite
+        if self.is_safe is not None: body['is_safe'] = self.is_safe
+        if self.last_modified_by: body['last_modified_by'] = self.last_modified_by.as_dict()
+        if self.last_modified_by_id is not None: body['last_modified_by_id'] = self.last_modified_by_id
+        if self.latest_query_data_id is not None: body['latest_query_data_id'] = self.latest_query_data_id
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options.as_dict()
+        if self.parent is not None: body['parent'] = self.parent
+        if self.permission_tier is not None: body['permission_tier'] = self.permission_tier.value
+        if self.query is not None: body['query'] = self.query
+        if self.query_hash is not None: body['query_hash'] = self.query_hash
+        if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value
+        if self.tags: body['tags'] = [v for v in self.tags]
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.user: body['user'] = self.user.as_dict()
+        if self.user_id is not None: body['user_id'] = self.user_id
+        if self.visualizations: body['visualizations'] = [v.as_dict() for v in self.visualizations]
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> LegacyQuery:
+        """Deserializes the LegacyQuery from a dictionary."""
+        return cls(can_edit=d.get('can_edit', None),
+                   created_at=d.get('created_at', None),
+                   data_source_id=d.get('data_source_id', None),
+                   description=d.get('description', None),
+                   id=d.get('id', None),
+                   is_archived=d.get('is_archived', None),
+                   is_draft=d.get('is_draft', None),
+                   is_favorite=d.get('is_favorite', None),
+                   is_safe=d.get('is_safe', None),
+                   last_modified_by=_from_dict(d, 'last_modified_by', User),
+                   last_modified_by_id=d.get('last_modified_by_id', None),
+                   latest_query_data_id=d.get('latest_query_data_id', None),
+                   name=d.get('name', None),
+                   options=_from_dict(d, 'options', QueryOptions),
+                   parent=d.get('parent', None),
+                   permission_tier=_enum(d, 'permission_tier', PermissionLevel),
+                   query=d.get('query', None),
+                   query_hash=d.get('query_hash', None),
+                   run_as_role=_enum(d, 'run_as_role', RunAsRole),
+                   tags=d.get('tags', None),
+                   updated_at=d.get('updated_at', None),
+                   user=_from_dict(d, 'user', User),
+                   user_id=d.get('user_id', None),
+                   visualizations=_repeated_dict(d, 'visualizations', LegacyVisualization))
+
+
@dataclass
class LegacyVisualization:
    """The visualization description API changes frequently and is unsupported. You can duplicate a
    visualization by copying description objects received _from the API_ and then using them to
    create a new one with a POST request to the same endpoint. Databricks does not recommend
    constructing ad-hoc visualizations entirely in JSON."""

    created_at: Optional[str] = None

    description: Optional[str] = None
    """A short description of this visualization. This is not displayed in the UI."""

    id: Optional[str] = None
    """The UUID for this visualization."""

    name: Optional[str] = None
    """The name of the visualization that appears on dashboards and the query screen."""

    options: Optional[Any] = None
    """The options object varies widely from one visualization type to the next and is unsupported.
    Databricks does not recommend modifying visualization settings in JSON."""

    query: Optional[LegacyQuery] = None

    type: Optional[str] = None
    """The type of visualization: chart, table, pivot table, and so on."""

    updated_at: Optional[str] = None

    def as_dict(self) -> dict:
        """Serializes the LegacyVisualization into a dictionary suitable for use as a JSON request body."""
        serialized = {}
        if self.created_at is not None:
            serialized['created_at'] = self.created_at
        if self.description is not None:
            serialized['description'] = self.description
        if self.id is not None:
            serialized['id'] = self.id
        if self.name is not None:
            serialized['name'] = self.name
        if self.options:
            serialized['options'] = self.options
        if self.query:
            serialized['query'] = self.query.as_dict()
        if self.type is not None:
            serialized['type'] = self.type
        if self.updated_at is not None:
            serialized['updated_at'] = self.updated_at
        return serialized

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> LegacyVisualization:
        """Deserializes the LegacyVisualization from a dictionary."""
        return cls(
            created_at=d.get('created_at', None),
            description=d.get('description', None),
            id=d.get('id', None),
            name=d.get('name', None),
            options=d.get('options', None),
            query=_from_dict(d, 'query', LegacyQuery),
            type=d.get('type', None),
            updated_at=d.get('updated_at', None),
        )
+
+
class LifecycleState(Enum):
    """The workspace state of an object; used for tracking trashed status (ACTIVE vs TRASHED)."""

    ACTIVE = 'ACTIVE'
    TRASHED = 'TRASHED'
+
+
@dataclass
class ListAlertsResponse:
    next_page_token: Optional[str] = None

    results: Optional[List[ListAlertsResponseAlert]] = None

    def as_dict(self) -> dict:
        """Serializes the ListAlertsResponse into a dictionary suitable for use as a JSON request body."""
        serialized = {}
        if self.next_page_token is not None:
            serialized['next_page_token'] = self.next_page_token
        if self.results:
            serialized['results'] = [entry.as_dict() for entry in self.results]
        return serialized

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> ListAlertsResponse:
        """Deserializes the ListAlertsResponse from a dictionary."""
        return cls(
            next_page_token=d.get('next_page_token', None),
            results=_repeated_dict(d, 'results', ListAlertsResponseAlert),
        )
+
+
+@dataclass
+class ListAlertsResponseAlert:
+    """A single alert entry as returned inside a ListAlertsResponse page."""
+
+    condition: Optional[AlertCondition] = None
+    """Trigger conditions of the alert."""
+
+    create_time: Optional[str] = None
+    """The timestamp indicating when the alert was created."""
+
+    custom_body: Optional[str] = None
+    """Custom body of alert notification, if it exists. See [here] for custom templating instructions.
+    
+    [here]: https://docs.databricks.com/sql/user/alerts/index.html"""
+
+    custom_subject: Optional[str] = None
+    """Custom subject of alert notification, if it exists. This can include email subject entries and
+    Slack notification headers, for example. See [here] for custom templating instructions.
+    
+    [here]: https://docs.databricks.com/sql/user/alerts/index.html"""
+
+    display_name: Optional[str] = None
+    """The display name of the alert."""
+
+    id: Optional[str] = None
+    """UUID identifying the alert."""
+
+    lifecycle_state: Optional[LifecycleState] = None
+    """The workspace state of the alert. Used for tracking trashed status."""
+
+    notify_on_ok: Optional[bool] = None
+    """Whether to notify alert subscribers when alert returns back to normal."""
+
+    owner_user_name: Optional[str] = None
+    """The owner's username. This field is set to "Unavailable" if the user has been deleted."""
+
+    query_id: Optional[str] = None
+    """UUID of the query attached to the alert."""
+
+    seconds_to_retrigger: Optional[int] = None
+    """Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it
+    can be triggered again. If 0 or not specified, the alert will not be triggered again."""
+
+    state: Optional[AlertState] = None
+    """Current state of the alert's trigger status. This field is set to UNKNOWN if the alert has not
+    yet been evaluated or ran into an error during the last evaluation."""
+
+    trigger_time: Optional[str] = None
+    """Timestamp when the alert was last triggered, if the alert has been triggered before."""
+
+    update_time: Optional[str] = None
+    """The timestamp indicating when the alert was updated."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListAlertsResponseAlert into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.condition: body['condition'] = self.condition.as_dict()
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.custom_body is not None: body['custom_body'] = self.custom_body
+        if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.id is not None: body['id'] = self.id
+        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value
+        if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok
+        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
+        if self.state is not None: body['state'] = self.state.value
+        if self.trigger_time is not None: body['trigger_time'] = self.trigger_time
+        if self.update_time is not None: body['update_time'] = self.update_time
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListAlertsResponseAlert:
+        """Deserializes the ListAlertsResponseAlert from a dictionary."""
+        return cls(condition=_from_dict(d, 'condition', AlertCondition),
+                   create_time=d.get('create_time', None),
+                   custom_body=d.get('custom_body', None),
+                   custom_subject=d.get('custom_subject', None),
+                   display_name=d.get('display_name', None),
+                   id=d.get('id', None),
+                   lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState),
+                   notify_on_ok=d.get('notify_on_ok', None),
+                   owner_user_name=d.get('owner_user_name', None),
+                   query_id=d.get('query_id', None),
+                   seconds_to_retrigger=d.get('seconds_to_retrigger', None),
+                   state=_enum(d, 'state', AlertState),
+                   trigger_time=d.get('trigger_time', None),
+                   update_time=d.get('update_time', None))
+
+
+class ListOrder(Enum):
+    """Sort key for list operations: by creation time ('created_at') or by name ('name')."""
+
+    CREATED_AT = 'created_at'
+    NAME = 'name'
+
+
+@dataclass
+class ListQueriesResponse:
+    """Response body for the list-queries (history) operation: one page of QueryInfo entries."""
+
+    has_next_page: Optional[bool] = None
+    """Whether there is another page of results."""
+
+    next_page_token: Optional[str] = None
+    """A token that can be used to get the next page of results."""
+
+    # QueryInfo entries contained in this page of results.
+    res: Optional[List[QueryInfo]] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the ListQueriesResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.has_next_page is not None: body['has_next_page'] = self.has_next_page
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.res: body['res'] = [v.as_dict() for v in self.res]
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListQueriesResponse:
+        """Deserializes the ListQueriesResponse from a dictionary."""
+        return cls(has_next_page=d.get('has_next_page', None),
+                   next_page_token=d.get('next_page_token', None),
+                   res=_repeated_dict(d, 'res', QueryInfo))
+
+
+@dataclass
+class ListQueryObjectsResponse:
+    """Response body for the list-queries (objects) operation: one page of query objects plus a pagination token."""
+
+    # Opaque token for requesting the next page; absent/None when not returned by the server.
+    next_page_token: Optional[str] = None
+
+    # Query objects contained in this page of results.
+    results: Optional[List[ListQueryObjectsResponseQuery]] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the ListQueryObjectsResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.results: body['results'] = [v.as_dict() for v in self.results]
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListQueryObjectsResponse:
+        """Deserializes the ListQueryObjectsResponse from a dictionary."""
+        return cls(next_page_token=d.get('next_page_token', None),
+                   results=_repeated_dict(d, 'results', ListQueryObjectsResponseQuery))
+
+
+@dataclass
+class ListQueryObjectsResponseQuery:
+    """A single query object entry as returned inside a ListQueryObjectsResponse page."""
+
+    apply_auto_limit: Optional[bool] = None
+    """Whether to apply a 1000 row limit to the query result."""
+
+    catalog: Optional[str] = None
+    """Name of the catalog where this query will be executed."""
+
+    create_time: Optional[str] = None
+    """Timestamp when this query was created."""
+
+    description: Optional[str] = None
+    """General description that conveys additional information about this query such as usage notes."""
+
+    display_name: Optional[str] = None
+    """Display name of the query that appears in list views, widget headings, and on the query page."""
+
+    id: Optional[str] = None
+    """UUID identifying the query."""
+
+    last_modifier_user_name: Optional[str] = None
+    """Username of the user who last saved changes to this query."""
+
+    lifecycle_state: Optional[LifecycleState] = None
+    """Indicates whether the query is trashed."""
+
+    owner_user_name: Optional[str] = None
+    """Username of the user that owns the query."""
+
+    parameters: Optional[List[QueryParameter]] = None
+    """List of query parameter definitions."""
+
+    query_text: Optional[str] = None
+    """Text of the query to be run."""
+
+    run_as_mode: Optional[RunAsMode] = None
+    """Sets the "Run as" role for the object."""
+
+    schema: Optional[str] = None
+    """Name of the schema where this query will be executed."""
+
+    # Free-form string tags associated with the query.
+    tags: Optional[List[str]] = None
+
+    update_time: Optional[str] = None
+    """Timestamp when this query was last updated."""
+
+    warehouse_id: Optional[str] = None
+    """ID of the SQL warehouse attached to the query."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListQueryObjectsResponseQuery into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.description is not None: body['description'] = self.description
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.id is not None: body['id'] = self.id
+        if self.last_modifier_user_name is not None:
+            body['last_modifier_user_name'] = self.last_modifier_user_name
+        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value
+        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
+        if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
+        if self.query_text is not None: body['query_text'] = self.query_text
+        if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value
+        if self.schema is not None: body['schema'] = self.schema
+        if self.tags: body['tags'] = [v for v in self.tags]
+        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListQueryObjectsResponseQuery:
+        """Deserializes the ListQueryObjectsResponseQuery from a dictionary."""
+        return cls(apply_auto_limit=d.get('apply_auto_limit', None),
+                   catalog=d.get('catalog', None),
+                   create_time=d.get('create_time', None),
+                   description=d.get('description', None),
+                   display_name=d.get('display_name', None),
+                   id=d.get('id', None),
+                   last_modifier_user_name=d.get('last_modifier_user_name', None),
+                   lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState),
+                   owner_user_name=d.get('owner_user_name', None),
+                   parameters=_repeated_dict(d, 'parameters', QueryParameter),
+                   query_text=d.get('query_text', None),
+                   run_as_mode=_enum(d, 'run_as_mode', RunAsMode),
+                   schema=d.get('schema', None),
+                   tags=d.get('tags', None),
+                   update_time=d.get('update_time', None),
+                   warehouse_id=d.get('warehouse_id', None))
+
+
+@dataclass
+class ListResponse:
+    """Offset-paginated response for listing dashboards (count/page/page_size style)."""
+
+    count: Optional[int] = None
+    """The total number of dashboards."""
+
+    page: Optional[int] = None
+    """The current page being displayed."""
+
+    page_size: Optional[int] = None
+    """The number of dashboards per page."""
+
+    results: Optional[List[Dashboard]] = None
+    """List of dashboards returned."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.count is not None: body['count'] = self.count
+        if self.page is not None: body['page'] = self.page
+        if self.page_size is not None: body['page_size'] = self.page_size
+        if self.results: body['results'] = [v.as_dict() for v in self.results]
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListResponse:
+        """Deserializes the ListResponse from a dictionary."""
+        return cls(count=d.get('count', None),
+                   page=d.get('page', None),
+                   page_size=d.get('page_size', None),
+                   results=_repeated_dict(d, 'results', Dashboard))
+
+
+@dataclass
+class ListVisualizationsForQueryResponse:
+    """Response body for listing the visualizations of a query: one page of results plus a pagination token."""
+
+    # Opaque token for requesting the next page; absent/None when not returned by the server.
+    next_page_token: Optional[str] = None
+
+    # Visualizations contained in this page of results.
+    results: Optional[List[Visualization]] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the ListVisualizationsForQueryResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.results: body['results'] = [v.as_dict() for v in self.results]
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListVisualizationsForQueryResponse:
+        """Deserializes the ListVisualizationsForQueryResponse from a dictionary."""
+        return cls(next_page_token=d.get('next_page_token', None),
+                   results=_repeated_dict(d, 'results', Visualization))
+
+
+@dataclass
+class ListWarehousesResponse:
+    """Response body for the list-warehouses operation."""
+
+    warehouses: Optional[List[EndpointInfo]] = None
+    """A list of warehouses and their configurations."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListWarehousesResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.warehouses: body['warehouses'] = [v.as_dict() for v in self.warehouses]
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListWarehousesResponse:
+        """Deserializes the ListWarehousesResponse from a dictionary."""
+        return cls(warehouses=_repeated_dict(d, 'warehouses', EndpointInfo))
+
+
+@dataclass
+class MultiValuesOptions:
+    prefix: Optional[str] = None
+    """Character that prefixes each selected parameter value."""
+
+    separator: Optional[str] = None
+    """Character that separates each selected parameter value. Defaults to a comma."""
+
+    suffix: Optional[str] = None
+    """Character that suffixes each selected parameter value."""
 
     def as_dict(self) -> dict:
         """Serializes the MultiValuesOptions into a dictionary suitable for use as a JSON request body."""
@@ -2098,6 +3007,22 @@ def from_dict(cls, d: Dict[str, any]) -> MultiValuesOptions:
                    suffix=d.get('suffix', None))
 
 
+@dataclass
+class NumericValue:
+    """Wrapper for a single numeric (float) query-parameter value."""
+
+    # The numeric value; omitted from the serialized body when None.
+    value: Optional[float] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the NumericValue into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.value is not None: body['value'] = self.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> NumericValue:
+        """Deserializes the NumericValue from a dictionary."""
+        return cls(value=d.get('value', None))
+
+
 class ObjectType(Enum):
     """A singular noun object type."""
 
@@ -2222,7 +3147,7 @@ class PermissionLevel(Enum):
 
 
 class PlansState(Enum):
-    """Whether plans exist for the execution, or the reason why they are missing"""
+    """Possible Reasons for which we have not saved plans in the database"""
 
     EMPTY = 'EMPTY'
     EXISTS = 'EXISTS'
@@ -2234,141 +3159,126 @@ class PlansState(Enum):
 
 @dataclass
 class Query:
-    can_edit: Optional[bool] = None
-    """Describes whether the authenticated user is allowed to edit the definition of this query."""
+    apply_auto_limit: Optional[bool] = None
+    """Whether to apply a 1000 row limit to the query result."""
 
-    created_at: Optional[str] = None
-    """The timestamp when this query was created."""
+    catalog: Optional[str] = None
+    """Name of the catalog where this query will be executed."""
 
-    data_source_id: Optional[str] = None
-    """Data source ID maps to the ID of the data source used by the resource and is distinct from the
-    warehouse ID. [Learn more]
-    
-    [Learn more]: https://docs.databricks.com/api/workspace/datasources/list"""
+    create_time: Optional[str] = None
+    """Timestamp when this query was created."""
 
     description: Optional[str] = None
     """General description that conveys additional information about this query such as usage notes."""
 
-    id: Optional[str] = None
-    """Query ID."""
-
-    is_archived: Optional[bool] = None
-    """Indicates whether the query is trashed. Trashed queries can't be used in dashboards, or appear
-    in search results. If this boolean is `true`, the `options` property for this query includes a
-    `moved_to_trash_at` timestamp. Trashed queries are permanently deleted after 30 days."""
-
-    is_draft: Optional[bool] = None
-    """Whether the query is a draft. Draft queries only appear in list views for their owners.
-    Visualizations from draft queries cannot appear on dashboards."""
-
-    is_favorite: Optional[bool] = None
-    """Whether this query object appears in the current user's favorites list. This flag determines
-    whether the star icon for favorites is selected."""
-
-    is_safe: Optional[bool] = None
-    """Text parameter types are not safe from SQL injection for all types of data source. Set this
-    Boolean parameter to `true` if a query either does not use any text type parameters or uses a
-    data source type where text type parameters are handled safely."""
-
-    last_modified_by: Optional[User] = None
+    display_name: Optional[str] = None
+    """Display name of the query that appears in list views, widget headings, and on the query page."""
 
-    last_modified_by_id: Optional[int] = None
-    """The ID of the user who last saved changes to this query."""
+    id: Optional[str] = None
+    """UUID identifying the query."""
 
-    latest_query_data_id: Optional[str] = None
-    """If there is a cached result for this query and user, this field includes the query result ID. If
-    this query uses parameters, this field is always null."""
+    last_modifier_user_name: Optional[str] = None
+    """Username of the user who last saved changes to this query."""
 
-    name: Optional[str] = None
-    """The title of this query that appears in list views, widget headings, and on the query page."""
+    lifecycle_state: Optional[LifecycleState] = None
+    """Indicates whether the query is trashed."""
 
-    options: Optional[QueryOptions] = None
+    owner_user_name: Optional[str] = None
+    """Username of the user that owns the query."""
 
-    parent: Optional[str] = None
-    """The identifier of the workspace folder containing the object."""
+    parameters: Optional[List[QueryParameter]] = None
+    """List of query parameter definitions."""
 
-    permission_tier: Optional[PermissionLevel] = None
-    """* `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_EDIT`: Can edit the query
-    * `CAN_MANAGE`: Can manage the query"""
+    parent_path: Optional[str] = None
+    """Workspace path of the workspace folder containing the object."""
 
-    query: Optional[str] = None
-    """The text of the query to be run."""
+    query_text: Optional[str] = None
+    """Text of the query to be run."""
 
-    query_hash: Optional[str] = None
-    """A SHA-256 hash of the query text along with the authenticated user ID."""
+    run_as_mode: Optional[RunAsMode] = None
+    """Sets the "Run as" role for the object."""
 
-    run_as_role: Optional[RunAsRole] = None
-    """Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
-    viewer" behavior) or `"owner"` (signifying "run as owner" behavior)"""
+    schema: Optional[str] = None
+    """Name of the schema where this query will be executed."""
 
     tags: Optional[List[str]] = None
 
-    updated_at: Optional[str] = None
-    """The timestamp at which this query was last updated."""
-
-    user: Optional[User] = None
-
-    user_id: Optional[int] = None
-    """The ID of the user who owns the query."""
+    update_time: Optional[str] = None
+    """Timestamp when this query was last updated."""
 
-    visualizations: Optional[List[Visualization]] = None
+    warehouse_id: Optional[str] = None
+    """ID of the SQL warehouse attached to the query."""
 
     def as_dict(self) -> dict:
         """Serializes the Query into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.can_edit is not None: body['can_edit'] = self.can_edit
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.data_source_id is not None: body['data_source_id'] = self.data_source_id
+        if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.create_time is not None: body['create_time'] = self.create_time
         if self.description is not None: body['description'] = self.description
+        if self.display_name is not None: body['display_name'] = self.display_name
         if self.id is not None: body['id'] = self.id
-        if self.is_archived is not None: body['is_archived'] = self.is_archived
-        if self.is_draft is not None: body['is_draft'] = self.is_draft
-        if self.is_favorite is not None: body['is_favorite'] = self.is_favorite
-        if self.is_safe is not None: body['is_safe'] = self.is_safe
-        if self.last_modified_by: body['last_modified_by'] = self.last_modified_by.as_dict()
-        if self.last_modified_by_id is not None: body['last_modified_by_id'] = self.last_modified_by_id
-        if self.latest_query_data_id is not None: body['latest_query_data_id'] = self.latest_query_data_id
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options.as_dict()
-        if self.parent is not None: body['parent'] = self.parent
-        if self.permission_tier is not None: body['permission_tier'] = self.permission_tier.value
-        if self.query is not None: body['query'] = self.query
-        if self.query_hash is not None: body['query_hash'] = self.query_hash
-        if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value
+        if self.last_modifier_user_name is not None:
+            body['last_modifier_user_name'] = self.last_modifier_user_name
+        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value
+        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
+        if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
+        if self.parent_path is not None: body['parent_path'] = self.parent_path
+        if self.query_text is not None: body['query_text'] = self.query_text
+        if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value
+        if self.schema is not None: body['schema'] = self.schema
         if self.tags: body['tags'] = [v for v in self.tags]
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.user: body['user'] = self.user.as_dict()
-        if self.user_id is not None: body['user_id'] = self.user_id
-        if self.visualizations: body['visualizations'] = [v.as_dict() for v in self.visualizations]
+        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Query:
         """Deserializes the Query from a dictionary."""
-        return cls(can_edit=d.get('can_edit', None),
-                   created_at=d.get('created_at', None),
-                   data_source_id=d.get('data_source_id', None),
+        return cls(apply_auto_limit=d.get('apply_auto_limit', None),
+                   catalog=d.get('catalog', None),
+                   create_time=d.get('create_time', None),
                    description=d.get('description', None),
+                   display_name=d.get('display_name', None),
                    id=d.get('id', None),
-                   is_archived=d.get('is_archived', None),
-                   is_draft=d.get('is_draft', None),
-                   is_favorite=d.get('is_favorite', None),
-                   is_safe=d.get('is_safe', None),
-                   last_modified_by=_from_dict(d, 'last_modified_by', User),
-                   last_modified_by_id=d.get('last_modified_by_id', None),
-                   latest_query_data_id=d.get('latest_query_data_id', None),
-                   name=d.get('name', None),
-                   options=_from_dict(d, 'options', QueryOptions),
-                   parent=d.get('parent', None),
-                   permission_tier=_enum(d, 'permission_tier', PermissionLevel),
-                   query=d.get('query', None),
-                   query_hash=d.get('query_hash', None),
-                   run_as_role=_enum(d, 'run_as_role', RunAsRole),
-                   tags=d.get('tags', None),
-                   updated_at=d.get('updated_at', None),
-                   user=_from_dict(d, 'user', User),
-                   user_id=d.get('user_id', None),
-                   visualizations=_repeated_dict(d, 'visualizations', Visualization))
+                   last_modifier_user_name=d.get('last_modifier_user_name', None),
+                   lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState),
+                   owner_user_name=d.get('owner_user_name', None),
+                   parameters=_repeated_dict(d, 'parameters', QueryParameter),
+                   parent_path=d.get('parent_path', None),
+                   query_text=d.get('query_text', None),
+                   run_as_mode=_enum(d, 'run_as_mode', RunAsMode),
+                   schema=d.get('schema', None),
+                   tags=d.get('tags', None),
+                   update_time=d.get('update_time', None),
+                   warehouse_id=d.get('warehouse_id', None))
+
+
+@dataclass
+class QueryBackedValue:
+    """Query-parameter value whose selectable options are supplied by another query."""
+
+    multi_values_options: Optional[MultiValuesOptions] = None
+    """If specified, allows multiple values to be selected for this parameter."""
+
+    query_id: Optional[str] = None
+    """UUID of the query that provides the parameter values."""
+
+    values: Optional[List[str]] = None
+    """List of selected query parameter values."""
+
+    def as_dict(self) -> dict:
+        """Serializes the QueryBackedValue into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.multi_values_options: body['multi_values_options'] = self.multi_values_options.as_dict()
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.values: body['values'] = [v for v in self.values]
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> QueryBackedValue:
+        """Deserializes the QueryBackedValue from a dictionary."""
+        return cls(multi_values_options=_from_dict(d, 'multi_values_options', MultiValuesOptions),
+                   query_id=d.get('query_id', None),
+                   values=d.get('values', None))
 
 
 @dataclass
@@ -2429,9 +3339,8 @@ def from_dict(cls, d: Dict[str, any]) -> QueryEditContent:
 
 @dataclass
 class QueryFilter:
-    """A filter to limit query history results. This field is optional."""
-
     query_start_time_range: Optional[TimeRange] = None
+    """A range filter for query submitted time. The time range must be <= 30 days."""
 
     statement_ids: Optional[List[str]] = None
     """A list of statement IDs."""
@@ -2466,11 +3375,8 @@ def from_dict(cls, d: Dict[str, any]) -> QueryFilter:
 
 @dataclass
 class QueryInfo:
-    can_subscribe_to_live_query: Optional[bool] = None
-    """Reserved for internal use."""
-
     channel_used: Optional[ChannelInfo] = None
-    """Channel information for the SQL warehouse at the time of query execution"""
+    """SQL Warehouse channel information at the time of query execution"""
 
     duration: Optional[int] = None
     """Total execution time of the statement ( excluding result fetch time )."""
@@ -2518,15 +3424,17 @@ class QueryInfo:
     """The number of results returned by the query."""
 
     spark_ui_url: Optional[str] = None
-    """URL to the query plan."""
+    """URL to the Spark UI query plan."""
 
     statement_type: Optional[QueryStatementType] = None
     """Type of statement for this query"""
 
     status: Optional[QueryStatus] = None
-    """Query status with one the following values: * `QUEUED`: Query has been received and queued. *
-    `RUNNING`: Query has started. * `CANCELED`: Query has been cancelled by the user. * `FAILED`:
-    Query has failed. * `FINISHED`: Query has completed."""
+    """Query status with one the following values:
+    
+    - `QUEUED`: Query has been received and queued. - `RUNNING`: Query has started. - `CANCELED`:
+    Query has been cancelled by the user. - `FAILED`: Query has failed. - `FINISHED`: Query has
+    completed."""
 
     user_id: Optional[int] = None
     """The ID of the user who ran the query."""
@@ -2540,8 +3448,6 @@ class QueryInfo:
     def as_dict(self) -> dict:
         """Serializes the QueryInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.can_subscribe_to_live_query is not None:
-            body['canSubscribeToLiveQuery'] = self.can_subscribe_to_live_query
         if self.channel_used: body['channel_used'] = self.channel_used.as_dict()
         if self.duration is not None: body['duration'] = self.duration
         if self.endpoint_id is not None: body['endpoint_id'] = self.endpoint_id
@@ -2569,8 +3475,7 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryInfo:
         """Deserializes the QueryInfo from a dictionary."""
-        return cls(can_subscribe_to_live_query=d.get('canSubscribeToLiveQuery', None),
-                   channel_used=_from_dict(d, 'channel_used', ChannelInfo),
+        return cls(channel_used=_from_dict(d, 'channel_used', ChannelInfo),
                    duration=d.get('duration', None),
                    endpoint_id=d.get('endpoint_id', None),
                    error_message=d.get('error_message', None),
@@ -2605,7 +3510,7 @@ class QueryList:
     page_size: Optional[int] = None
     """The number of queries per page."""
 
-    results: Optional[List[Query]] = None
+    results: Optional[List[LegacyQuery]] = None
     """List of queries returned."""
 
     def as_dict(self) -> dict:
@@ -2623,12 +3528,13 @@ def from_dict(cls, d: Dict[str, any]) -> QueryList:
         return cls(count=d.get('count', None),
                    page=d.get('page', None),
                    page_size=d.get('page_size', None),
-                   results=_repeated_dict(d, 'results', Query))
+                   results=_repeated_dict(d, 'results', LegacyQuery))
 
 
 @dataclass
 class QueryMetrics:
-    """Metrics about query execution."""
+    """A query metric that encapsulates a set of measurements for a single query. Metrics come from the
+    driver and are stored in the history service database."""
 
     compilation_time_ms: Optional[int] = None
     """Time spent loading metadata and optimizing the query, in milliseconds."""
@@ -2636,9 +3542,6 @@ class QueryMetrics:
     execution_time_ms: Optional[int] = None
     """Time spent executing the query, in milliseconds."""
 
-    metadata_time_ms: Optional[int] = None
-    """Reserved for internal use."""
-
     network_sent_bytes: Optional[int] = None
     """Total amount of data sent over the network between executor nodes during shuffle, in bytes."""
 
@@ -2649,9 +3552,6 @@ class QueryMetrics:
     photon_total_time_ms: Optional[int] = None
     """Total execution time for all individual Photon query engine tasks in the query, in milliseconds."""
 
-    planning_time_ms: Optional[int] = None
-    """Reserved for internal use."""
-
     provisioning_queue_start_timestamp: Optional[int] = None
     """Timestamp of when the query was enqueued waiting for a cluster to be provisioned for the
     warehouse. This field is optional and will not appear if the query skipped the provisioning
@@ -2666,9 +3566,6 @@ class QueryMetrics:
     query_compilation_start_timestamp: Optional[int] = None
     """Timestamp of when the underlying compute started compilation of the query."""
 
-    query_execution_time_ms: Optional[int] = None
-    """Reserved for internal use."""
-
     read_bytes: Optional[int] = None
     """Total size of data read by the query, in bytes."""
 
@@ -2676,7 +3573,7 @@ class QueryMetrics:
     """Size of persistent data read from the cache, in bytes."""
 
     read_files_count: Optional[int] = None
-    """Number of files read after pruning."""
+    """Number of files read after pruning"""
 
     read_partitions_count: Optional[int] = None
     """Number of partitions read after pruning."""
@@ -2688,7 +3585,7 @@ class QueryMetrics:
     """Time spent fetching the query results after the execution finished, in milliseconds."""
 
     result_from_cache: Optional[bool] = None
-    """true if the query result was fetched from cache, false otherwise."""
+    """`true` if the query result was fetched from cache, `false` otherwise."""
 
     rows_produced_count: Optional[int] = None
     """Total number of rows returned by the query."""
@@ -2713,20 +3610,16 @@ def as_dict(self) -> dict:
         body = {}
         if self.compilation_time_ms is not None: body['compilation_time_ms'] = self.compilation_time_ms
         if self.execution_time_ms is not None: body['execution_time_ms'] = self.execution_time_ms
-        if self.metadata_time_ms is not None: body['metadata_time_ms'] = self.metadata_time_ms
         if self.network_sent_bytes is not None: body['network_sent_bytes'] = self.network_sent_bytes
         if self.overloading_queue_start_timestamp is not None:
             body['overloading_queue_start_timestamp'] = self.overloading_queue_start_timestamp
         if self.photon_total_time_ms is not None: body['photon_total_time_ms'] = self.photon_total_time_ms
-        if self.planning_time_ms is not None: body['planning_time_ms'] = self.planning_time_ms
         if self.provisioning_queue_start_timestamp is not None:
             body['provisioning_queue_start_timestamp'] = self.provisioning_queue_start_timestamp
         if self.pruned_bytes is not None: body['pruned_bytes'] = self.pruned_bytes
         if self.pruned_files_count is not None: body['pruned_files_count'] = self.pruned_files_count
         if self.query_compilation_start_timestamp is not None:
             body['query_compilation_start_timestamp'] = self.query_compilation_start_timestamp
-        if self.query_execution_time_ms is not None:
-            body['query_execution_time_ms'] = self.query_execution_time_ms
         if self.read_bytes is not None: body['read_bytes'] = self.read_bytes
         if self.read_cache_bytes is not None: body['read_cache_bytes'] = self.read_cache_bytes
         if self.read_files_count is not None: body['read_files_count'] = self.read_files_count
@@ -2747,16 +3640,13 @@ def from_dict(cls, d: Dict[str, any]) -> QueryMetrics:
         """Deserializes the QueryMetrics from a dictionary."""
         return cls(compilation_time_ms=d.get('compilation_time_ms', None),
                    execution_time_ms=d.get('execution_time_ms', None),
-                   metadata_time_ms=d.get('metadata_time_ms', None),
                    network_sent_bytes=d.get('network_sent_bytes', None),
                    overloading_queue_start_timestamp=d.get('overloading_queue_start_timestamp', None),
                    photon_total_time_ms=d.get('photon_total_time_ms', None),
-                   planning_time_ms=d.get('planning_time_ms', None),
                    provisioning_queue_start_timestamp=d.get('provisioning_queue_start_timestamp', None),
                    pruned_bytes=d.get('pruned_bytes', None),
                    pruned_files_count=d.get('pruned_files_count', None),
                    query_compilation_start_timestamp=d.get('query_compilation_start_timestamp', None),
-                   query_execution_time_ms=d.get('query_execution_time_ms', None),
                    read_bytes=d.get('read_bytes', None),
                    read_cache_bytes=d.get('read_cache_bytes', None),
                    read_files_count=d.get('read_files_count', None),
@@ -2804,6 +3694,59 @@ def from_dict(cls, d: Dict[str, any]) -> QueryOptions:
                    schema=d.get('schema', None))
 
 
+@dataclass
+class QueryParameter:
+    date_range_value: Optional[DateRangeValue] = None
+    """Date-range query parameter value. Can only specify one of `dynamic_date_range_value` or
+    `date_range_value`."""
+
+    date_value: Optional[DateValue] = None
+    """Date query parameter value. Can only specify one of `dynamic_date_value` or `date_value`."""
+
+    enum_value: Optional[EnumValue] = None
+    """Dropdown query parameter value."""
+
+    name: Optional[str] = None
+    """Literal parameter marker that appears between double curly braces in the query text."""
+
+    numeric_value: Optional[NumericValue] = None
+    """Numeric query parameter value."""
+
+    query_backed_value: Optional[QueryBackedValue] = None
+    """Query-based dropdown query parameter value."""
+
+    text_value: Optional[TextValue] = None
+    """Text query parameter value."""
+
+    title: Optional[str] = None
+    """Text displayed in the user-facing parameter widget in the UI."""
+
+    def as_dict(self) -> dict:
+        """Serializes the QueryParameter into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.date_range_value: body['date_range_value'] = self.date_range_value.as_dict()
+        if self.date_value: body['date_value'] = self.date_value.as_dict()
+        if self.enum_value: body['enum_value'] = self.enum_value.as_dict()
+        if self.name is not None: body['name'] = self.name
+        if self.numeric_value: body['numeric_value'] = self.numeric_value.as_dict()
+        if self.query_backed_value: body['query_backed_value'] = self.query_backed_value.as_dict()
+        if self.text_value: body['text_value'] = self.text_value.as_dict()
+        if self.title is not None: body['title'] = self.title
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> QueryParameter:
+        """Deserializes the QueryParameter from a dictionary."""
+        return cls(date_range_value=_from_dict(d, 'date_range_value', DateRangeValue),
+                   date_value=_from_dict(d, 'date_value', DateValue),
+                   enum_value=_from_dict(d, 'enum_value', EnumValue),
+                   name=d.get('name', None),
+                   numeric_value=_from_dict(d, 'numeric_value', NumericValue),
+                   query_backed_value=_from_dict(d, 'query_backed_value', QueryBackedValue),
+                   text_value=_from_dict(d, 'text_value', TextValue),
+                   title=d.get('title', None))
+
+
 @dataclass
 class QueryPostContent:
     data_source_id: Optional[str] = None
@@ -2862,7 +3805,6 @@ def from_dict(cls, d: Dict[str, any]) -> QueryPostContent:
 
 
 class QueryStatementType(Enum):
-    """Type of statement for this query"""
 
     ALTER = 'ALTER'
     ANALYZE = 'ANALYZE'
@@ -2889,15 +3831,16 @@ class QueryStatementType(Enum):
 
 
 class QueryStatus(Enum):
-    """Query status with one the following values: * `QUEUED`: Query has been received and queued. *
-    `RUNNING`: Query has started. * `CANCELED`: Query has been cancelled by the user. * `FAILED`:
-    Query has failed. * `FINISHED`: Query has completed."""
+    """Statuses which are also used by OperationStatus in runtime"""
 
     CANCELED = 'CANCELED'
+    COMPILED = 'COMPILED'
+    COMPILING = 'COMPILING'
     FAILED = 'FAILED'
     FINISHED = 'FINISHED'
     QUEUED = 'QUEUED'
     RUNNING = 'RUNNING'
+    STARTED = 'STARTED'
 
 
 @dataclass
@@ -2938,12 +3881,6 @@ def from_dict(cls, d: Dict[str, any]) -> RestoreResponse:
 
 @dataclass
 class ResultData:
-    """Contains the result data of a single chunk when using `INLINE` disposition. When using
-    `EXTERNAL_LINKS` disposition, the array `external_links` is used instead to provide presigned
-    URLs to the result data in cloud storage. Exactly one of these alternatives is used. (While the
-    `external_links` array prepares the API to return multiple links in a single response. Currently
-    only a single link is returned.)"""
-
     byte_count: Optional[int] = None
     """The number of bytes in the result chunk. This field is not available when using `INLINE`
     disposition."""
@@ -3070,6 +4007,12 @@ def from_dict(cls, d: Dict[str, any]) -> ResultSchema:
         return cls(column_count=d.get('column_count', None), columns=_repeated_dict(d, 'columns', ColumnInfo))
 
 
+class RunAsMode(Enum):
+
+    OWNER = 'OWNER'
+    VIEWER = 'VIEWER'
+
+
 class RunAsRole(Enum):
     """Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
     viewer" behavior) or `"owner"` (signifying "run as owner" behavior)"""
@@ -3296,6 +4239,38 @@ def from_dict(cls, d: Dict[str, any]) -> StatementParameterListItem:
         return cls(name=d.get('name', None), type=d.get('type', None), value=d.get('value', None))
 
 
+@dataclass
+class StatementResponse:
+    manifest: Optional[ResultManifest] = None
+    """The result manifest provides schema and metadata for the result set."""
+
+    result: Optional[ResultData] = None
+
+    statement_id: Optional[str] = None
+    """The statement ID is returned upon successfully submitting a SQL statement, and is a required
+    reference for all subsequent calls."""
+
+    status: Optional[StatementStatus] = None
+    """The status response includes execution state and if relevant, error information."""
+
+    def as_dict(self) -> dict:
+        """Serializes the StatementResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.manifest: body['manifest'] = self.manifest.as_dict()
+        if self.result: body['result'] = self.result.as_dict()
+        if self.statement_id is not None: body['statement_id'] = self.statement_id
+        if self.status: body['status'] = self.status.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> StatementResponse:
+        """Deserializes the StatementResponse from a dictionary."""
+        return cls(manifest=_from_dict(d, 'manifest', ResultManifest),
+                   result=_from_dict(d, 'result', ResultData),
+                   statement_id=d.get('statement_id', None),
+                   status=_from_dict(d, 'status', StatementStatus))
+
+
 class StatementState(Enum):
     """Statement execution state: - `PENDING`: waiting for warehouse - `RUNNING`: running -
     `SUCCEEDED`: execution was successful, result data available for fetch - `FAILED`: execution
@@ -3501,13 +4476,29 @@ class TerminationReasonType(Enum):
     SUCCESS = 'SUCCESS'
 
 
+@dataclass
+class TextValue:
+    value: Optional[str] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the TextValue into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.value is not None: body['value'] = self.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> TextValue:
+        """Deserializes the TextValue from a dictionary."""
+        return cls(value=d.get('value', None))
+
+
 @dataclass
 class TimeRange:
     end_time_ms: Optional[int] = None
-    """Limit results to queries that started before this time."""
+    """The end time in milliseconds."""
 
     start_time_ms: Optional[int] = None
-    """Limit results to queries that started after this time."""
+    """The start time in milliseconds."""
 
     def as_dict(self) -> dict:
         """Serializes the TimeRange into a dictionary suitable for use as a JSON request body."""
@@ -3539,6 +4530,184 @@ def from_dict(cls, d: Dict[str, any]) -> TransferOwnershipObjectId:
         return cls(new_owner=d.get('new_owner', None))
 
 
+@dataclass
+class UpdateAlertRequest:
+    update_mask: str
+    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
+    the setting payload will be updated. The field mask needs to be supplied as single string. To
+    specify multiple fields in the field mask, use comma as the separator (no space)."""
+
+    alert: Optional[UpdateAlertRequestAlert] = None
+
+    id: Optional[str] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateAlertRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.alert: body['alert'] = self.alert.as_dict()
+        if self.id is not None: body['id'] = self.id
+        if self.update_mask is not None: body['update_mask'] = self.update_mask
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateAlertRequest:
+        """Deserializes the UpdateAlertRequest from a dictionary."""
+        return cls(alert=_from_dict(d, 'alert', UpdateAlertRequestAlert),
+                   id=d.get('id', None),
+                   update_mask=d.get('update_mask', None))
+
+
+@dataclass
+class UpdateAlertRequestAlert:
+    condition: Optional[AlertCondition] = None
+    """Trigger conditions of the alert."""
+
+    custom_body: Optional[str] = None
+    """Custom body of alert notification, if it exists. See [here] for custom templating instructions.
+    
+    [here]: https://docs.databricks.com/sql/user/alerts/index.html"""
+
+    custom_subject: Optional[str] = None
+    """Custom subject of alert notification, if it exists. This can include email subject entries and
+    Slack notification headers, for example. See [here] for custom templating instructions.
+    
+    [here]: https://docs.databricks.com/sql/user/alerts/index.html"""
+
+    display_name: Optional[str] = None
+    """The display name of the alert."""
+
+    notify_on_ok: Optional[bool] = None
+    """Whether to notify alert subscribers when alert returns back to normal."""
+
+    owner_user_name: Optional[str] = None
+    """The owner's username. This field is set to "Unavailable" if the user has been deleted."""
+
+    query_id: Optional[str] = None
+    """UUID of the query attached to the alert."""
+
+    seconds_to_retrigger: Optional[int] = None
+    """Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it
+    can be triggered again. If 0 or not specified, the alert will not be triggered again."""
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateAlertRequestAlert into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.condition: body['condition'] = self.condition.as_dict()
+        if self.custom_body is not None: body['custom_body'] = self.custom_body
+        if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok
+        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateAlertRequestAlert:
+        """Deserializes the UpdateAlertRequestAlert from a dictionary."""
+        return cls(condition=_from_dict(d, 'condition', AlertCondition),
+                   custom_body=d.get('custom_body', None),
+                   custom_subject=d.get('custom_subject', None),
+                   display_name=d.get('display_name', None),
+                   notify_on_ok=d.get('notify_on_ok', None),
+                   owner_user_name=d.get('owner_user_name', None),
+                   query_id=d.get('query_id', None),
+                   seconds_to_retrigger=d.get('seconds_to_retrigger', None))
+
+
+@dataclass
+class UpdateQueryRequest:
+    update_mask: str
+    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
+    the setting payload will be updated. The field mask needs to be supplied as single string. To
+    specify multiple fields in the field mask, use comma as the separator (no space)."""
+
+    id: Optional[str] = None
+
+    query: Optional[UpdateQueryRequestQuery] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateQueryRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.query: body['query'] = self.query.as_dict()
+        if self.update_mask is not None: body['update_mask'] = self.update_mask
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateQueryRequest:
+        """Deserializes the UpdateQueryRequest from a dictionary."""
+        return cls(id=d.get('id', None),
+                   query=_from_dict(d, 'query', UpdateQueryRequestQuery),
+                   update_mask=d.get('update_mask', None))
+
+
+@dataclass
+class UpdateQueryRequestQuery:
+    apply_auto_limit: Optional[bool] = None
+    """Whether to apply a 1000 row limit to the query result."""
+
+    catalog: Optional[str] = None
+    """Name of the catalog where this query will be executed."""
+
+    description: Optional[str] = None
+    """General description that conveys additional information about this query such as usage notes."""
+
+    display_name: Optional[str] = None
+    """Display name of the query that appears in list views, widget headings, and on the query page."""
+
+    owner_user_name: Optional[str] = None
+    """Username of the user that owns the query."""
+
+    parameters: Optional[List[QueryParameter]] = None
+    """List of query parameter definitions."""
+
+    query_text: Optional[str] = None
+    """Text of the query to be run."""
+
+    run_as_mode: Optional[RunAsMode] = None
+    """Sets the "Run as" role for the object."""
+
+    schema: Optional[str] = None
+    """Name of the schema where this query will be executed."""
+
+    tags: Optional[List[str]] = None
+
+    warehouse_id: Optional[str] = None
+    """ID of the SQL warehouse attached to the query."""
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateQueryRequestQuery into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.description is not None: body['description'] = self.description
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
+        if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
+        if self.query_text is not None: body['query_text'] = self.query_text
+        if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value
+        if self.schema is not None: body['schema'] = self.schema
+        if self.tags: body['tags'] = [v for v in self.tags]
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateQueryRequestQuery:
+        """Deserializes the UpdateQueryRequestQuery from a dictionary."""
+        return cls(apply_auto_limit=d.get('apply_auto_limit', None),
+                   catalog=d.get('catalog', None),
+                   description=d.get('description', None),
+                   display_name=d.get('display_name', None),
+                   owner_user_name=d.get('owner_user_name', None),
+                   parameters=_repeated_dict(d, 'parameters', QueryParameter),
+                   query_text=d.get('query_text', None),
+                   run_as_mode=_enum(d, 'run_as_mode', RunAsMode),
+                   schema=d.get('schema', None),
+                   tags=d.get('tags', None),
+                   warehouse_id=d.get('warehouse_id', None))
+
+
 @dataclass
 class UpdateResponse:
 
@@ -3553,6 +4722,67 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateResponse:
         return cls()
 
 
+@dataclass
+class UpdateVisualizationRequest:
+    update_mask: str
+    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
+    the setting payload will be updated. The field mask needs to be supplied as single string. To
+    specify multiple fields in the field mask, use comma as the separator (no space)."""
+
+    id: Optional[str] = None
+
+    visualization: Optional[UpdateVisualizationRequestVisualization] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateVisualizationRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.update_mask is not None: body['update_mask'] = self.update_mask
+        if self.visualization: body['visualization'] = self.visualization.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateVisualizationRequest:
+        """Deserializes the UpdateVisualizationRequest from a dictionary."""
+        return cls(id=d.get('id', None),
+                   update_mask=d.get('update_mask', None),
+                   visualization=_from_dict(d, 'visualization', UpdateVisualizationRequestVisualization))
+
+
+@dataclass
+class UpdateVisualizationRequestVisualization:
+    display_name: Optional[str] = None
+    """The display name of the visualization."""
+
+    serialized_options: Optional[str] = None
+    """The visualization options varies widely from one visualization type to the next and is
+    unsupported. Databricks does not recommend modifying visualization options directly."""
+
+    serialized_query_plan: Optional[str] = None
+    """The visualization query plan varies widely from one visualization type to the next and is
+    unsupported. Databricks does not recommend modifying the visualization query plan directly."""
+
+    type: Optional[str] = None
+    """The type of visualization: counter, table, funnel, and so on."""
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateVisualizationRequestVisualization into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.serialized_options is not None: body['serialized_options'] = self.serialized_options
+        if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan
+        if self.type is not None: body['type'] = self.type
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateVisualizationRequestVisualization:
+        """Deserializes the UpdateVisualizationRequestVisualization from a dictionary."""
+        return cls(display_name=d.get('display_name', None),
+                   serialized_options=d.get('serialized_options', None),
+                   serialized_query_plan=d.get('serialized_query_plan', None),
+                   type=d.get('type', None))
+
+
 @dataclass
 class User:
     email: Optional[str] = None
@@ -3577,57 +4807,56 @@ def from_dict(cls, d: Dict[str, any]) -> User:
 
 @dataclass
 class Visualization:
-    """The visualization description API changes frequently and is unsupported. You can duplicate a
-    visualization by copying description objects received _from the API_ and then using them to
-    create a new one with a POST request to the same endpoint. Databricks does not recommend
-    constructing ad-hoc visualizations entirely in JSON."""
+    create_time: Optional[str] = None
+    """The timestamp indicating when the visualization was created."""
 
-    created_at: Optional[str] = None
-
-    description: Optional[str] = None
-    """A short description of this visualization. This is not displayed in the UI."""
+    display_name: Optional[str] = None
+    """The display name of the visualization."""
 
     id: Optional[str] = None
-    """The UUID for this visualization."""
+    """UUID identifying the visualization."""
 
-    name: Optional[str] = None
-    """The name of the visualization that appears on dashboards and the query screen."""
+    query_id: Optional[str] = None
+    """UUID of the query that the visualization is attached to."""
 
-    options: Optional[Any] = None
-    """The options object varies widely from one visualization type to the next and is unsupported.
-    Databricks does not recommend modifying visualization settings in JSON."""
+    serialized_options: Optional[str] = None
+    """The visualization options varies widely from one visualization type to the next and is
+    unsupported. Databricks does not recommend modifying visualization options directly."""
 
-    query: Optional[Query] = None
+    serialized_query_plan: Optional[str] = None
+    """The visualization query plan varies widely from one visualization type to the next and is
+    unsupported. Databricks does not recommend modifying the visualization query plan directly."""
 
     type: Optional[str] = None
-    """The type of visualization: chart, table, pivot table, and so on."""
+    """The type of visualization: counter, table, funnel, and so on."""
 
-    updated_at: Optional[str] = None
+    update_time: Optional[str] = None
+    """The timestamp indicating when the visualization was updated."""
 
     def as_dict(self) -> dict:
         """Serializes the Visualization into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.description is not None: body['description'] = self.description
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.display_name is not None: body['display_name'] = self.display_name
         if self.id is not None: body['id'] = self.id
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.query: body['query'] = self.query.as_dict()
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.serialized_options is not None: body['serialized_options'] = self.serialized_options
+        if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan
         if self.type is not None: body['type'] = self.type
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.update_time is not None: body['update_time'] = self.update_time
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Visualization:
         """Deserializes the Visualization from a dictionary."""
-        return cls(created_at=d.get('created_at', None),
-                   description=d.get('description', None),
+        return cls(create_time=d.get('create_time', None),
+                   display_name=d.get('display_name', None),
                    id=d.get('id', None),
-                   name=d.get('name', None),
-                   options=d.get('options', None),
-                   query=_from_dict(d, 'query', Query),
+                   query_id=d.get('query_id', None),
+                   serialized_options=d.get('serialized_options', None),
+                   serialized_query_plan=d.get('serialized_query_plan', None),
                    type=d.get('type', None),
-                   updated_at=d.get('updated_at', None))
+                   update_time=d.get('update_time', None))
 
 
 @dataclass
@@ -3730,6 +4959,7 @@ class WarehousePermissionLevel(Enum):
     """Permission level"""
 
     CAN_MANAGE = 'CAN_MANAGE'
+    CAN_MONITOR = 'CAN_MONITOR'
     CAN_USE = 'CAN_USE'
     IS_OWNER = 'IS_OWNER'
 
@@ -3842,7 +5072,7 @@ class Widget:
 
     options: Optional[WidgetOptions] = None
 
-    visualization: Optional[Visualization] = None
+    visualization: Optional[LegacyVisualization] = None
     """The visualization description API changes frequently and is unsupported. You can duplicate a
     visualization by copying description objects received _from the API_ and then using them to
     create a new one with a POST request to the same endpoint. Databricks does not recommend
@@ -3865,7 +5095,7 @@ def from_dict(cls, d: Dict[str, any]) -> Widget:
         """Deserializes the Widget from a dictionary."""
         return cls(id=d.get('id', None),
                    options=_from_dict(d, 'options', WidgetOptions),
-                   visualization=_from_dict(d, 'visualization', Visualization),
+                   visualization=_from_dict(d, 'visualization', LegacyVisualization),
                    width=d.get('width', None))
 
 
@@ -3959,14 +5189,123 @@ def from_dict(cls, d: Dict[str, any]) -> WidgetPosition:
 
 
 class AlertsAPI:
+    """The alerts API can be used to perform CRUD operations on alerts. An alert is a Databricks SQL object that
+    periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or
+    notification destinations if the condition was met. Alerts can be scheduled using the `sql_task` type of
+    the Jobs API, e.g. :method:jobs/create."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def create(self, *, alert: Optional[CreateAlertRequestAlert] = None) -> Alert:
+        """Create an alert.
+        
+        Creates an alert.
+        
+        :param alert: :class:`CreateAlertRequestAlert` (optional)
+        
+        :returns: :class:`Alert`
+        """
+        body = {}
+        if alert is not None: body['alert'] = alert.as_dict()
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST', '/api/2.0/sql/alerts', body=body, headers=headers)
+        return Alert.from_dict(res)
+
+    def delete(self, id: str):
+        """Delete an alert.
+        
+        Moves an alert to the trash. Trashed alerts immediately disappear from searches and list views, and
+        can no longer trigger. You can restore a trashed alert through the UI. A trashed alert is permanently
+        deleted after 30 days.
+        
+        :param id: str
+        
+        
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        self._api.do('DELETE', f'/api/2.0/sql/alerts/{id}', headers=headers)
+
+    def get(self, id: str) -> Alert:
+        """Get an alert.
+        
+        Gets an alert.
+        
+        :param id: str
+        
+        :returns: :class:`Alert`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET', f'/api/2.0/sql/alerts/{id}', headers=headers)
+        return Alert.from_dict(res)
+
+    def list(self,
+             *,
+             page_size: Optional[int] = None,
+             page_token: Optional[str] = None) -> Iterator[ListAlertsResponseAlert]:
+        """List alerts.
+        
+        Gets a list of alerts accessible to the user, ordered by creation time. **Warning:** Calling this API
+        concurrently 10 or more times could result in throttling, service degradation, or a temporary ban.
+        
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`ListAlertsResponseAlert`
+        """
+
+        query = {}
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        headers = {'Accept': 'application/json', }
+
+        while True:
+            json = self._api.do('GET', '/api/2.0/sql/alerts', query=query, headers=headers)
+            if 'results' in json:
+                for v in json['results']:
+                    yield ListAlertsResponseAlert.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+
+    def update(self, id: str, update_mask: str, *, alert: Optional[UpdateAlertRequestAlert] = None) -> Alert:
+        """Update an alert.
+        
+        Updates an alert.
+        
+        :param id: str
+        :param update_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as single string. To specify
+          multiple fields in the field mask, use comma as the separator (no space).
+        :param alert: :class:`UpdateAlertRequestAlert` (optional)
+        
+        :returns: :class:`Alert`
+        """
+        body = {}
+        if alert is not None: body['alert'] = alert.as_dict()
+        if update_mask is not None: body['update_mask'] = update_mask
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('PATCH', f'/api/2.0/sql/alerts/{id}', body=body, headers=headers)
+        return Alert.from_dict(res)
+
+
+class AlertsLegacyAPI:
     """The alerts API can be used to perform CRUD operations on alerts. An alert is a Databricks SQL object that
     periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or
     notification destinations if the condition was met. Alerts can be scheduled using the `sql_task` type of
     the Jobs API, e.g. :method:jobs/create.
     
-    **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+    **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn
+    more]
     
-    [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources"""
+    [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html"""
 
     def __init__(self, api_client):
         self._api = api_client
@@ -3977,15 +5316,16 @@ def create(self,
                query_id: str,
                *,
                parent: Optional[str] = None,
-               rearm: Optional[int] = None) -> Alert:
+               rearm: Optional[int] = None) -> LegacyAlert:
         """Create an alert.
         
         Creates an alert. An alert is a Databricks SQL object that periodically runs a query, evaluates a
         condition of its result, and notifies users or notification destinations if the condition was met.
         
-        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/create
+        instead. [Learn more]
         
-        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
         
         :param name: str
           Name of the alert.
@@ -3999,7 +5339,7 @@ def create(self,
           Number of seconds after being triggered before the alert rearms itself and can be triggered again.
           If `null`, alert will never be triggered again.
         
-        :returns: :class:`Alert`
+        :returns: :class:`LegacyAlert`
         """
         body = {}
         if name is not None: body['name'] = name
@@ -4010,7 +5350,7 @@ def create(self,
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('POST', '/api/2.0/preview/sql/alerts', body=body, headers=headers)
-        return Alert.from_dict(res)
+        return LegacyAlert.from_dict(res)
 
     def delete(self, alert_id: str):
         """Delete an alert.
@@ -4018,9 +5358,10 @@ def delete(self, alert_id: str):
         Deletes an alert. Deleted alerts are no longer accessible and cannot be restored. **Note**: Unlike
         queries and dashboards, alerts cannot be moved to the trash.
         
-        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/delete
+        instead. [Learn more]
         
-        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
         
         :param alert_id: str
         
@@ -4031,41 +5372,43 @@ def delete(self, alert_id: str):
 
         self._api.do('DELETE', f'/api/2.0/preview/sql/alerts/{alert_id}', headers=headers)
 
-    def get(self, alert_id: str) -> Alert:
+    def get(self, alert_id: str) -> LegacyAlert:
         """Get an alert.
         
         Gets an alert.
         
-        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/get
+        instead. [Learn more]
         
-        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
         
         :param alert_id: str
         
-        :returns: :class:`Alert`
+        :returns: :class:`LegacyAlert`
         """
 
         headers = {'Accept': 'application/json', }
 
         res = self._api.do('GET', f'/api/2.0/preview/sql/alerts/{alert_id}', headers=headers)
-        return Alert.from_dict(res)
+        return LegacyAlert.from_dict(res)
 
-    def list(self) -> Iterator[Alert]:
+    def list(self) -> Iterator[LegacyAlert]:
         """Get alerts.
         
         Gets a list of alerts.
         
-        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/list
+        instead. [Learn more]
         
-        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
         
-        :returns: Iterator over :class:`Alert`
+        :returns: Iterator over :class:`LegacyAlert`
         """
 
         headers = {'Accept': 'application/json', }
 
         res = self._api.do('GET', '/api/2.0/preview/sql/alerts', headers=headers)
-        return [Alert.from_dict(v) for v in res]
+        return [LegacyAlert.from_dict(v) for v in res]
 
     def update(self,
                alert_id: str,
@@ -4078,9 +5421,10 @@ def update(self,
         
         Updates an alert.
         
-        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/update
+        instead. [Learn more]
         
-        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
         
         :param alert_id: str
         :param name: str
@@ -4380,9 +5724,9 @@ class DataSourcesAPI:
     advise you to use any text editor, REST client, or `grep` to search the response from this API for the
     name of your SQL warehouse as it appears in Databricks SQL.
     
-    **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+    **Note**: A new version of the Databricks SQL API is now available. [Learn more]
     
-    [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources"""
+    [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html"""
 
     def __init__(self, api_client):
         self._api = api_client
@@ -4394,9 +5738,10 @@ def list(self) -> Iterator[DataSource]:
         API response are enumerated for clarity. However, you need only a SQL warehouse's `id` to create new
         queries against it.
         
-        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:warehouses/list
+        instead. [Learn more]
         
-        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
         
         :returns: Iterator over :class:`DataSource`
         """
@@ -4420,9 +5765,9 @@ class DbsqlPermissionsAPI:
     
     - `CAN_MANAGE`: Allows all actions: read, run, edit, delete, modify permissions (superset of `CAN_RUN`)
     
-    **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+    **Note**: A new version of the Databricks SQL API is now available. [Learn more]
     
-    [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources"""
+    [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html"""
 
     def __init__(self, api_client):
         self._api = api_client
@@ -4432,9 +5777,10 @@ def get(self, object_type: ObjectTypePlural, object_id: str) -> GetResponse:
         
         Gets a JSON representation of the access control list (ACL) for a specified object.
         
-        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+        **Note**: A new version of the Databricks SQL API is now available. Please use
+        :method:workspace/getpermissions instead. [Learn more]
         
-        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
         
         :param object_type: :class:`ObjectTypePlural`
           The type of object permissions to check.
@@ -4461,9 +5807,10 @@ def set(self,
         Sets the access control list (ACL) for a specified object. This operation will complete rewrite the
         ACL.
         
-        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+        **Note**: A new version of the Databricks SQL API is now available. Please use
+        :method:workspace/setpermissions instead. [Learn more]
         
-        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
         
         :param object_type: :class:`ObjectTypePlural`
           The type of object permission to set.
@@ -4493,9 +5840,10 @@ def transfer_ownership(self,
         
         Transfers ownership of a dashboard, query, or alert to an active user. Requires an admin API key.
         
-        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+        **Note**: A new version of the Databricks SQL API is now available. For queries and alerts, please use
+        :method:queries/update and :method:alerts/update respectively instead. [Learn more]
         
-        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
         
         :param object_type: :class:`OwnableObjectType`
           The type of object on which to change ownership.
@@ -4518,13 +5866,154 @@ def transfer_ownership(self,
 
 
 class QueriesAPI:
+    """The queries API can be used to perform CRUD operations on queries. A query is a Databricks SQL object that
+    includes the target SQL warehouse, query text, name, description, tags, and parameters. Queries can be
+    scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def create(self, *, query: Optional[CreateQueryRequestQuery] = None) -> Query:
+        """Create a query.
+        
+        Creates a query.
+        
+        :param query: :class:`CreateQueryRequestQuery` (optional)
+        
+        :returns: :class:`Query`
+        """
+        body = {}
+        if query is not None: body['query'] = query.as_dict()
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST', '/api/2.0/sql/queries', body=body, headers=headers)
+        return Query.from_dict(res)
+
+    def delete(self, id: str):
+        """Delete a query.
+        
+        Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and
+        cannot be used for alerts. You can restore a trashed query through the UI. A trashed query is
+        permanently deleted after 30 days.
+        
+        :param id: str
+        
+        
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        self._api.do('DELETE', f'/api/2.0/sql/queries/{id}', headers=headers)
+
+    def get(self, id: str) -> Query:
+        """Get a query.
+        
+        Gets a query.
+        
+        :param id: str
+        
+        :returns: :class:`Query`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET', f'/api/2.0/sql/queries/{id}', headers=headers)
+        return Query.from_dict(res)
+
+    def list(self,
+             *,
+             page_size: Optional[int] = None,
+             page_token: Optional[str] = None) -> Iterator[ListQueryObjectsResponseQuery]:
+        """List queries.
+        
+        Gets a list of queries accessible to the user, ordered by creation time. **Warning:** Calling this API
+        concurrently 10 or more times could result in throttling, service degradation, or a temporary ban.
+        
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`ListQueryObjectsResponseQuery`
+        """
+
+        query = {}
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        headers = {'Accept': 'application/json', }
+
+        while True:
+            json = self._api.do('GET', '/api/2.0/sql/queries', query=query, headers=headers)
+            if 'results' in json:
+                for v in json['results']:
+                    yield ListQueryObjectsResponseQuery.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+
+    def list_visualizations(self,
+                            id: str,
+                            *,
+                            page_size: Optional[int] = None,
+                            page_token: Optional[str] = None) -> Iterator[Visualization]:
+        """List visualizations on a query.
+        
+        Gets a list of visualizations on a query.
+        
+        :param id: str
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`Visualization`
+        """
+
+        query = {}
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        headers = {'Accept': 'application/json', }
+
+        while True:
+            json = self._api.do('GET',
+                                f'/api/2.0/sql/queries/{id}/visualizations',
+                                query=query,
+                                headers=headers)
+            if 'results' in json:
+                for v in json['results']:
+                    yield Visualization.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+
+    def update(self, id: str, update_mask: str, *, query: Optional[UpdateQueryRequestQuery] = None) -> Query:
+        """Update a query.
+        
+        Updates a query.
+        
+        :param id: str
+        :param update_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as a single string. To specify
+          multiple fields in the field mask, use a comma as the separator (no space).
+        :param query: :class:`UpdateQueryRequestQuery` (optional)
+        
+        :returns: :class:`Query`
+        """
+        body = {}
+        if query is not None: body['query'] = query.as_dict()
+        if update_mask is not None: body['update_mask'] = update_mask
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('PATCH', f'/api/2.0/sql/queries/{id}', body=body, headers=headers)
+        return Query.from_dict(res)
+
+
+class QueriesLegacyAPI:
     """These endpoints are used for CRUD operations on query definitions. Query definitions include the target
     SQL warehouse, query text, name, description, tags, parameters, and visualizations. Queries can be
     scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create.
     
-    **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+    **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn
+    more]
     
-    [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources"""
+    [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html"""
 
     def __init__(self, api_client):
         self._api = api_client
@@ -4538,7 +6027,7 @@ def create(self,
                parent: Optional[str] = None,
                query: Optional[str] = None,
                run_as_role: Optional[RunAsRole] = None,
-               tags: Optional[List[str]] = None) -> Query:
+               tags: Optional[List[str]] = None) -> LegacyQuery:
         """Create a new query definition.
         
         Creates a new query definition. Queries created with this endpoint belong to the authenticated user
@@ -4550,9 +6039,10 @@ def create(self,
         
         **Note**: You cannot add a visualization until you create the query.
         
-        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/create
+        instead. [Learn more]
         
-        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
         
         :param data_source_id: str (optional)
           Data source ID maps to the ID of the data source used by the resource and is distinct from the
@@ -4576,7 +6066,7 @@ def create(self,
           viewer" behavior) or `"owner"` (signifying "run as owner" behavior)
         :param tags: List[str] (optional)
         
-        :returns: :class:`Query`
+        :returns: :class:`LegacyQuery`
         """
         body = {}
         if data_source_id is not None: body['data_source_id'] = data_source_id
@@ -4590,7 +6080,7 @@ def create(self,
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('POST', '/api/2.0/preview/sql/queries', body=body, headers=headers)
-        return Query.from_dict(res)
+        return LegacyQuery.from_dict(res)
 
     def delete(self, query_id: str):
         """Delete a query.
@@ -4598,9 +6088,10 @@ def delete(self, query_id: str):
         Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and
         they cannot be used for alerts. The trash is deleted after 30 days.
         
-        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/delete
+        instead. [Learn more]
         
-        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
         
         :param query_id: str
         
@@ -4611,32 +6102,33 @@ def delete(self, query_id: str):
 
         self._api.do('DELETE', f'/api/2.0/preview/sql/queries/{query_id}', headers=headers)
 
-    def get(self, query_id: str) -> Query:
+    def get(self, query_id: str) -> LegacyQuery:
         """Get a query definition.
         
         Retrieve a query object definition along with contextual permissions information about the currently
         authenticated user.
         
-        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/get
+        instead. [Learn more]
         
-        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
         
         :param query_id: str
         
-        :returns: :class:`Query`
+        :returns: :class:`LegacyQuery`
         """
 
         headers = {'Accept': 'application/json', }
 
         res = self._api.do('GET', f'/api/2.0/preview/sql/queries/{query_id}', headers=headers)
-        return Query.from_dict(res)
+        return LegacyQuery.from_dict(res)
 
     def list(self,
              *,
              order: Optional[str] = None,
              page: Optional[int] = None,
              page_size: Optional[int] = None,
-             q: Optional[str] = None) -> Iterator[Query]:
+             q: Optional[str] = None) -> Iterator[LegacyQuery]:
         """Get a list of queries.
         
         Gets a list of queries. Optionally, this list can be filtered by a search term.
@@ -4644,9 +6136,10 @@ def list(self,
         **Warning**: Calling this API concurrently 10 or more times could result in throttling, service
         degradation, or a temporary ban.
         
-        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/list
+        instead. [Learn more]
         
-        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
         
         :param order: str (optional)
           Name of query attribute to order by. Default sort order is ascending. Append a dash (`-`) to order
@@ -4669,7 +6162,7 @@ def list(self,
         :param q: str (optional)
           Full text search term
         
-        :returns: Iterator over :class:`Query`
+        :returns: Iterator over :class:`LegacyQuery`
         """
 
         query = {}
@@ -4690,7 +6183,7 @@ def list(self,
                     if i in seen:
                         continue
                     seen.add(i)
-                    yield Query.from_dict(v)
+                    yield LegacyQuery.from_dict(v)
             if 'results' not in json or not json['results']:
                 return
             query['page'] += 1
@@ -4701,9 +6194,10 @@ def restore(self, query_id: str):
         Restore a query that has been moved to the trash. A restored query appears in list views and searches.
         You can use restored queries for alerts.
         
-        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+        **Note**: A new version of the Databricks SQL API is now available. Please see the latest version.
+        [Learn more]
         
-        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
         
         :param query_id: str
         
@@ -4723,16 +6217,17 @@ def update(self,
                options: Optional[Any] = None,
                query: Optional[str] = None,
                run_as_role: Optional[RunAsRole] = None,
-               tags: Optional[List[str]] = None) -> Query:
+               tags: Optional[List[str]] = None) -> LegacyQuery:
         """Change a query definition.
         
         Modify this query definition.
         
         **Note**: You cannot undo this operation.
         
-        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/update
+        instead. [Learn more]
         
-        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
         
         :param query_id: str
         :param data_source_id: str (optional)
@@ -4755,7 +6250,7 @@ def update(self,
           viewer" behavior) or `"owner"` (signifying "run as owner" behavior)
         :param tags: List[str] (optional)
         
-        :returns: :class:`Query`
+        :returns: :class:`LegacyQuery`
         """
         body = {}
         if data_source_id is not None: body['data_source_id'] = data_source_id
@@ -4768,11 +6263,12 @@ def update(self,
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('POST', f'/api/2.0/preview/sql/queries/{query_id}', body=body, headers=headers)
-        return Query.from_dict(res)
+        return LegacyQuery.from_dict(res)
 
 
 class QueryHistoryAPI:
-    """Access the history of queries through SQL warehouses."""
+    """A service responsible for storing and retrieving the list of queries run against SQL endpoints and
+    serverless compute."""
 
     def __init__(self, api_client):
         self._api = api_client
@@ -4782,25 +6278,28 @@ def list(self,
              filter_by: Optional[QueryFilter] = None,
              include_metrics: Optional[bool] = None,
              max_results: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[QueryInfo]:
+             page_token: Optional[str] = None) -> ListQueriesResponse:
         """List Queries.
         
-        List the history of queries through SQL warehouses.
+        List the history of queries through SQL warehouses, and serverless compute.
         
-        You can filter by user ID, warehouse ID, status, and time range.
+        You can filter by user ID, warehouse ID, status, and time range. Most recently started queries are
+        returned first (up to max_results in request). The pagination token returned in response can be used
+        to list subsequent query statuses.
         
         :param filter_by: :class:`QueryFilter` (optional)
           A filter to limit query history results. This field is optional.
         :param include_metrics: bool (optional)
-          Whether to include metrics about query.
+          Whether to include the query metrics with each query. Only use this for a small subset of queries
+          (max_results). Defaults to false.
         :param max_results: int (optional)
-          Limit the number of results returned in one page. The default is 100.
+          Limit the number of results returned in one page. Must be less than 1000 and the default is 100.
         :param page_token: str (optional)
           A token that can be used to get the next page of results. The token can contains characters that
           need to be encoded before using it in a URL. For example, the character '+' needs to be replaced by
-          %2B.
+          %2B. This field is optional.
         
-        :returns: Iterator over :class:`QueryInfo`
+        :returns: :class:`ListQueriesResponse`
         """
 
         query = {}
@@ -4810,19 +6309,84 @@ def list(self,
         if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
-        while True:
-            json = self._api.do('GET', '/api/2.0/sql/history/queries', query=query, headers=headers)
-            if 'res' in json:
-                for v in json['res']:
-                    yield QueryInfo.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
-                return
-            query['page_token'] = json['next_page_token']
+        res = self._api.do('GET', '/api/2.0/sql/history/queries', query=query, headers=headers)
+        return ListQueriesResponse.from_dict(res)
 
 
 class QueryVisualizationsAPI:
+    """This is an evolving API that facilitates the addition and removal of visualizations from existing queries
+    in the Databricks Workspace. Data structures can change over time."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def create(self,
+               *,
+               visualization: Optional[CreateVisualizationRequestVisualization] = None) -> Visualization:
+        """Add a visualization to a query.
+        
+        Adds a visualization to a query.
+        
+        :param visualization: :class:`CreateVisualizationRequestVisualization` (optional)
+        
+        :returns: :class:`Visualization`
+        """
+        body = {}
+        if visualization is not None: body['visualization'] = visualization.as_dict()
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST', '/api/2.0/sql/visualizations', body=body, headers=headers)
+        return Visualization.from_dict(res)
+
+    def delete(self, id: str):
+        """Remove a visualization.
+        
+        Removes a visualization.
+        
+        :param id: str
+        
+        
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        self._api.do('DELETE', f'/api/2.0/sql/visualizations/{id}', headers=headers)
+
+    def update(self,
+               id: str,
+               update_mask: str,
+               *,
+               visualization: Optional[UpdateVisualizationRequestVisualization] = None) -> Visualization:
+        """Update a visualization.
+        
+        Updates a visualization.
+        
+        :param id: str
+        :param update_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as a single string. To specify
+          multiple fields in the field mask, use a comma as the separator (no space).
+        :param visualization: :class:`UpdateVisualizationRequestVisualization` (optional)
+        
+        :returns: :class:`Visualization`
+        """
+        body = {}
+        if update_mask is not None: body['update_mask'] = update_mask
+        if visualization is not None: body['visualization'] = visualization.as_dict()
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('PATCH', f'/api/2.0/sql/visualizations/{id}', body=body, headers=headers)
+        return Visualization.from_dict(res)
+
+
+class QueryVisualizationsLegacyAPI:
     """This is an evolving API that facilitates the addition and removal of vizualisations from existing queries
-    within the Databricks Workspace. Data structures may change over time."""
+    within the Databricks Workspace. Data structures may change over time.
+    
+    **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn
+    more]
+    
+    [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html"""
 
     def __init__(self, api_client):
         self._api = api_client
@@ -4833,9 +6397,16 @@ def create(self,
                options: Any,
                *,
                description: Optional[str] = None,
-               name: Optional[str] = None) -> Visualization:
+               name: Optional[str] = None) -> LegacyVisualization:
         """Add visualization to a query.
         
+        Creates visualization in the query.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please use
+        :method:queryvisualizations/create instead. [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
         :param query_id: str
           The identifier returned by :method:queries/create
         :param type: str
@@ -4848,7 +6419,7 @@ def create(self,
         :param name: str (optional)
           The name of the visualization that appears on dashboards and the query screen.
         
-        :returns: :class:`Visualization`
+        :returns: :class:`LegacyVisualization`
         """
         body = {}
         if description is not None: body['description'] = description
@@ -4859,11 +6430,18 @@ def create(self,
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('POST', '/api/2.0/preview/sql/visualizations', body=body, headers=headers)
-        return Visualization.from_dict(res)
+        return LegacyVisualization.from_dict(res)
 
     def delete(self, id: str):
         """Remove visualization.
         
+        Removes a visualization from the query.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please use
+        :method:queryvisualizations/delete instead. [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
         :param id: str
           Widget ID returned by :method:queryvizualisations/create
         
@@ -4881,11 +6459,18 @@ def update(self,
                description: Optional[str] = None,
                name: Optional[str] = None,
                options: Optional[Any] = None,
-               query: Optional[Query] = None,
+               query: Optional[LegacyQuery] = None,
                type: Optional[str] = None,
-               updated_at: Optional[str] = None) -> Visualization:
+               updated_at: Optional[str] = None) -> LegacyVisualization:
         """Edit existing visualization.
         
+        Updates visualization in the query.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please use
+        :method:queryvisualizations/update instead. [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
         :param id: str
           The UUID for this visualization.
         :param created_at: str (optional)
@@ -4896,12 +6481,12 @@ def update(self,
         :param options: Any (optional)
           The options object varies widely from one visualization type to the next and is unsupported.
           Databricks does not recommend modifying visualization settings in JSON.
-        :param query: :class:`Query` (optional)
+        :param query: :class:`LegacyQuery` (optional)
         :param type: str (optional)
           The type of visualization: chart, table, pivot table, and so on.
         :param updated_at: str (optional)
         
-        :returns: :class:`Visualization`
+        :returns: :class:`LegacyVisualization`
         """
         body = {}
         if created_at is not None: body['created_at'] = created_at
@@ -4914,7 +6499,7 @@ def update(self,
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('POST', f'/api/2.0/preview/sql/visualizations/{id}', body=body, headers=headers)
-        return Visualization.from_dict(res)
+        return LegacyVisualization.from_dict(res)
 
 
 class StatementExecutionAPI:
@@ -4996,7 +6581,9 @@ class StatementExecutionAPI:
     are approximate, occur server-side, and cannot account for things such as caller delays and network
     latency from caller to service. - The system will auto-close a statement after one hour if the client
     stops polling and thus you must poll at least once an hour. - The results are only available for one hour
-    after success; polling does not extend this.
+    after success; polling does not extend this. - The SQL Execution API must be used for the entire lifecycle
+    of the statement. For example, you cannot use the Jobs API to execute the command, and then the SQL
+    Execution API to cancel it.
     
     [Apache Arrow Columnar]: https://arrow.apache.org/overview/
     [Databricks SQL Statement Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html"""
@@ -5033,7 +6620,7 @@ def execute_statement(self,
                           parameters: Optional[List[StatementParameterListItem]] = None,
                           row_limit: Optional[int] = None,
                           schema: Optional[str] = None,
-                          wait_timeout: Optional[str] = None) -> ExecuteStatementResponse:
+                          wait_timeout: Optional[str] = None) -> StatementResponse:
         """Execute a SQL statement.
         
         :param statement: str
@@ -5053,26 +6640,6 @@ def execute_statement(self,
           
           [`USE CATALOG`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-catalog.html
         :param disposition: :class:`Disposition` (optional)
-          The fetch disposition provides two modes of fetching results: `INLINE` and `EXTERNAL_LINKS`.
-          
-          Statements executed with `INLINE` disposition will return result data inline, in `JSON_ARRAY`
-          format, in a series of chunks. If a given statement produces a result set with a size larger than 25
-          MiB, that statement execution is aborted, and no result set will be available.
-          
-          **NOTE** Byte limits are computed based upon internal representations of the result set data, and
-          might not match the sizes visible in JSON responses.
-          
-          Statements executed with `EXTERNAL_LINKS` disposition will return result data as external links:
-          URLs that point to cloud storage internal to the workspace. Using `EXTERNAL_LINKS` disposition
-          allows statements to generate arbitrarily sized result sets for fetching up to 100 GiB. The
-          resulting links have two important properties:
-          
-          1. They point to resources _external_ to the Databricks compute; therefore any associated
-          authentication information (typically a personal access token, OAuth token, or similar) _must be
-          removed_ when fetching from these links.
-          
-          2. These are presigned URLs with a specific expiration, indicated in the response. The behavior when
-          attempting to use an expired link is cloud specific.
         :param format: :class:`Format` (optional)
           Statement execution supports three result formats: `JSON_ARRAY` (default), `ARROW_STREAM`, and
           `CSV`.
@@ -5160,7 +6727,7 @@ def execute_statement(self,
           the statement takes longer to execute, `on_wait_timeout` determines what should happen after the
           timeout is reached.
         
-        :returns: :class:`ExecuteStatementResponse`
+        :returns: :class:`StatementResponse`
         """
         body = {}
         if byte_limit is not None: body['byte_limit'] = byte_limit
@@ -5177,9 +6744,9 @@ def execute_statement(self,
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('POST', '/api/2.0/sql/statements/', body=body, headers=headers)
-        return ExecuteStatementResponse.from_dict(res)
+        return StatementResponse.from_dict(res)
 
-    def get_statement(self, statement_id: str) -> GetStatementResponse:
+    def get_statement(self, statement_id: str) -> StatementResponse:
         """Get status, manifest, and result first chunk.
         
         This request can be used to poll for the statement's status. When the `status.state` field is
@@ -5194,13 +6761,13 @@ def get_statement(self, statement_id: str) -> GetStatementResponse:
           The statement ID is returned upon successfully submitting a SQL statement, and is a required
           reference for all subsequent calls.
         
-        :returns: :class:`GetStatementResponse`
+        :returns: :class:`StatementResponse`
         """
 
         headers = {'Accept': 'application/json', }
 
         res = self._api.do('GET', f'/api/2.0/sql/statements/{statement_id}', headers=headers)
-        return GetStatementResponse.from_dict(res)
+        return StatementResponse.from_dict(res)
 
     def get_statement_result_chunk_n(self, statement_id: str, chunk_index: int) -> ResultData:
         """Get result chunk by index.
@@ -5322,7 +6889,8 @@ def create(
           The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) before it
           is automatically stopped.
           
-          Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop.
+          Supported values: - Must be >= 0 mins for serverless warehouses - Must be == 0 or >= 10 mins for
+          non-serverless warehouses - 0 indicates no autostop.
           
           Defaults to 120 mins
         :param channel: :class:`Channel` (optional)
diff --git a/databricks/sdk/service/vectorsearch.py b/databricks/sdk/service/vectorsearch.py
index 2f0ceaab8..d6c28b840 100755
--- a/databricks/sdk/service/vectorsearch.py
+++ b/databricks/sdk/service/vectorsearch.py
@@ -231,6 +231,11 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteIndexResponse:
 
 @dataclass
 class DeltaSyncVectorIndexSpecRequest:
+    columns_to_sync: Optional[List[str]] = None
+    """[Optional] Select the columns to sync with the vector index. If you leave this field blank, all
+    columns from the source table are synced with the index. The primary key column and embedding
+    source column or embedding vector column are always synced."""
+
     embedding_source_columns: Optional[List[EmbeddingSourceColumn]] = None
     """The columns that contain the embedding source."""
 
@@ -256,6 +261,7 @@ class DeltaSyncVectorIndexSpecRequest:
     def as_dict(self) -> dict:
         """Serializes the DeltaSyncVectorIndexSpecRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.columns_to_sync: body['columns_to_sync'] = [v for v in self.columns_to_sync]
         if self.embedding_source_columns:
             body['embedding_source_columns'] = [v.as_dict() for v in self.embedding_source_columns]
         if self.embedding_vector_columns:
@@ -269,7 +275,8 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeltaSyncVectorIndexSpecRequest:
         """Deserializes the DeltaSyncVectorIndexSpecRequest from a dictionary."""
-        return cls(embedding_source_columns=_repeated_dict(d, 'embedding_source_columns',
+        return cls(columns_to_sync=d.get('columns_to_sync', None),
+                   embedding_source_columns=_repeated_dict(d, 'embedding_source_columns',
                                                            EmbeddingSourceColumn),
                    embedding_vector_columns=_repeated_dict(d, 'embedding_vector_columns',
                                                            EmbeddingVectorColumn),
diff --git a/databricks/sdk/service/workspace.py b/databricks/sdk/service/workspace.py
index 7be221323..7c8bfbd5e 100755
--- a/databricks/sdk/service/workspace.py
+++ b/databricks/sdk/service/workspace.py
@@ -64,11 +64,11 @@ def from_dict(cls, d: Dict[str, any]) -> AzureKeyVaultSecretScopeMetadata:
 
 
 @dataclass
-class CreateCredentials:
+class CreateCredentialsRequest:
     git_provider: str
-    """Git provider. This field is case-insensitive. The available Git providers are gitHub,
-    bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer,
-    gitLabEnterpriseEdition and awsCodeCommit."""
+    """Git provider. This field is case-insensitive. The available Git providers are `gitHub`,
+    `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`,
+    `gitLabEnterpriseEdition` and `awsCodeCommit`."""
 
     git_username: Optional[str] = None
     """The username or email provided with your Git provider account, depending on which provider you
@@ -79,13 +79,12 @@ class CreateCredentials:
 
     personal_access_token: Optional[str] = None
     """The personal access token used to authenticate to the corresponding Git provider. For certain
-    providers, support may exist for other types of scoped access tokens. [Learn more]. The personal
-    access token used to authenticate to the corresponding Git
+    providers, support may exist for other types of scoped access tokens. [Learn more].
     
     [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html"""
 
     def as_dict(self) -> dict:
-        """Serializes the CreateCredentials into a dictionary suitable for use as a JSON request body."""
+        """Serializes the CreateCredentialsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.git_provider is not None: body['git_provider'] = self.git_provider
         if self.git_username is not None: body['git_username'] = self.git_username
@@ -93,8 +92,8 @@ def as_dict(self) -> dict:
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CreateCredentials:
-        """Deserializes the CreateCredentials from a dictionary."""
+    def from_dict(cls, d: Dict[str, any]) -> CreateCredentialsRequest:
+        """Deserializes the CreateCredentialsRequest from a dictionary."""
         return cls(git_provider=d.get('git_provider', None),
                    git_username=d.get('git_username', None),
                    personal_access_token=d.get('personal_access_token', None))
@@ -102,20 +101,15 @@ def from_dict(cls, d: Dict[str, any]) -> CreateCredentials:
 
 @dataclass
 class CreateCredentialsResponse:
-    credential_id: Optional[int] = None
+    credential_id: int
     """ID of the credential object in the workspace."""
 
-    git_provider: Optional[str] = None
-    """Git provider. This field is case-insensitive. The available Git providers are gitHub,
-    bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer,
-    gitLabEnterpriseEdition and awsCodeCommit."""
+    git_provider: str
+    """The Git provider associated with the credential."""
 
     git_username: Optional[str] = None
-    """The username or email provided with your Git provider account, depending on which provider you
-    are using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or
-    username may be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS
-    CodeCommit, BitBucket or BitBucket Server, username must be used. For all other providers please
-    see your provider's Personal Access Token authentication documentation to see what is supported."""
+    """The username or email provided with your Git provider account and associated with the
+    credential."""
 
     def as_dict(self) -> dict:
         """Serializes the CreateCredentialsResponse into a dictionary suitable for use as a JSON request body."""
@@ -134,25 +128,25 @@ def from_dict(cls, d: Dict[str, any]) -> CreateCredentialsResponse:
 
 
 @dataclass
-class CreateRepo:
+class CreateRepoRequest:
     url: str
     """URL of the Git repository to be linked."""
 
     provider: str
-    """Git provider. This field is case-insensitive. The available Git providers are gitHub,
-    bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer,
-    gitLabEnterpriseEdition and awsCodeCommit."""
+    """Git provider. This field is case-insensitive. The available Git providers are `gitHub`,
+    `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`,
+    `gitLabEnterpriseEdition` and `awsCodeCommit`."""
 
     path: Optional[str] = None
     """Desired path for the repo in the workspace. Almost any path in the workspace can be chosen. If
-    repo is created in /Repos, path must be in the format /Repos/{folder}/{repo-name}."""
+    repo is created in `/Repos`, path must be in the format `/Repos/{folder}/{repo-name}`."""
 
     sparse_checkout: Optional[SparseCheckout] = None
     """If specified, the repo will be created with sparse checkout enabled. You cannot enable/disable
     sparse checkout after the repo is created."""
 
     def as_dict(self) -> dict:
-        """Serializes the CreateRepo into a dictionary suitable for use as a JSON request body."""
+        """Serializes the CreateRepoRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.path is not None: body['path'] = self.path
         if self.provider is not None: body['provider'] = self.provider
@@ -161,14 +155,61 @@ def as_dict(self) -> dict:
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CreateRepo:
-        """Deserializes the CreateRepo from a dictionary."""
+    def from_dict(cls, d: Dict[str, any]) -> CreateRepoRequest:
+        """Deserializes the CreateRepoRequest from a dictionary."""
         return cls(path=d.get('path', None),
                    provider=d.get('provider', None),
                    sparse_checkout=_from_dict(d, 'sparse_checkout', SparseCheckout),
                    url=d.get('url', None))
 
 
+@dataclass
+class CreateRepoResponse:
+    branch: Optional[str] = None
+    """Branch that the Git folder (repo) is checked out to."""
+
+    head_commit_id: Optional[str] = None
+    """SHA-1 hash representing the commit ID of the current HEAD of the Git folder (repo)."""
+
+    id: Optional[int] = None
+    """ID of the Git folder (repo) object in the workspace."""
+
+    path: Optional[str] = None
+    """Path of the Git folder (repo) in the workspace."""
+
+    provider: Optional[str] = None
+    """Git provider of the linked Git repository."""
+
+    sparse_checkout: Optional[SparseCheckout] = None
+    """Sparse checkout settings for the Git folder (repo)."""
+
+    url: Optional[str] = None
+    """URL of the linked Git repository."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CreateRepoResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.branch is not None: body['branch'] = self.branch
+        if self.head_commit_id is not None: body['head_commit_id'] = self.head_commit_id
+        if self.id is not None: body['id'] = self.id
+        if self.path is not None: body['path'] = self.path
+        if self.provider is not None: body['provider'] = self.provider
+        if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout.as_dict()
+        if self.url is not None: body['url'] = self.url
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CreateRepoResponse:
+        """Deserializes the CreateRepoResponse from a dictionary."""
+        return cls(branch=d.get('branch', None),
+                   head_commit_id=d.get('head_commit_id', None),
+                   id=d.get('id', None),
+                   path=d.get('path', None),
+                   provider=d.get('provider', None),
+                   sparse_checkout=_from_dict(d, 'sparse_checkout', SparseCheckout),
+                   url=d.get('url', None))
+
+
 @dataclass
 class CreateScope:
     scope: str
@@ -219,20 +260,15 @@ def from_dict(cls, d: Dict[str, any]) -> CreateScopeResponse:
 
 @dataclass
 class CredentialInfo:
-    credential_id: Optional[int] = None
+    credential_id: int
     """ID of the credential object in the workspace."""
 
     git_provider: Optional[str] = None
-    """Git provider. This field is case-insensitive. The available Git providers are gitHub,
-    gitHubOAuth, bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer,
-    gitLabEnterpriseEdition and awsCodeCommit."""
+    """The Git provider associated with the credential."""
 
     git_username: Optional[str] = None
-    """The username or email provided with your Git provider account, depending on which provider you
-    are using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or
-    username may be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS
-    CodeCommit, BitBucket or BitBucket Server, username must be used. For all other providers please
-    see your provider's Personal Access Token authentication documentation to see what is supported."""
+    """The username or email provided with your Git provider account and associated with the
+    credential."""
 
     def as_dict(self) -> dict:
         """Serializes the CredentialInfo into a dictionary suitable for use as a JSON request body."""
@@ -308,6 +344,34 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteAclResponse:
         return cls()
 
 
+@dataclass
+class DeleteCredentialsResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the DeleteCredentialsResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DeleteCredentialsResponse:
+        """Deserializes the DeleteCredentialsResponse from a dictionary."""
+        return cls()
+
+
+@dataclass
+class DeleteRepoResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the DeleteRepoResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DeleteRepoResponse:
+        """Deserializes the DeleteRepoResponse from a dictionary."""
+        return cls()
+
+
 @dataclass
 class DeleteResponse:
 
@@ -422,18 +486,30 @@ def from_dict(cls, d: Dict[str, any]) -> ExportResponse:
 
 @dataclass
 class GetCredentialsResponse:
-    credentials: Optional[List[CredentialInfo]] = None
+    credential_id: int
+    """ID of the credential object in the workspace."""
+
+    git_provider: Optional[str] = None
+    """The Git provider associated with the credential."""
+
+    git_username: Optional[str] = None
+    """The username or email provided with your Git provider account and associated with the
+    credential."""
 
     def as_dict(self) -> dict:
         """Serializes the GetCredentialsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.credentials: body['credentials'] = [v.as_dict() for v in self.credentials]
+        if self.credential_id is not None: body['credential_id'] = self.credential_id
+        if self.git_provider is not None: body['git_provider'] = self.git_provider
+        if self.git_username is not None: body['git_username'] = self.git_username
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetCredentialsResponse:
         """Deserializes the GetCredentialsResponse from a dictionary."""
-        return cls(credentials=_repeated_dict(d, 'credentials', CredentialInfo))
+        return cls(credential_id=d.get('credential_id', None),
+                   git_provider=d.get('git_provider', None),
+                   git_username=d.get('git_username', None))
 
 
 @dataclass
@@ -453,6 +529,53 @@ def from_dict(cls, d: Dict[str, any]) -> GetRepoPermissionLevelsResponse:
         return cls(permission_levels=_repeated_dict(d, 'permission_levels', RepoPermissionsDescription))
 
 
+@dataclass
+class GetRepoResponse:
+    branch: Optional[str] = None
+    """Branch that the local version of the repo is checked out to."""
+
+    head_commit_id: Optional[str] = None
+    """SHA-1 hash representing the commit ID of the current HEAD of the repo."""
+
+    id: Optional[int] = None
+    """ID of the Git folder (repo) object in the workspace."""
+
+    path: Optional[str] = None
+    """Path of the Git folder (repo) in the workspace."""
+
+    provider: Optional[str] = None
+    """Git provider of the linked Git repository."""
+
+    sparse_checkout: Optional[SparseCheckout] = None
+    """Sparse checkout settings for the Git folder (repo)."""
+
+    url: Optional[str] = None
+    """URL of the linked Git repository."""
+
+    def as_dict(self) -> dict:
+        """Serializes the GetRepoResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.branch is not None: body['branch'] = self.branch
+        if self.head_commit_id is not None: body['head_commit_id'] = self.head_commit_id
+        if self.id is not None: body['id'] = self.id
+        if self.path is not None: body['path'] = self.path
+        if self.provider is not None: body['provider'] = self.provider
+        if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout.as_dict()
+        if self.url is not None: body['url'] = self.url
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GetRepoResponse:
+        """Deserializes the GetRepoResponse from a dictionary."""
+        return cls(branch=d.get('branch', None),
+                   head_commit_id=d.get('head_commit_id', None),
+                   id=d.get('id', None),
+                   path=d.get('path', None),
+                   provider=d.get('provider', None),
+                   sparse_checkout=_from_dict(d, 'sparse_checkout', SparseCheckout),
+                   url=d.get('url', None))
+
+
 @dataclass
 class GetSecretResponse:
     key: Optional[str] = None
@@ -605,13 +728,31 @@ def from_dict(cls, d: Dict[str, any]) -> ListAclsResponse:
         return cls(items=_repeated_dict(d, 'items', AclItem))
 
 
+@dataclass
+class ListCredentialsResponse:
+    credentials: Optional[List[CredentialInfo]] = None
+    """List of credentials."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListCredentialsResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.credentials: body['credentials'] = [v.as_dict() for v in self.credentials]
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListCredentialsResponse:
+        """Deserializes the ListCredentialsResponse from a dictionary."""
+        return cls(credentials=_repeated_dict(d, 'credentials', CredentialInfo))
+
+
 @dataclass
 class ListReposResponse:
     next_page_token: Optional[str] = None
-    """Token that can be specified as a query parameter to the GET /repos endpoint to retrieve the next
-    page of results."""
+    """Token that can be specified as a query parameter to the `GET /repos` endpoint to retrieve the
+    next page of results."""
 
     repos: Optional[List[RepoInfo]] = None
+    """List of Git folders (repos)."""
 
     def as_dict(self) -> dict:
         """Serializes the ListReposResponse into a dictionary suitable for use as a JSON request body."""
@@ -940,28 +1081,28 @@ def from_dict(cls, d: Dict[str, any]) -> RepoAccessControlResponse:
 
 @dataclass
 class RepoInfo:
+    """Git folder (repo) information."""
+
     branch: Optional[str] = None
-    """Branch that the local version of the repo is checked out to."""
+    """Name of the current git branch of the git folder (repo)."""
 
     head_commit_id: Optional[str] = None
-    """SHA-1 hash representing the commit ID of the current HEAD of the repo."""
+    """Current git commit id of the git folder (repo)."""
 
     id: Optional[int] = None
-    """ID of the repo object in the workspace."""
+    """Id of the git folder (repo) in the Workspace."""
 
     path: Optional[str] = None
-    """Desired path for the repo in the workspace. Almost any path in the workspace can be chosen. If
-    repo is created in /Repos, path must be in the format /Repos/{folder}/{repo-name}."""
+    """Root path of the git folder (repo) in the Workspace."""
 
     provider: Optional[str] = None
-    """Git provider. This field is case-insensitive. The available Git providers are gitHub,
-    bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer,
-    gitLabEnterpriseEdition and awsCodeCommit."""
+    """Git provider of the remote git repository, e.g. `gitHub`."""
 
     sparse_checkout: Optional[SparseCheckout] = None
+    """Sparse checkout config for the git folder (repo)."""
 
     url: Optional[str] = None
-    """URL of the Git repository to be linked."""
+    """URL of the remote git repository."""
 
     def as_dict(self) -> dict:
         """Serializes the RepoInfo into a dictionary suitable for use as a JSON request body."""
@@ -1146,8 +1287,12 @@ def from_dict(cls, d: Dict[str, any]) -> SecretScope:
 
 @dataclass
 class SparseCheckout:
+    """Sparse checkout configuration; it contains options like cone patterns."""
+
     patterns: Optional[List[str]] = None
-    """List of patterns to include for sparse checkout."""
+    """List of sparse checkout cone patterns, see [cone mode handling] for details.
+    
+    [cone mode handling]: https://git-scm.com/docs/git-sparse-checkout#_internalscone_mode_handling"""
 
     def as_dict(self) -> dict:
         """Serializes the SparseCheckout into a dictionary suitable for use as a JSON request body."""
@@ -1163,8 +1308,12 @@ def from_dict(cls, d: Dict[str, any]) -> SparseCheckout:
 
 @dataclass
 class SparseCheckoutUpdate:
+    """Sparse checkout configuration; it contains options like cone patterns."""
+
     patterns: Optional[List[str]] = None
-    """List of patterns to include for sparse checkout."""
+    """List of sparse checkout cone patterns, see [cone mode handling] for details.
+    
+    [cone mode handling]: https://git-scm.com/docs/git-sparse-checkout#_internalscone_mode_handling"""
 
     def as_dict(self) -> dict:
         """Serializes the SparseCheckoutUpdate into a dictionary suitable for use as a JSON request body."""
@@ -1179,15 +1328,15 @@ def from_dict(cls, d: Dict[str, any]) -> SparseCheckoutUpdate:
 
 
 @dataclass
-class UpdateCredentials:
+class UpdateCredentialsRequest:
+    git_provider: str
+    """Git provider. This field is case-insensitive. The available Git providers are `gitHub`,
+    `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`,
+    `gitLabEnterpriseEdition` and `awsCodeCommit`."""
+
     credential_id: Optional[int] = None
     """The ID for the corresponding credential to access."""
 
-    git_provider: Optional[str] = None
-    """Git provider. This field is case-insensitive. The available Git providers are gitHub,
-    bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer,
-    gitLabEnterpriseEdition and awsCodeCommit."""
-
     git_username: Optional[str] = None
     """The username or email provided with your Git provider account, depending on which provider you
     are using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or
@@ -1197,13 +1346,12 @@ class UpdateCredentials:
 
     personal_access_token: Optional[str] = None
     """The personal access token used to authenticate to the corresponding Git provider. For certain
-    providers, support may exist for other types of scoped access tokens. [Learn more]. The personal
-    access token used to authenticate to the corresponding Git
+    providers, support may exist for other types of scoped access tokens. [Learn more].
     
     [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html"""
 
     def as_dict(self) -> dict:
-        """Serializes the UpdateCredentials into a dictionary suitable for use as a JSON request body."""
+        """Serializes the UpdateCredentialsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.credential_id is not None: body['credential_id'] = self.credential_id
         if self.git_provider is not None: body['git_provider'] = self.git_provider
@@ -1212,8 +1360,8 @@ def as_dict(self) -> dict:
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> UpdateCredentials:
-        """Deserializes the UpdateCredentials from a dictionary."""
+    def from_dict(cls, d: Dict[str, any]) -> UpdateCredentialsRequest:
+        """Deserializes the UpdateCredentialsRequest from a dictionary."""
         return cls(credential_id=d.get('credential_id', None),
                    git_provider=d.get('git_provider', None),
                    git_username=d.get('git_username', None),
@@ -1221,12 +1369,26 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateCredentials:
 
 
 @dataclass
-class UpdateRepo:
+class UpdateCredentialsResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateCredentialsResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateCredentialsResponse:
+        """Deserializes the UpdateCredentialsResponse from a dictionary."""
+        return cls()
+
+
+@dataclass
+class UpdateRepoRequest:
     branch: Optional[str] = None
     """Branch that the local version of the repo is checked out to."""
 
     repo_id: Optional[int] = None
-    """The ID for the corresponding repo to access."""
+    """ID of the Git folder (repo) object in the workspace."""
 
     sparse_checkout: Optional[SparseCheckoutUpdate] = None
     """If specified, update the sparse checkout settings. The update will fail if sparse checkout is
@@ -1238,7 +1400,7 @@ class UpdateRepo:
     branch instead of the detached HEAD."""
 
     def as_dict(self) -> dict:
-        """Serializes the UpdateRepo into a dictionary suitable for use as a JSON request body."""
+        """Serializes the UpdateRepoRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.branch is not None: body['branch'] = self.branch
         if self.repo_id is not None: body['repo_id'] = self.repo_id
@@ -1247,8 +1409,8 @@ def as_dict(self) -> dict:
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> UpdateRepo:
-        """Deserializes the UpdateRepo from a dictionary."""
+    def from_dict(cls, d: Dict[str, any]) -> UpdateRepoRequest:
+        """Deserializes the UpdateRepoRequest from a dictionary."""
         return cls(branch=d.get('branch', None),
                    repo_id=d.get('repo_id', None),
                    sparse_checkout=_from_dict(d, 'sparse_checkout', SparseCheckoutUpdate),
@@ -1256,16 +1418,16 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateRepo:
 
 
 @dataclass
-class UpdateResponse:
+class UpdateRepoResponse:
 
     def as_dict(self) -> dict:
-        """Serializes the UpdateResponse into a dictionary suitable for use as a JSON request body."""
+        """Serializes the UpdateRepoResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> UpdateResponse:
-        """Deserializes the UpdateResponse from a dictionary."""
+    def from_dict(cls, d: Dict[str, any]) -> UpdateRepoResponse:
+        """Deserializes the UpdateRepoResponse from a dictionary."""
         return cls()
 
 
@@ -1471,9 +1633,9 @@ def create(self,
         existing credentials, or the DELETE endpoint to delete existing credentials.
         
         :param git_provider: str
-          Git provider. This field is case-insensitive. The available Git providers are gitHub,
-          bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer,
-          gitLabEnterpriseEdition and awsCodeCommit.
+          Git provider. This field is case-insensitive. The available Git providers are `gitHub`,
+          `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`,
+          `gitLabEnterpriseEdition` and `awsCodeCommit`.
         :param git_username: str (optional)
           The username or email provided with your Git provider account, depending on which provider you are
           using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or username may
@@ -1482,8 +1644,7 @@ def create(self,
           Access Token authentication documentation to see what is supported.
         :param personal_access_token: str (optional)
           The personal access token used to authenticate to the corresponding Git provider. For certain
-          providers, support may exist for other types of scoped access tokens. [Learn more]. The personal
-          access token used to authenticate to the corresponding Git
+          providers, support may exist for other types of scoped access tokens. [Learn more].
           
           [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html
         
@@ -1509,11 +1670,11 @@ def delete(self, credential_id: int):
         
         """
 
-        headers = {}
+        headers = {'Accept': 'application/json', }
 
         self._api.do('DELETE', f'/api/2.0/git-credentials/{credential_id}', headers=headers)
 
-    def get(self, credential_id: int) -> CredentialInfo:
+    def get(self, credential_id: int) -> GetCredentialsResponse:
         """Get a credential entry.
         
         Gets the Git credential with the specified credential ID.
@@ -1521,13 +1682,13 @@ def get(self, credential_id: int) -> CredentialInfo:
         :param credential_id: int
           The ID for the corresponding credential to access.
         
-        :returns: :class:`CredentialInfo`
+        :returns: :class:`GetCredentialsResponse`
         """
 
         headers = {'Accept': 'application/json', }
 
         res = self._api.do('GET', f'/api/2.0/git-credentials/{credential_id}', headers=headers)
-        return CredentialInfo.from_dict(res)
+        return GetCredentialsResponse.from_dict(res)
 
     def list(self) -> Iterator[CredentialInfo]:
         """Get Git credentials.
@@ -1540,13 +1701,13 @@ def list(self) -> Iterator[CredentialInfo]:
         headers = {'Accept': 'application/json', }
 
         json = self._api.do('GET', '/api/2.0/git-credentials', headers=headers)
-        parsed = GetCredentialsResponse.from_dict(json).credentials
+        parsed = ListCredentialsResponse.from_dict(json).credentials
         return parsed if parsed is not None else []
 
     def update(self,
                credential_id: int,
+               git_provider: str,
                *,
-               git_provider: Optional[str] = None,
                git_username: Optional[str] = None,
                personal_access_token: Optional[str] = None):
         """Update a credential.
@@ -1555,10 +1716,10 @@ def update(self,
         
         :param credential_id: int
           The ID for the corresponding credential to access.
-        :param git_provider: str (optional)
-          Git provider. This field is case-insensitive. The available Git providers are gitHub,
-          bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer,
-          gitLabEnterpriseEdition and awsCodeCommit.
+        :param git_provider: str
+          Git provider. This field is case-insensitive. The available Git providers are `gitHub`,
+          `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`,
+          `gitLabEnterpriseEdition` and `awsCodeCommit`.
         :param git_username: str (optional)
           The username or email provided with your Git provider account, depending on which provider you are
           using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or username may
@@ -1567,8 +1728,7 @@ def update(self,
           Access Token authentication documentation to see what is supported.
         :param personal_access_token: str (optional)
           The personal access token used to authenticate to the corresponding Git provider. For certain
-          providers, support may exist for other types of scoped access tokens. [Learn more]. The personal
-          access token used to authenticate to the corresponding Git
+          providers, support may exist for other types of scoped access tokens. [Learn more].
           
           [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html
         
@@ -1602,7 +1762,7 @@ def create(self,
                provider: str,
                *,
                path: Optional[str] = None,
-               sparse_checkout: Optional[SparseCheckout] = None) -> RepoInfo:
+               sparse_checkout: Optional[SparseCheckout] = None) -> CreateRepoResponse:
         """Create a repo.
         
         Creates a repo in the workspace and links it to the remote Git repo specified. Note that repos created
@@ -1611,17 +1771,17 @@ def create(self,
         :param url: str
           URL of the Git repository to be linked.
         :param provider: str
-          Git provider. This field is case-insensitive. The available Git providers are gitHub,
-          bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer,
-          gitLabEnterpriseEdition and awsCodeCommit.
+          Git provider. This field is case-insensitive. The available Git providers are `gitHub`,
+          `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`,
+          `gitLabEnterpriseEdition` and `awsCodeCommit`.
         :param path: str (optional)
           Desired path for the repo in the workspace. Almost any path in the workspace can be chosen. If repo
-          is created in /Repos, path must be in the format /Repos/{folder}/{repo-name}.
+          is created in `/Repos`, path must be in the format `/Repos/{folder}/{repo-name}`.
         :param sparse_checkout: :class:`SparseCheckout` (optional)
           If specified, the repo will be created with sparse checkout enabled. You cannot enable/disable
           sparse checkout after the repo is created.
         
-        :returns: :class:`RepoInfo`
+        :returns: :class:`CreateRepoResponse`
         """
         body = {}
         if path is not None: body['path'] = path
@@ -1631,7 +1791,7 @@ def create(self,
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('POST', '/api/2.0/repos', body=body, headers=headers)
-        return RepoInfo.from_dict(res)
+        return CreateRepoResponse.from_dict(res)
 
     def delete(self, repo_id: int):
         """Delete a repo.
@@ -1639,30 +1799,30 @@ def delete(self, repo_id: int):
         Deletes the specified repo.
         
         :param repo_id: int
-          The ID for the corresponding repo to access.
+          ID of the Git folder (repo) object in the workspace.
         
         
         """
 
-        headers = {}
+        headers = {'Accept': 'application/json', }
 
         self._api.do('DELETE', f'/api/2.0/repos/{repo_id}', headers=headers)
 
-    def get(self, repo_id: int) -> RepoInfo:
+    def get(self, repo_id: int) -> GetRepoResponse:
         """Get a repo.
         
         Returns the repo with the given repo ID.
         
         :param repo_id: int
-          The ID for the corresponding repo to access.
+          ID of the Git folder (repo) object in the workspace.
         
-        :returns: :class:`RepoInfo`
+        :returns: :class:`GetRepoResponse`
         """
 
         headers = {'Accept': 'application/json', }
 
         res = self._api.do('GET', f'/api/2.0/repos/{repo_id}', headers=headers)
-        return RepoInfo.from_dict(res)
+        return GetRepoResponse.from_dict(res)
 
     def get_permission_levels(self, repo_id: str) -> GetRepoPermissionLevelsResponse:
         """Get repo permission levels.
@@ -1702,15 +1862,16 @@ def list(self,
              path_prefix: Optional[str] = None) -> Iterator[RepoInfo]:
         """Get repos.
         
-        Returns repos that the calling user has Manage permissions on. Results are paginated with each page
-        containing twenty repos.
+        Returns repos that the calling user has Manage permissions on. Use `next_page_token` to iterate
+        through additional pages.
         
         :param next_page_token: str (optional)
           Token used to get the next page of results. If not specified, returns the first page of results as
           well as a next page token if there are more results.
         :param path_prefix: str (optional)
-          Filters repos that have paths starting with the given path prefix. If not provided repos from /Repos
-          will be served.
+          Filters repos that have paths starting with the given path prefix. If not provided or when provided
+          an effectively empty prefix (`/` or `/Workspace`) Git folders (repos) from `/Workspace/Repos` will
+          be served.
         
         :returns: Iterator over :class:`RepoInfo`
         """
@@ -1764,7 +1925,7 @@ def update(self,
         branch.
         
         :param repo_id: int
-          The ID for the corresponding repo to access.
+          ID of the Git folder (repo) object in the workspace.
         :param branch: str (optional)
           Branch that the local version of the repo is checked out to.
         :param sparse_checkout: :class:`SparseCheckoutUpdate` (optional)
diff --git a/databricks/sdk/useragent.py b/databricks/sdk/useragent.py
new file mode 100644
index 000000000..5b15d2822
--- /dev/null
+++ b/databricks/sdk/useragent.py
@@ -0,0 +1,151 @@
+import copy
+import logging
+import os
+import platform
+import re
+from typing import List, Optional, Tuple
+
+from .version import __version__
+
+# Constants
+RUNTIME_KEY = 'runtime'
+CICD_KEY = 'cicd'
+AUTH_KEY = 'auth'
+
+_product_name = "unknown"
+_product_version = "0.0.0"
+
+logger = logging.getLogger("databricks.sdk.useragent")
+
+_extra = []
+
+# Precompiled regex patterns
+alphanum_pattern = re.compile(r'^[a-zA-Z0-9_.+-]+$')
+
+# official https://semver.org/ recommendation: https://regex101.com/r/Ly7O1x/
+# with addition of "x" wildcards for minor/patch versions. Also, patch version may be omitted.
+semver_pattern = re.compile(r"^"
+                            r"(?P<major>0|[1-9]\d*)\.(?P<minor>x|0|[1-9]\d*)(\.(?P<patch>x|0|[1-9x]\d*))?"
+                            r"(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)"
+                            r"(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?"
+                            r"(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$")
+
+
+def _match_alphanum(value):
+    if not alphanum_pattern.match(value):
+        raise ValueError(f"Invalid alphanumeric value: {value}")
+
+
+def _match_semver(value):
+    if not semver_pattern.match(value):
+        raise ValueError(f"Invalid semantic version: {value}")
+
+
+def _match_alphanum_or_semver(value):
+    if not alphanum_pattern.match(value) and not semver_pattern.match(value):
+        raise ValueError(f"Invalid value: {value}")
+
+
+def product() -> Tuple[str, str]:
+    """Return the global product name and version that will be submitted to Databricks on every request."""
+    return _product_name, _product_version
+
+
+def with_product(name: str, version: str):
+    """Change the product name and version that will be submitted to Databricks on every request."""
+    global _product_name, _product_version
+    _match_alphanum(name)
+    _match_semver(version)
+    logger.debug(f'Changing product from {_product_name}/{_product_version} to {name}/{version}')
+    _product_name = name
+    _product_version = version
+
+
+def _reset_product():
+    """[Internal API] Reset product name and version to the default values.
+
+    Used for testing purposes only."""
+    global _product_name, _product_version
+    _product_name = "unknown"
+    _product_version = "0.0.0"
+
+
+def with_extra(key: str, value: str):
+    """Add extra metadata to all requests submitted to Databricks.
+
+    User-specified extra metadata can be inserted into request headers to provide additional context to Databricks
+    about usage of different tools in the Databricks ecosystem. This can be useful for collecting telemetry about SDK
+    usage from tools that are built on top of the SDK.
+    """
+    global _extra
+    _match_alphanum(key)
+    _match_alphanum_or_semver(value)
+    logger.debug(f'Adding {key}/{value} to User-Agent')
+    _extra.append((key, value))
+
+
+def extra() -> List[Tuple[str, str]]:
+    """Returns the current extra metadata that will be submitted to Databricks on every request."""
+    return copy.deepcopy(_extra)
+
+
+def _reset_extra(extra: List[Tuple[str, str]]):
+    """[INTERNAL API] Reset the extra metadata to a new list.
+
+    Prefer using with_user_agent_extra instead of this method to avoid overwriting other information included in the
+    user agent."""
+    global _extra
+    _extra = extra
+
+
+def with_partner(partner: str):
+    """Adds the given partner to the metadata submitted to Databricks on every request."""
+    with_extra("partner", partner)
+
+
+def _get_upstream_user_agent_info() -> List[Tuple[str, str]]:
+    """[INTERNAL API] Return the upstream product and version if specified in the system environment."""
+    product = os.getenv("DATABRICKS_SDK_UPSTREAM")
+    version = os.getenv("DATABRICKS_SDK_UPSTREAM_VERSION")
+    if not product or not version:
+        return []
+    return [("upstream", product), ("upstream-version", version)]
+
+
+def _get_runtime_info() -> List[Tuple[str, str]]:
+    """[INTERNAL API] Return the runtime version if running on Databricks."""
+    if 'DATABRICKS_RUNTIME_VERSION' in os.environ:
+        runtime_version = os.environ['DATABRICKS_RUNTIME_VERSION']
+        if runtime_version != '':
+            runtime_version = _sanitize_header_value(runtime_version)
+            return [('runtime', runtime_version)]
+    return []
+
+
+def _sanitize_header_value(value: str) -> str:
+    value = value.replace(' ', '-')
+    value = value.replace('/', '-')
+    return value
+
+
+def to_string(alternate_product_info: Optional[Tuple[str, str]] = None,
+              other_info: Optional[List[Tuple[str, str]]] = None) -> str:
+    """Compute the full User-Agent header.
+
+    The User-Agent header contains the product name, version, and other metadata that is submitted to Databricks on
+    every request. There are some static components that are included by default in every request, like the SDK version,
+    OS name, and Python version. Other components can be optionally overridden or augmented in DatabricksConfig, like
+    the product name, product version, and extra user-defined information."""
+    base = []
+    if alternate_product_info:
+        base.append(alternate_product_info)
+    else:
+        base.append((_product_name, _product_version))
+    base.extend([("databricks-sdk-py", __version__), ("python", platform.python_version()),
+                 ("os", platform.uname().system.lower()), ])
+    if other_info:
+        base.extend(other_info)
+    base.extend(_extra)
+    base.extend(_get_upstream_user_agent_info())
+    base.extend(_get_runtime_info())
+    return " ".join(f"{k}/{v}" for k, v in base)
diff --git a/databricks/sdk/version.py b/databricks/sdk/version.py
index 9093e4e46..aae5aca67 100644
--- a/databricks/sdk/version.py
+++ b/databricks/sdk/version.py
@@ -1 +1 @@
-__version__ = '0.29.0'
+__version__ = '0.36.0'
diff --git a/docs/account/billing/billable_usage.rst b/docs/account/billing/billable_usage.rst
index 51c2eb2fa..181b91cc3 100644
--- a/docs/account/billing/billable_usage.rst
+++ b/docs/account/billing/billable_usage.rst
@@ -18,7 +18,7 @@
             
             a = AccountClient()
             
-            resp = a.billable_usage.download(start_month="2023-01", end_month="2023-02")
+            resp = a.billable_usage.download(start_month="2024-08", end_month="2024-09")
 
         Return billable usage logs.
         
diff --git a/docs/account/billing/budgets.rst b/docs/account/billing/budgets.rst
index 85f7ee133..edba0a733 100644
--- a/docs/account/billing/budgets.rst
+++ b/docs/account/billing/budgets.rst
@@ -4,10 +4,11 @@
 
 .. py:class:: BudgetsAPI
 
-    These APIs manage budget configuration including notifications for exceeding a budget for a period. They
-    can also retrieve the status of each budget.
+    These APIs manage budget configurations for this account. Budgets enable you to monitor usage across your
+    account. You can set up budgets to either track account-wide spending, or apply filters to track the
+    spending of specific teams, projects, or workspaces.
 
-    .. py:method:: create(budget: Budget) -> WrappedBudgetWithStatus
+    .. py:method:: create(budget: CreateBudgetConfigurationBudget) -> CreateBudgetConfigurationResponse
 
 
         Usage:
@@ -21,40 +22,55 @@
             
             a = AccountClient()
             
-            created = a.budgets.create(budget=billing.Budget(
-                name=f'sdk-{time.time_ns()}',
-                filter="tag.tagName = 'all'",
-                period="1 month",
-                start_date="2022-01-01",
-                target_amount="100",
-                alerts=[billing.BudgetAlert(email_notifications=["admin@example.com"], min_percentage=50)]))
+            created = a.budgets.create(budget=billing.CreateBudgetConfigurationBudget(
+                display_name=f'sdk-{time.time_ns()}',
+                filter=billing.BudgetConfigurationFilter(tags=[
+                    billing.BudgetConfigurationFilterTagClause(key="tagName",
+                                                               value=billing.BudgetConfigurationFilterClause(
+                                                                   operator=billing.BudgetConfigurationFilterOperator.IN,
+                                                                   values=["all"]))
+                ]),
+                alert_configurations=[
+                    billing.CreateBudgetConfigurationBudgetAlertConfigurations(
+                        time_period=billing.AlertConfigurationTimePeriod.MONTH,
+                        quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD,
+                        trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED,
+                        quantity_threshold="100",
+                        action_configurations=[
+                            billing.CreateBudgetConfigurationBudgetActionConfigurations(
+                                action_type=billing.ActionConfigurationType.EMAIL_NOTIFICATION,
+                                target="admin@example.com")
+                        ])
+                ]))
             
             # cleanup
-            a.budgets.delete(budget_id=created.budget.budget_id)
+            a.budgets.delete(budget_id=created.budget.budget_configuration_id)
 
-        Create a new budget.
+        Create new budget.
         
-        Creates a new budget in the specified account.
+        Create a new budget configuration for an account. For full details, see
+        https://docs.databricks.com/en/admin/account-settings/budgets.html.
         
-        :param budget: :class:`Budget`
-          Budget configuration to be created.
+        :param budget: :class:`CreateBudgetConfigurationBudget`
+          Properties of the new budget configuration.
         
-        :returns: :class:`WrappedBudgetWithStatus`
+        :returns: :class:`CreateBudgetConfigurationResponse`
         
 
     .. py:method:: delete(budget_id: str)
 
         Delete budget.
         
-        Deletes the budget specified by its UUID.
+        Deletes a budget configuration for an account. Both account and budget configuration are specified by
+        ID. This cannot be undone.
         
         :param budget_id: str
-          Budget ID
+          The Databricks budget configuration ID.
         
         
         
 
-    .. py:method:: get(budget_id: str) -> WrappedBudgetWithStatus
+    .. py:method:: get(budget_id: str) -> GetBudgetConfigurationResponse
 
 
         Usage:
@@ -68,31 +84,43 @@
             
             a = AccountClient()
             
-            created = a.budgets.create(budget=billing.Budget(
-                name=f'sdk-{time.time_ns()}',
-                filter="tag.tagName = 'all'",
-                period="1 month",
-                start_date="2022-01-01",
-                target_amount="100",
-                alerts=[billing.BudgetAlert(email_notifications=["admin@example.com"], min_percentage=50)]))
-            
-            by_id = a.budgets.get(budget_id=created.budget.budget_id)
+            created = a.budgets.create(budget=billing.CreateBudgetConfigurationBudget(
+                display_name=f'sdk-{time.time_ns()}',
+                filter=billing.BudgetConfigurationFilter(tags=[
+                    billing.BudgetConfigurationFilterTagClause(key="tagName",
+                                                               value=billing.BudgetConfigurationFilterClause(
+                                                                   operator=billing.BudgetConfigurationFilterOperator.IN,
+                                                                   values=["all"]))
+                ]),
+                alert_configurations=[
+                    billing.CreateBudgetConfigurationBudgetAlertConfigurations(
+                        time_period=billing.AlertConfigurationTimePeriod.MONTH,
+                        quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD,
+                        trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED,
+                        quantity_threshold="100",
+                        action_configurations=[
+                            billing.CreateBudgetConfigurationBudgetActionConfigurations(
+                                action_type=billing.ActionConfigurationType.EMAIL_NOTIFICATION,
+                                target="admin@example.com")
+                        ])
+                ]))
+            
+            by_id = a.budgets.get(budget_id=created.budget.budget_configuration_id)
             
             # cleanup
-            a.budgets.delete(budget_id=created.budget.budget_id)
+            a.budgets.delete(budget_id=created.budget.budget_configuration_id)
 
-        Get budget and its status.
+        Get budget.
         
-        Gets the budget specified by its UUID, including noncumulative status for each day that the budget is
-        configured to include.
+        Gets a budget configuration for an account. Both account and budget configuration are specified by ID.
         
         :param budget_id: str
-          Budget ID
+          The Databricks budget configuration ID.
         
-        :returns: :class:`WrappedBudgetWithStatus`
+        :returns: :class:`GetBudgetConfigurationResponse`
         
 
-    .. py:method:: list() -> Iterator[BudgetWithStatus]
+    .. py:method:: list( [, page_token: Optional[str]]) -> Iterator[BudgetConfiguration]
 
 
         Usage:
@@ -100,20 +128,24 @@
         .. code-block::
 
             from databricks.sdk import AccountClient
+            from databricks.sdk.service import billing
             
             a = AccountClient()
             
-            all = a.budgets.list()
+            all = a.budgets.list(billing.ListBudgetConfigurationsRequest())
 
         Get all budgets.
         
-        Gets all budgets associated with this account, including noncumulative status for each day that the
-        budget is configured to include.
+        Gets all budgets associated with this account.
         
-        :returns: Iterator over :class:`BudgetWithStatus`
+        :param page_token: str (optional)
+          A page token received from a previous get all budget configurations call. This token can be used to
+          retrieve the subsequent page. Requests first page if absent.
+        
+        :returns: Iterator over :class:`BudgetConfiguration`
         
 
-    .. py:method:: update(budget_id: str, budget: Budget)
+    .. py:method:: update(budget_id: str, budget: UpdateBudgetConfigurationBudget) -> UpdateBudgetConfigurationResponse
 
 
         Usage:
@@ -127,36 +159,60 @@
             
             a = AccountClient()
             
-            created = a.budgets.create(budget=billing.Budget(
-                name=f'sdk-{time.time_ns()}',
-                filter="tag.tagName = 'all'",
-                period="1 month",
-                start_date="2022-01-01",
-                target_amount="100",
-                alerts=[billing.BudgetAlert(email_notifications=["admin@example.com"], min_percentage=50)]))
-            
-            a.budgets.update(budget_id=created.budget.budget_id,
-                             budget=billing.Budget(name=f'sdk-{time.time_ns()}',
-                                                   filter="tag.tagName = 'all'",
-                                                   period="1 month",
-                                                   start_date="2022-01-01",
-                                                   target_amount="100",
-                                                   alerts=[
-                                                       billing.BudgetAlert(email_notifications=["admin@example.com"],
-                                                                           min_percentage=70)
-                                                   ]))
+            created = a.budgets.create(budget=billing.CreateBudgetConfigurationBudget(
+                display_name=f'sdk-{time.time_ns()}',
+                filter=billing.BudgetConfigurationFilter(tags=[
+                    billing.BudgetConfigurationFilterTagClause(key="tagName",
+                                                               value=billing.BudgetConfigurationFilterClause(
+                                                                   operator=billing.BudgetConfigurationFilterOperator.IN,
+                                                                   values=["all"]))
+                ]),
+                alert_configurations=[
+                    billing.CreateBudgetConfigurationBudgetAlertConfigurations(
+                        time_period=billing.AlertConfigurationTimePeriod.MONTH,
+                        quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD,
+                        trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED,
+                        quantity_threshold="100",
+                        action_configurations=[
+                            billing.CreateBudgetConfigurationBudgetActionConfigurations(
+                                action_type=billing.ActionConfigurationType.EMAIL_NOTIFICATION,
+                                target="admin@example.com")
+                        ])
+                ]))
+            
+            _ = a.budgets.update(
+                budget_id=created.budget.budget_configuration_id,
+                budget=billing.UpdateBudgetConfigurationBudget(
+                    budget_configuration_id=created.budget.budget_configuration_id,
+                    display_name=f'sdk-{time.time_ns()}',
+                    filter=billing.BudgetConfigurationFilter(tags=[
+                        billing.BudgetConfigurationFilterTagClause(
+                            key="tagName",
+                            value=billing.BudgetConfigurationFilterClause(
+                                operator=billing.BudgetConfigurationFilterOperator.IN, values=["all"]))
+                    ]),
+                    alert_configurations=[
+                        billing.AlertConfiguration(
+                            alert_configuration_id=created.budget.alert_configurations[0].alert_configuration_id,
+                            time_period=billing.AlertConfigurationTimePeriod.MONTH,
+                            quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD,
+                            trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED,
+                            quantity_threshold="50",
+                            action_configurations=created.budget.alert_configurations[0].action_configurations)
+                    ]))
             
             # cleanup
-            a.budgets.delete(budget_id=created.budget.budget_id)
+            a.budgets.delete(budget_id=created.budget.budget_configuration_id)
 
         Modify budget.
         
-        Modifies a budget in this account. Budget properties are completely overwritten.
+        Updates a budget configuration for an account. Both account and budget configuration are specified by
+        ID.
         
         :param budget_id: str
-          Budget ID
-        :param budget: :class:`Budget`
-          Budget configuration to be created.
-        
+          The Databricks budget configuration ID.
+        :param budget: :class:`UpdateBudgetConfigurationBudget`
+          The updated budget. This will overwrite the budget specified by the budget ID.
         
+        :returns: :class:`UpdateBudgetConfigurationResponse`
         
\ No newline at end of file
diff --git a/docs/account/billing/index.rst b/docs/account/billing/index.rst
index 522f6f5fd..0e07da594 100644
--- a/docs/account/billing/index.rst
+++ b/docs/account/billing/index.rst
@@ -9,4 +9,5 @@ Configure different aspects of Databricks billing and usage.
 
    billable_usage
    budgets
-   log_delivery
\ No newline at end of file
+   log_delivery
+   usage_dashboards
\ No newline at end of file
diff --git a/docs/account/billing/usage_dashboards.rst b/docs/account/billing/usage_dashboards.rst
new file mode 100644
index 000000000..350ef1f08
--- /dev/null
+++ b/docs/account/billing/usage_dashboards.rst
@@ -0,0 +1,39 @@
+``a.usage_dashboards``: Usage Dashboards
+========================================
+.. currentmodule:: databricks.sdk.service.billing
+
+.. py:class:: UsageDashboardsAPI
+
+    These APIs manage usage dashboards for this account. Usage dashboards enable you to gain insights into
+    your usage with pre-built dashboards: visualize breakdowns, analyze tag attributions, and identify cost
+    drivers.
+
+    .. py:method:: create( [, dashboard_type: Optional[UsageDashboardType], workspace_id: Optional[int]]) -> CreateBillingUsageDashboardResponse
+
+        Create new usage dashboard.
+        
+        Create a usage dashboard specified by workspaceId, accountId, and dashboard type.
+        
+        :param dashboard_type: :class:`UsageDashboardType` (optional)
+          Workspace level usage dashboard shows usage data for the specified workspace ID. Global level usage
+          dashboard shows usage data for all workspaces in the account.
+        :param workspace_id: int (optional)
+          The workspace ID of the workspace in which the usage dashboard is created.
+        
+        :returns: :class:`CreateBillingUsageDashboardResponse`
+        
+
+    .. py:method:: get( [, dashboard_type: Optional[UsageDashboardType], workspace_id: Optional[int]]) -> GetBillingUsageDashboardResponse
+
+        Get usage dashboard.
+        
+        Get a usage dashboard specified by workspaceId, accountId, and dashboard type.
+        
+        :param dashboard_type: :class:`UsageDashboardType` (optional)
+          Workspace level usage dashboard shows usage data for the specified workspace ID. Global level usage
+          dashboard shows usage data for all workspaces in the account.
+        :param workspace_id: int (optional)
+          The workspace ID of the workspace in which the usage dashboard is created.
+        
+        :returns: :class:`GetBillingUsageDashboardResponse`
+        
\ No newline at end of file
diff --git a/docs/account/iam/workspace_assignment.rst b/docs/account/iam/workspace_assignment.rst
index 1ce06996e..6230b8199 100644
--- a/docs/account/iam/workspace_assignment.rst
+++ b/docs/account/iam/workspace_assignment.rst
@@ -15,7 +15,7 @@
         principal.
         
         :param workspace_id: int
-          The workspace ID.
+          The workspace ID for the account.
         :param principal_id: int
           The ID of the user, service principal, or group.
         
@@ -61,7 +61,7 @@
         :returns: Iterator over :class:`PermissionAssignment`
         
 
-    .. py:method:: update(workspace_id: int, principal_id: int, permissions: List[WorkspacePermission]) -> PermissionAssignment
+    .. py:method:: update(workspace_id: int, principal_id: int [, permissions: Optional[List[WorkspacePermission]]]) -> PermissionAssignment
 
 
         Usage:
@@ -92,13 +92,15 @@
         specified principal.
         
         :param workspace_id: int
-          The workspace ID.
+          The workspace ID for the account.
         :param principal_id: int
           The ID of the user, service principal, or group.
-        :param permissions: List[:class:`WorkspacePermission`]
-          Array of permissions assignments to update on the workspace. Note that excluding this field will
-          have the same effect as providing an empty list which will result in the deletion of all permissions
-          for the principal.
+        :param permissions: List[:class:`WorkspacePermission`] (optional)
+          Array of permissions assignments to update on the workspace. Valid values are "USER" and "ADMIN"
+          (case-sensitive). If both "USER" and "ADMIN" are provided, "ADMIN" takes precedence. Other values
+          will be ignored. Note that excluding this field, or providing unsupported values, will have the same
+          effect as providing an empty list, which will result in the deletion of all permissions for the
+          principal.
         
         :returns: :class:`PermissionAssignment`
         
\ No newline at end of file
diff --git a/docs/account/oauth2/custom_app_integration.rst b/docs/account/oauth2/custom_app_integration.rst
index 382ce0bd0..0dcc3d8e0 100644
--- a/docs/account/oauth2/custom_app_integration.rst
+++ b/docs/account/oauth2/custom_app_integration.rst
@@ -4,23 +4,23 @@
 
 .. py:class:: CustomAppIntegrationAPI
 
-    These APIs enable administrators to manage custom oauth app integrations, which is required for
+    These APIs enable administrators to manage custom OAuth app integrations, which is required for
     adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.
 
-    .. py:method:: create(name: str, redirect_urls: List[str] [, confidential: Optional[bool], scopes: Optional[List[str]], token_access_policy: Optional[TokenAccessPolicy]]) -> CreateCustomAppIntegrationOutput
+    .. py:method:: create( [, confidential: Optional[bool], name: Optional[str], redirect_urls: Optional[List[str]], scopes: Optional[List[str]], token_access_policy: Optional[TokenAccessPolicy]]) -> CreateCustomAppIntegrationOutput
 
         Create Custom OAuth App Integration.
         
         Create Custom OAuth App Integration.
         
-        You can retrieve the custom oauth app integration via :method:CustomAppIntegration/get.
+        You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get.
         
-        :param name: str
-          name of the custom oauth app
-        :param redirect_urls: List[str]
-          List of oauth redirect urls
         :param confidential: bool (optional)
-          indicates if an oauth client-secret should be generated
+          This field indicates whether an OAuth client secret is required to authenticate this client.
+        :param name: str (optional)
+          Name of the custom OAuth app
+        :param redirect_urls: List[str] (optional)
+          List of OAuth redirect urls
         :param scopes: List[str] (optional)
           OAuth scopes granted to the application. Supported scopes: all-apis, sql, offline_access, openid,
           profile, email.
@@ -34,11 +34,10 @@
 
         Delete Custom OAuth App Integration.
         
-        Delete an existing Custom OAuth App Integration. You can retrieve the custom oauth app integration via
+        Delete an existing Custom OAuth App Integration. You can retrieve the custom OAuth app integration via
         :method:CustomAppIntegration/get.
         
         :param integration_id: str
-          The oauth app integration ID.
         
         
         
@@ -50,16 +49,19 @@
         Gets the Custom OAuth App Integration for the given integration id.
         
         :param integration_id: str
-          The oauth app integration ID.
         
         :returns: :class:`GetCustomAppIntegrationOutput`
         
 
-    .. py:method:: list() -> Iterator[GetCustomAppIntegrationOutput]
+    .. py:method:: list( [, include_creator_username: Optional[bool], page_size: Optional[int], page_token: Optional[str]]) -> Iterator[GetCustomAppIntegrationOutput]
 
         Get custom oauth app integrations.
         
-        Get the list of custom oauth app integrations for the specified Databricks account
+        Get the list of custom OAuth app integrations for the specified Databricks account
+        
+        :param include_creator_username: bool (optional)
+        :param page_size: int (optional)
+        :param page_token: str (optional)
         
         :returns: Iterator over :class:`GetCustomAppIntegrationOutput`
         
@@ -68,15 +70,14 @@
 
         Updates Custom OAuth App Integration.
         
-        Updates an existing custom OAuth App Integration. You can retrieve the custom oauth app integration
+        Updates an existing custom OAuth App Integration. You can retrieve the custom OAuth app integration
         via :method:CustomAppIntegration/get.
         
         :param integration_id: str
-          The oauth app integration ID.
         :param redirect_urls: List[str] (optional)
-          List of oauth redirect urls to be updated in the custom oauth app integration
+          List of OAuth redirect urls to be updated in the custom OAuth app integration
         :param token_access_policy: :class:`TokenAccessPolicy` (optional)
-          Token access policy to be updated in the custom oauth app integration
+          Token access policy to be updated in the custom OAuth app integration
         
         
         
\ No newline at end of file
diff --git a/docs/account/oauth2/o_auth_published_apps.rst b/docs/account/oauth2/o_auth_published_apps.rst
index 69aecb8ad..18c07c326 100644
--- a/docs/account/oauth2/o_auth_published_apps.rst
+++ b/docs/account/oauth2/o_auth_published_apps.rst
@@ -15,7 +15,7 @@
         Get all the available published OAuth apps in Databricks.
         
         :param page_size: int (optional)
-          The max number of OAuth published apps to return.
+          The max number of OAuth published apps to return in one page.
         :param page_token: str (optional)
           A token that can be used to get the next page of results.
         
diff --git a/docs/account/oauth2/published_app_integration.rst b/docs/account/oauth2/published_app_integration.rst
index 0488415cd..f59f2c4aa 100644
--- a/docs/account/oauth2/published_app_integration.rst
+++ b/docs/account/oauth2/published_app_integration.rst
@@ -4,7 +4,7 @@
 
 .. py:class:: PublishedAppIntegrationAPI
 
-    These APIs enable administrators to manage published oauth app integrations, which is required for
+    These APIs enable administrators to manage published OAuth app integrations, which is required for
     adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud.
 
     .. py:method:: create( [, app_id: Optional[str], token_access_policy: Optional[TokenAccessPolicy]]) -> CreatePublishedAppIntegrationOutput
@@ -13,10 +13,10 @@
         
         Create Published OAuth App Integration.
         
-        You can retrieve the published oauth app integration via :method:PublishedAppIntegration/get.
+        You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get.
         
         :param app_id: str (optional)
-          app_id of the oauth published app integration. For example power-bi, tableau-deskop
+          App id of the OAuth published app integration. For example power-bi, tableau-desktop
         :param token_access_policy: :class:`TokenAccessPolicy` (optional)
           Token access policy
         
@@ -27,11 +27,10 @@
 
         Delete Published OAuth App Integration.
         
-        Delete an existing Published OAuth App Integration. You can retrieve the published oauth app
+        Delete an existing Published OAuth App Integration. You can retrieve the published OAuth app
         integration via :method:PublishedAppIntegration/get.
         
         :param integration_id: str
-          The oauth app integration ID.
         
         
         
@@ -43,16 +42,18 @@
         Gets the Published OAuth App Integration for the given integration id.
         
         :param integration_id: str
-          The oauth app integration ID.
         
         :returns: :class:`GetPublishedAppIntegrationOutput`
         
 
-    .. py:method:: list() -> Iterator[GetPublishedAppIntegrationOutput]
+    .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[GetPublishedAppIntegrationOutput]
 
         Get published oauth app integrations.
         
-        Get the list of published oauth app integrations for the specified Databricks account
+        Get the list of published OAuth app integrations for the specified Databricks account
+        
+        :param page_size: int (optional)
+        :param page_token: str (optional)
         
         :returns: Iterator over :class:`GetPublishedAppIntegrationOutput`
         
@@ -61,13 +62,12 @@
 
         Updates Published OAuth App Integration.
         
-        Updates an existing published OAuth App Integration. You can retrieve the published oauth app
+        Updates an existing published OAuth App Integration. You can retrieve the published OAuth app
         integration via :method:PublishedAppIntegration/get.
         
         :param integration_id: str
-          The oauth app integration ID.
         :param token_access_policy: :class:`TokenAccessPolicy` (optional)
-          Token access policy to be updated in the published oauth app integration
+          Token access policy to be updated in the published OAuth app integration
         
         
         
\ No newline at end of file
diff --git a/docs/account/provisioning/workspaces.rst b/docs/account/provisioning/workspaces.rst
index abbed0f37..98c47cc9b 100644
--- a/docs/account/provisioning/workspaces.rst
+++ b/docs/account/provisioning/workspaces.rst
@@ -35,15 +35,15 @@
                 aws_credentials=provisioning.CreateCredentialAwsCredentials(sts_role=provisioning.CreateCredentialStsRole(
                     role_arn=os.environ["TEST_CROSSACCOUNT_ARN"])))
             
-            created = a.workspaces.create(workspace_name=f'sdk-{time.time_ns()}',
-                                          aws_region=os.environ["AWS_REGION"],
-                                          credentials_id=role.credentials_id,
-                                          storage_configuration_id=storage.storage_configuration_id).result()
+            waiter = a.workspaces.create(workspace_name=f'sdk-{time.time_ns()}',
+                                         aws_region=os.environ["AWS_REGION"],
+                                         credentials_id=role.credentials_id,
+                                         storage_configuration_id=storage.storage_configuration_id)
             
             # cleanup
             a.storage.delete(storage_configuration_id=storage.storage_configuration_id)
             a.credentials.delete(credentials_id=role.credentials_id)
-            a.workspaces.delete(workspace_id=created.workspace_id)
+            a.workspaces.delete(workspace_id=waiter.workspace_id)
 
         Create a new workspace.
         
@@ -175,34 +175,13 @@
 
         .. code-block::
 
-            import os
-            import time
-            
             from databricks.sdk import AccountClient
-            from databricks.sdk.service import provisioning
             
             a = AccountClient()
             
-            storage = a.storage.create(
-                storage_configuration_name=f'sdk-{time.time_ns()}',
-                root_bucket_info=provisioning.RootBucketInfo(bucket_name=os.environ["TEST_ROOT_BUCKET"]))
-            
-            role = a.credentials.create(
-                credentials_name=f'sdk-{time.time_ns()}',
-                aws_credentials=provisioning.CreateCredentialAwsCredentials(sts_role=provisioning.CreateCredentialStsRole(
-                    role_arn=os.environ["TEST_CROSSACCOUNT_ARN"])))
-            
-            created = a.workspaces.create(workspace_name=f'sdk-{time.time_ns()}',
-                                          aws_region=os.environ["AWS_REGION"],
-                                          credentials_id=role.credentials_id,
-                                          storage_configuration_id=storage.storage_configuration_id).result()
+            created = a.waiter.get()
             
             by_id = a.workspaces.get(workspace_id=created.workspace_id)
-            
-            # cleanup
-            a.storage.delete(storage_configuration_id=storage.storage_configuration_id)
-            a.credentials.delete(credentials_id=role.credentials_id)
-            a.workspaces.delete(workspace_id=created.workspace_id)
 
         Get a workspace.
         
@@ -263,32 +242,17 @@
             
             a = AccountClient()
             
-            storage = a.storage.create(
-                storage_configuration_name=f'sdk-{time.time_ns()}',
-                root_bucket_info=provisioning.RootBucketInfo(bucket_name=os.environ["TEST_ROOT_BUCKET"]))
-            
-            role = a.credentials.create(
-                credentials_name=f'sdk-{time.time_ns()}',
-                aws_credentials=provisioning.CreateCredentialAwsCredentials(sts_role=provisioning.CreateCredentialStsRole(
-                    role_arn=os.environ["TEST_CROSSACCOUNT_ARN"])))
-            
             update_role = a.credentials.create(
                 credentials_name=f'sdk-{time.time_ns()}',
                 aws_credentials=provisioning.CreateCredentialAwsCredentials(sts_role=provisioning.CreateCredentialStsRole(
                     role_arn=os.environ["TEST_CROSSACCOUNT_ARN"])))
             
-            created = a.workspaces.create(workspace_name=f'sdk-{time.time_ns()}',
-                                          aws_region=os.environ["AWS_REGION"],
-                                          credentials_id=role.credentials_id,
-                                          storage_configuration_id=storage.storage_configuration_id).result()
+            created = a.waiter.get()
             
             _ = a.workspaces.update(workspace_id=created.workspace_id, credentials_id=update_role.credentials_id).result()
             
             # cleanup
-            a.storage.delete(storage_configuration_id=storage.storage_configuration_id)
-            a.credentials.delete(credentials_id=role.credentials_id)
             a.credentials.delete(credentials_id=update_role.credentials_id)
-            a.workspaces.delete(workspace_id=created.workspace_id)
 
         Update workspace configuration.
         
diff --git a/docs/account/settings/disable_legacy_features.rst b/docs/account/settings/disable_legacy_features.rst
new file mode 100644
index 000000000..d7f1db9d3
--- /dev/null
+++ b/docs/account/settings/disable_legacy_features.rst
@@ -0,0 +1,60 @@
+``a.settings.disable_legacy_features``: Disable Legacy Features
+===============================================================
+.. currentmodule:: databricks.sdk.service.settings
+
+.. py:class:: DisableLegacyFeaturesAPI
+
+    Disable legacy features for new Databricks workspaces.
+    
+    For newly created workspaces: 1. Disables the use of DBFS root and mounts. 2. Hive Metastore will not be
+    provisioned. 3. Disables the use of ‘No-isolation clusters’. 4. Disables Databricks Runtime versions
+    prior to 13.3LTS.
+
+    .. py:method:: delete( [, etag: Optional[str]]) -> DeleteDisableLegacyFeaturesResponse
+
+        Delete the disable legacy features setting.
+        
+        Deletes the disable legacy features setting.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DeleteDisableLegacyFeaturesResponse`
+        
+
+    .. py:method:: get( [, etag: Optional[str]]) -> DisableLegacyFeatures
+
+        Get the disable legacy features setting.
+        
+        Gets the value of the disable legacy features setting.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DisableLegacyFeatures`
+        
+
+    .. py:method:: update(allow_missing: bool, setting: DisableLegacyFeatures, field_mask: str) -> DisableLegacyFeatures
+
+        Update the disable legacy features setting.
+        
+        Updates the value of the disable legacy features setting.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`DisableLegacyFeatures`
+        :param field_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as single string. To specify
+          multiple fields in the field mask, use comma as the separator (no space).
+        
+        :returns: :class:`DisableLegacyFeatures`
+        
\ No newline at end of file
diff --git a/docs/account/settings/index.rst b/docs/account/settings/index.rst
index 2c53b1afa..abf97c6a0 100644
--- a/docs/account/settings/index.rst
+++ b/docs/account/settings/index.rst
@@ -11,5 +11,6 @@ Manage security settings for Accounts and Workspaces
    network_connectivity
    settings
    csp_enablement_account
+   disable_legacy_features
    esm_enablement_account
    personal_compute
\ No newline at end of file
diff --git a/docs/account/settings/settings.rst b/docs/account/settings/settings.rst
index 9ef26a1ee..3df647279 100644
--- a/docs/account/settings/settings.rst
+++ b/docs/account/settings/settings.rst
@@ -16,6 +16,15 @@
         This settings can be disabled so that new workspaces do not have compliance security profile enabled by
         default.
 
+    .. py:property:: disable_legacy_features
+        :type: DisableLegacyFeaturesAPI
+
+        Disable legacy features for new Databricks workspaces.
+        
+        For newly created workspaces: 1. Disables the use of DBFS root and mounts. 2. Hive Metastore will not be
+        provisioned. 3. Disables the use of ‘No-isolation clusters’. 4. Disables Databricks Runtime versions
+        prior to 13.3LTS.
+
     .. py:property:: esm_enablement_account
         :type: EsmEnablementAccountAPI
 
diff --git a/docs/dataplane.md b/docs/dataplane.md
new file mode 100644
index 000000000..51e3d0225
--- /dev/null
+++ b/docs/dataplane.md
@@ -0,0 +1,27 @@
+# Data Plane APIs
+
+Some APIs such as Model Serving support direct Data Plane access for higher throughput and lower latency requests. 
+To use Data Plane access, a dedicated short-lived OAuth token must be used. The SDK is able to generate and refresh 
+such tokens transparently for the user.
+
+## Prerequisites
+Databricks SDK must be configured using a supported OAuth token. For more information, see
+[Supported Databricks authentication types](https://docs.databricks.com/en/dev-tools/auth/index.html)
+
+The desired service or endpoint must have direct Data Plane access enabled.
+
+## Usage
+Databricks SDK provides a separate service to be used for Data Plane access, which includes a `_data_plane` suffix.
+This service contains the subset of the methods for the original service which are supported in the Data Plane.
+
+Example:
+
+```python
+from databricks.sdk import WorkspaceClient
+# Control Plane
+w = WorkspaceClient()
+w.serving_endpoints.query(...)
+# Data Plane
+w.serving_endpoints_data_plane.query(...)
+```
+
diff --git a/docs/dbdataclasses/apps.rst b/docs/dbdataclasses/apps.rst
new file mode 100644
index 000000000..2d522c625
--- /dev/null
+++ b/docs/dbdataclasses/apps.rst
@@ -0,0 +1,223 @@
+Apps
+====
+
+These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.apps`` module.
+
+.. py:currentmodule:: databricks.sdk.service.apps
+.. autoclass:: App
+   :members:
+   :undoc-members:
+
+.. autoclass:: AppAccessControlRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: AppAccessControlResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: AppDeployment
+   :members:
+   :undoc-members:
+
+.. autoclass:: AppDeploymentArtifacts
+   :members:
+   :undoc-members:
+
+.. py:class:: AppDeploymentMode
+
+   .. py:attribute:: AUTO_SYNC
+      :value: "AUTO_SYNC"
+
+   .. py:attribute:: SNAPSHOT
+      :value: "SNAPSHOT"
+
+.. py:class:: AppDeploymentState
+
+   .. py:attribute:: CANCELLED
+      :value: "CANCELLED"
+
+   .. py:attribute:: FAILED
+      :value: "FAILED"
+
+   .. py:attribute:: IN_PROGRESS
+      :value: "IN_PROGRESS"
+
+   .. py:attribute:: SUCCEEDED
+      :value: "SUCCEEDED"
+
+.. autoclass:: AppDeploymentStatus
+   :members:
+   :undoc-members:
+
+.. autoclass:: AppPermission
+   :members:
+   :undoc-members:
+
+.. py:class:: AppPermissionLevel
+
+   Permission level
+
+   .. py:attribute:: CAN_MANAGE
+      :value: "CAN_MANAGE"
+
+   .. py:attribute:: CAN_USE
+      :value: "CAN_USE"
+
+.. autoclass:: AppPermissions
+   :members:
+   :undoc-members:
+
+.. autoclass:: AppPermissionsDescription
+   :members:
+   :undoc-members:
+
+.. autoclass:: AppPermissionsRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: AppResource
+   :members:
+   :undoc-members:
+
+.. autoclass:: AppResourceJob
+   :members:
+   :undoc-members:
+
+.. py:class:: AppResourceJobJobPermission
+
+   .. py:attribute:: CAN_MANAGE
+      :value: "CAN_MANAGE"
+
+   .. py:attribute:: CAN_MANAGE_RUN
+      :value: "CAN_MANAGE_RUN"
+
+   .. py:attribute:: CAN_VIEW
+      :value: "CAN_VIEW"
+
+   .. py:attribute:: IS_OWNER
+      :value: "IS_OWNER"
+
+.. autoclass:: AppResourceSecret
+   :members:
+   :undoc-members:
+
+.. py:class:: AppResourceSecretSecretPermission
+
+   Permission to grant on the secret scope. Supported permissions are: "READ", "WRITE", "MANAGE".
+
+   .. py:attribute:: MANAGE
+      :value: "MANAGE"
+
+   .. py:attribute:: READ
+      :value: "READ"
+
+   .. py:attribute:: WRITE
+      :value: "WRITE"
+
+.. autoclass:: AppResourceServingEndpoint
+   :members:
+   :undoc-members:
+
+.. py:class:: AppResourceServingEndpointServingEndpointPermission
+
+   .. py:attribute:: CAN_MANAGE
+      :value: "CAN_MANAGE"
+
+   .. py:attribute:: CAN_QUERY
+      :value: "CAN_QUERY"
+
+   .. py:attribute:: CAN_VIEW
+      :value: "CAN_VIEW"
+
+.. autoclass:: AppResourceSqlWarehouse
+   :members:
+   :undoc-members:
+
+.. py:class:: AppResourceSqlWarehouseSqlWarehousePermission
+
+   .. py:attribute:: CAN_MANAGE
+      :value: "CAN_MANAGE"
+
+   .. py:attribute:: CAN_USE
+      :value: "CAN_USE"
+
+   .. py:attribute:: IS_OWNER
+      :value: "IS_OWNER"
+
+.. py:class:: ApplicationState
+
+   .. py:attribute:: CRASHED
+      :value: "CRASHED"
+
+   .. py:attribute:: DEPLOYING
+      :value: "DEPLOYING"
+
+   .. py:attribute:: RUNNING
+      :value: "RUNNING"
+
+   .. py:attribute:: UNAVAILABLE
+      :value: "UNAVAILABLE"
+
+.. autoclass:: ApplicationStatus
+   :members:
+   :undoc-members:
+
+.. py:class:: ComputeState
+
+   .. py:attribute:: ACTIVE
+      :value: "ACTIVE"
+
+   .. py:attribute:: DELETING
+      :value: "DELETING"
+
+   .. py:attribute:: ERROR
+      :value: "ERROR"
+
+   .. py:attribute:: STARTING
+      :value: "STARTING"
+
+   .. py:attribute:: STOPPED
+      :value: "STOPPED"
+
+   .. py:attribute:: STOPPING
+      :value: "STOPPING"
+
+   .. py:attribute:: UPDATING
+      :value: "UPDATING"
+
+.. autoclass:: ComputeStatus
+   :members:
+   :undoc-members:
+
+.. autoclass:: CreateAppDeploymentRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: CreateAppRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: GetAppPermissionLevelsResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: ListAppDeploymentsResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: ListAppsResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: StartAppRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: StopAppRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: UpdateAppRequest
+   :members:
+   :undoc-members:
diff --git a/docs/dbdataclasses/billing.rst b/docs/dbdataclasses/billing.rst
index 27abdd35a..25deb0a18 100644
--- a/docs/dbdataclasses/billing.rst
+++ b/docs/dbdataclasses/billing.rst
@@ -4,23 +4,84 @@ Billing
 These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.billing`` module.
 
 .. py:currentmodule:: databricks.sdk.service.billing
-.. autoclass:: Budget
+.. autoclass:: ActionConfiguration
    :members:
    :undoc-members:
 
-.. autoclass:: BudgetAlert
+.. py:class:: ActionConfigurationType
+
+   .. py:attribute:: EMAIL_NOTIFICATION
+      :value: "EMAIL_NOTIFICATION"
+
+.. autoclass:: AlertConfiguration
+   :members:
+   :undoc-members:
+
+.. py:class:: AlertConfigurationQuantityType
+
+   .. py:attribute:: LIST_PRICE_DOLLARS_USD
+      :value: "LIST_PRICE_DOLLARS_USD"
+
+.. py:class:: AlertConfigurationTimePeriod
+
+   .. py:attribute:: MONTH
+      :value: "MONTH"
+
+.. py:class:: AlertConfigurationTriggerType
+
+   .. py:attribute:: CUMULATIVE_SPENDING_EXCEEDED
+      :value: "CUMULATIVE_SPENDING_EXCEEDED"
+
+.. autoclass:: BudgetConfiguration
+   :members:
+   :undoc-members:
+
+.. autoclass:: BudgetConfigurationFilter
    :members:
    :undoc-members:
 
-.. autoclass:: BudgetList
+.. autoclass:: BudgetConfigurationFilterClause
    :members:
    :undoc-members:
 
-.. autoclass:: BudgetWithStatus
+.. py:class:: BudgetConfigurationFilterOperator
+
+   .. py:attribute:: IN
+      :value: "IN"
+
+.. autoclass:: BudgetConfigurationFilterTagClause
    :members:
    :undoc-members:
 
-.. autoclass:: BudgetWithStatusStatusDailyItem
+.. autoclass:: BudgetConfigurationFilterWorkspaceIdClause
+   :members:
+   :undoc-members:
+
+.. autoclass:: CreateBillingUsageDashboardRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: CreateBillingUsageDashboardResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: CreateBudgetConfigurationBudget
+   :members:
+   :undoc-members:
+
+.. autoclass:: CreateBudgetConfigurationBudgetActionConfigurations
+   :members:
+   :undoc-members:
+
+.. autoclass:: CreateBudgetConfigurationBudgetAlertConfigurations
+   :members:
+   :undoc-members:
+
+.. autoclass:: CreateBudgetConfigurationRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: CreateBudgetConfigurationResponse
    :members:
    :undoc-members:
 
@@ -28,7 +89,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: DeleteResponse
+.. autoclass:: DeleteBudgetConfigurationResponse
    :members:
    :undoc-members:
 
@@ -55,6 +116,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: GetBillingUsageDashboardResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: GetBudgetConfigurationResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: ListBudgetConfigurationsResponse
+   :members:
+   :undoc-members:
+
 .. py:class:: LogDeliveryConfigStatus
 
    Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled). Defaults to `ENABLED`. You can [enable or disable the configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration is not supported, so disable a log delivery configuration that is no longer needed.
@@ -102,22 +175,30 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: UpdateLogDeliveryConfigurationStatusRequest
+.. autoclass:: UpdateBudgetConfigurationBudget
    :members:
    :undoc-members:
 
-.. autoclass:: UpdateResponse
+.. autoclass:: UpdateBudgetConfigurationRequest
    :members:
    :undoc-members:
 
-.. autoclass:: WrappedBudget
+.. autoclass:: UpdateBudgetConfigurationResponse
    :members:
    :undoc-members:
 
-.. autoclass:: WrappedBudgetWithStatus
+.. autoclass:: UpdateLogDeliveryConfigurationStatusRequest
    :members:
    :undoc-members:
 
+.. py:class:: UsageDashboardType
+
+   .. py:attribute:: USAGE_DASHBOARD_TYPE_GLOBAL
+      :value: "USAGE_DASHBOARD_TYPE_GLOBAL"
+
+   .. py:attribute:: USAGE_DASHBOARD_TYPE_WORKSPACE
+      :value: "USAGE_DASHBOARD_TYPE_WORKSPACE"
+
 .. autoclass:: WrappedCreateLogDeliveryConfiguration
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/catalog.rst b/docs/dbdataclasses/catalog.rst
index e2c120bc9..cb6399348 100644
--- a/docs/dbdataclasses/catalog.rst
+++ b/docs/dbdataclasses/catalog.rst
@@ -65,6 +65,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: AwsCredentials
+   :members:
+   :undoc-members:
+
 .. autoclass:: AwsIamRoleRequest
    :members:
    :undoc-members:
@@ -85,6 +89,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: AzureUserDelegationSas
+   :members:
+   :undoc-members:
+
 .. autoclass:: CancelRefreshResponse
    :members:
    :undoc-members:
@@ -249,9 +257,21 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: CONNECTION_BIGQUERY
       :value: "CONNECTION_BIGQUERY"
 
+   .. py:attribute:: CONNECTION_BUILTIN_HIVE_METASTORE
+      :value: "CONNECTION_BUILTIN_HIVE_METASTORE"
+
    .. py:attribute:: CONNECTION_DATABRICKS
       :value: "CONNECTION_DATABRICKS"
 
+   .. py:attribute:: CONNECTION_EXTERNAL_HIVE_METASTORE
+      :value: "CONNECTION_EXTERNAL_HIVE_METASTORE"
+
+   .. py:attribute:: CONNECTION_GLUE
+      :value: "CONNECTION_GLUE"
+
+   .. py:attribute:: CONNECTION_HTTP_BEARER
+      :value: "CONNECTION_HTTP_BEARER"
+
    .. py:attribute:: CONNECTION_MYSQL
       :value: "CONNECTION_MYSQL"
 
@@ -283,6 +303,15 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: DATABRICKS
       :value: "DATABRICKS"
 
+   .. py:attribute:: GLUE
+      :value: "GLUE"
+
+   .. py:attribute:: HIVE_METASTORE
+      :value: "HIVE_METASTORE"
+
+   .. py:attribute:: HTTP
+      :value: "HTTP"
+
    .. py:attribute:: MYSQL
       :value: "MYSQL"
 
@@ -406,6 +435,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
    The type of credential.
 
+   .. py:attribute:: BEARER_TOKEN
+      :value: "BEARER_TOKEN"
+
    .. py:attribute:: USERNAME_PASSWORD
       :value: "USERNAME_PASSWORD"
 
@@ -647,6 +679,29 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: PARAM
       :value: "PARAM"
 
+.. autoclass:: GcpOauthToken
+   :members:
+   :undoc-members:
+
+.. autoclass:: GenerateTemporaryTableCredentialRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: GenerateTemporaryTableCredentialResponse
+   :members:
+   :undoc-members:
+
+.. py:class:: GetBindingsSecurableType
+
+   .. py:attribute:: CATALOG
+      :value: "CATALOG"
+
+   .. py:attribute:: EXTERNAL_LOCATION
+      :value: "EXTERNAL_LOCATION"
+
+   .. py:attribute:: STORAGE_CREDENTIAL
+      :value: "STORAGE_CREDENTIAL"
+
 .. autoclass:: GetMetastoreSummaryResponse
    :members:
    :undoc-members:
@@ -661,6 +716,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: INTERNAL_AND_EXTERNAL
       :value: "INTERNAL_AND_EXTERNAL"
 
+.. autoclass:: GetQuotaResponse
+   :members:
+   :undoc-members:
+
 .. py:class:: IsolationMode
 
    Whether the current securable is accessible from all workspaces or a specific set of workspaces.
@@ -703,6 +762,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: ListQuotasResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: ListRegisteredModelsResponse
    :members:
    :undoc-members:
@@ -940,9 +1003,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: ONLINE_PIPELINE_FAILED
       :value: "ONLINE_PIPELINE_FAILED"
 
-   .. py:attribute:: ONLINE_TABLE_STATE_UNSPECIFIED
-      :value: "ONLINE_TABLE_STATE_UNSPECIFIED"
-
    .. py:attribute:: ONLINE_TRIGGERED_UPDATE
       :value: "ONLINE_TRIGGERED_UPDATE"
 
@@ -1052,6 +1112,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: EXECUTE
       :value: "EXECUTE"
 
+   .. py:attribute:: MANAGE
+      :value: "MANAGE"
+
    .. py:attribute:: MANAGE_ALLOWLIST
       :value: "MANAGE_ALLOWLIST"
 
@@ -1131,13 +1194,29 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: PROVISIONING
       :value: "PROVISIONING"
 
-   .. py:attribute:: STATE_UNSPECIFIED
-      :value: "STATE_UNSPECIFIED"
+   .. py:attribute:: UPDATING
+      :value: "UPDATING"
 
 .. autoclass:: ProvisioningStatus
    :members:
    :undoc-members:
 
+.. autoclass:: QuotaInfo
+   :members:
+   :undoc-members:
+
+.. autoclass:: R2Credentials
+   :members:
+   :undoc-members:
+
+.. autoclass:: RegenerateDashboardRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: RegenerateDashboardResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: RegisteredModelAlias
    :members:
    :undoc-members:
@@ -1258,6 +1337,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. py:class:: TableOperation
+
+   .. py:attribute:: READ
+      :value: "READ"
+
+   .. py:attribute:: READ_WRITE
+      :value: "READ_WRITE"
+
 .. autoclass:: TableRowFilter
    :members:
    :undoc-members:
@@ -1304,6 +1391,17 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. py:class:: UpdateBindingsSecurableType
+
+   .. py:attribute:: CATALOG
+      :value: "CATALOG"
+
+   .. py:attribute:: EXTERNAL_LOCATION
+      :value: "EXTERNAL_LOCATION"
+
+   .. py:attribute:: STORAGE_CREDENTIAL
+      :value: "STORAGE_CREDENTIAL"
+
 .. autoclass:: UpdateCatalog
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/compute.rst b/docs/dbdataclasses/compute.rst
index 64ab42682..0066f0374 100644
--- a/docs/dbdataclasses/compute.rst
+++ b/docs/dbdataclasses/compute.rst
@@ -103,6 +103,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: ClusterCompliance
+   :members:
+   :undoc-members:
+
 .. autoclass:: ClusterDetails
    :members:
    :undoc-members:
@@ -179,6 +183,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: ClusterSettingsChange
+   :members:
+   :undoc-members:
+
 .. autoclass:: ClusterSize
    :members:
    :undoc-members:
@@ -443,6 +451,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: EnforceClusterComplianceRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: EnforceClusterComplianceResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: Environment
    :members:
    :undoc-members:
@@ -565,6 +581,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: GetClusterComplianceResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: GetClusterPermissionLevelsResponse
    :members:
    :undoc-members:
@@ -817,10 +837,42 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: ListClusterCompliancesResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: ListClustersFilterBy
+   :members:
+   :undoc-members:
+
 .. autoclass:: ListClustersResponse
    :members:
    :undoc-members:
 
+.. autoclass:: ListClustersSortBy
+   :members:
+   :undoc-members:
+
+.. py:class:: ListClustersSortByDirection
+
+   The direction to sort by.
+
+   .. py:attribute:: ASC
+      :value: "ASC"
+
+   .. py:attribute:: DESC
+      :value: "DESC"
+
+.. py:class:: ListClustersSortByField
+
+   The sorting criteria. By default, clusters are sorted by 3 columns from highest to lowest precedence: cluster state, pinned or unpinned, then cluster name.
+
+   .. py:attribute:: CLUSTER_NAME
+      :value: "CLUSTER_NAME"
+
+   .. py:attribute:: DEFAULT
+      :value: "DEFAULT"
+
 .. autoclass:: ListGlobalInitScriptsResponse
    :members:
    :undoc-members:
@@ -855,6 +907,8 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: ListSortOrder
 
+   A generic ordering enum for list-based queries.
+
    .. py:attribute:: ASC
       :value: "ASC"
 
@@ -968,7 +1022,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: RuntimeEngine
 
-   Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime engine is inferred from spark_version.
+   Determines the cluster's runtime engine, either standard or Photon.
+   This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+   If left unspecified, the runtime engine defaults to standard unless the `spark_version` contains `-photon-`, in which case Photon will be used.
 
    .. py:attribute:: NULL
       :value: "NULL"
@@ -1308,6 +1364,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: UpdateCluster
+   :members:
+   :undoc-members:
+
+.. autoclass:: UpdateClusterResource
+   :members:
+   :undoc-members:
+
+.. autoclass:: UpdateClusterResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: UpdateResponse
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/dashboards.rst b/docs/dbdataclasses/dashboards.rst
index dca31d64b..91de6ccb2 100644
--- a/docs/dbdataclasses/dashboards.rst
+++ b/docs/dbdataclasses/dashboards.rst
@@ -29,9 +29,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: DASHBOARD_VIEW_BASIC
       :value: "DASHBOARD_VIEW_BASIC"
 
-   .. py:attribute:: DASHBOARD_VIEW_FULL
-      :value: "DASHBOARD_VIEW_FULL"
-
 .. autoclass:: DeleteScheduleResponse
    :members:
    :undoc-members:
@@ -40,6 +37,34 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: GenieAttachment
+   :members:
+   :undoc-members:
+
+.. autoclass:: GenieConversation
+   :members:
+   :undoc-members:
+
+.. autoclass:: GenieCreateConversationMessageRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: GenieGetMessageQueryResultResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: GenieMessage
+   :members:
+   :undoc-members:
+
+.. autoclass:: GenieStartConversationMessageRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: GenieStartConversationResponse
+   :members:
+   :undoc-members:
+
 .. py:class:: LifecycleState
 
    .. py:attribute:: ACTIVE
@@ -60,6 +85,160 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: MessageError
+   :members:
+   :undoc-members:
+
+.. py:class:: MessageErrorType
+
+   .. py:attribute:: BLOCK_MULTIPLE_EXECUTIONS_EXCEPTION
+      :value: "BLOCK_MULTIPLE_EXECUTIONS_EXCEPTION"
+
+   .. py:attribute:: CHAT_COMPLETION_CLIENT_EXCEPTION
+      :value: "CHAT_COMPLETION_CLIENT_EXCEPTION"
+
+   .. py:attribute:: CHAT_COMPLETION_CLIENT_TIMEOUT_EXCEPTION
+      :value: "CHAT_COMPLETION_CLIENT_TIMEOUT_EXCEPTION"
+
+   .. py:attribute:: CHAT_COMPLETION_NETWORK_EXCEPTION
+      :value: "CHAT_COMPLETION_NETWORK_EXCEPTION"
+
+   .. py:attribute:: CONTENT_FILTER_EXCEPTION
+      :value: "CONTENT_FILTER_EXCEPTION"
+
+   .. py:attribute:: CONTEXT_EXCEEDED_EXCEPTION
+      :value: "CONTEXT_EXCEEDED_EXCEPTION"
+
+   .. py:attribute:: COULD_NOT_GET_UC_SCHEMA_EXCEPTION
+      :value: "COULD_NOT_GET_UC_SCHEMA_EXCEPTION"
+
+   .. py:attribute:: DEPLOYMENT_NOT_FOUND_EXCEPTION
+      :value: "DEPLOYMENT_NOT_FOUND_EXCEPTION"
+
+   .. py:attribute:: FUNCTIONS_NOT_AVAILABLE_EXCEPTION
+      :value: "FUNCTIONS_NOT_AVAILABLE_EXCEPTION"
+
+   .. py:attribute:: FUNCTION_ARGUMENTS_INVALID_EXCEPTION
+      :value: "FUNCTION_ARGUMENTS_INVALID_EXCEPTION"
+
+   .. py:attribute:: FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION
+      :value: "FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION"
+
+   .. py:attribute:: FUNCTION_CALL_MISSING_PARAMETER_EXCEPTION
+      :value: "FUNCTION_CALL_MISSING_PARAMETER_EXCEPTION"
+
+   .. py:attribute:: GENERIC_CHAT_COMPLETION_EXCEPTION
+      :value: "GENERIC_CHAT_COMPLETION_EXCEPTION"
+
+   .. py:attribute:: GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION
+      :value: "GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION"
+
+   .. py:attribute:: GENERIC_SQL_EXEC_API_CALL_EXCEPTION
+      :value: "GENERIC_SQL_EXEC_API_CALL_EXCEPTION"
+
+   .. py:attribute:: ILLEGAL_PARAMETER_DEFINITION_EXCEPTION
+      :value: "ILLEGAL_PARAMETER_DEFINITION_EXCEPTION"
+
+   .. py:attribute:: INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION
+      :value: "INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION"
+
+   .. py:attribute:: INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION
+      :value: "INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION"
+
+   .. py:attribute:: INVALID_CHAT_COMPLETION_JSON_EXCEPTION
+      :value: "INVALID_CHAT_COMPLETION_JSON_EXCEPTION"
+
+   .. py:attribute:: INVALID_COMPLETION_REQUEST_EXCEPTION
+      :value: "INVALID_COMPLETION_REQUEST_EXCEPTION"
+
+   .. py:attribute:: INVALID_FUNCTION_CALL_EXCEPTION
+      :value: "INVALID_FUNCTION_CALL_EXCEPTION"
+
+   .. py:attribute:: INVALID_TABLE_IDENTIFIER_EXCEPTION
+      :value: "INVALID_TABLE_IDENTIFIER_EXCEPTION"
+
+   .. py:attribute:: LOCAL_CONTEXT_EXCEEDED_EXCEPTION
+      :value: "LOCAL_CONTEXT_EXCEEDED_EXCEPTION"
+
+   .. py:attribute:: MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION
+      :value: "MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION"
+
+   .. py:attribute:: MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION
+      :value: "MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION"
+
+   .. py:attribute:: NO_QUERY_TO_VISUALIZE_EXCEPTION
+      :value: "NO_QUERY_TO_VISUALIZE_EXCEPTION"
+
+   .. py:attribute:: NO_TABLES_TO_QUERY_EXCEPTION
+      :value: "NO_TABLES_TO_QUERY_EXCEPTION"
+
+   .. py:attribute:: RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION
+      :value: "RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION"
+
+   .. py:attribute:: RATE_LIMIT_EXCEEDED_SPECIFIED_WAIT_EXCEPTION
+      :value: "RATE_LIMIT_EXCEEDED_SPECIFIED_WAIT_EXCEPTION"
+
+   .. py:attribute:: REPLY_PROCESS_TIMEOUT_EXCEPTION
+      :value: "REPLY_PROCESS_TIMEOUT_EXCEPTION"
+
+   .. py:attribute:: RETRYABLE_PROCESSING_EXCEPTION
+      :value: "RETRYABLE_PROCESSING_EXCEPTION"
+
+   .. py:attribute:: SQL_EXECUTION_EXCEPTION
+      :value: "SQL_EXECUTION_EXCEPTION"
+
+   .. py:attribute:: TABLES_MISSING_EXCEPTION
+      :value: "TABLES_MISSING_EXCEPTION"
+
+   .. py:attribute:: TOO_MANY_CERTIFIED_ANSWERS_EXCEPTION
+      :value: "TOO_MANY_CERTIFIED_ANSWERS_EXCEPTION"
+
+   .. py:attribute:: TOO_MANY_TABLES_EXCEPTION
+      :value: "TOO_MANY_TABLES_EXCEPTION"
+
+   .. py:attribute:: UNEXPECTED_REPLY_PROCESS_EXCEPTION
+      :value: "UNEXPECTED_REPLY_PROCESS_EXCEPTION"
+
+   .. py:attribute:: UNKNOWN_AI_MODEL
+      :value: "UNKNOWN_AI_MODEL"
+
+   .. py:attribute:: WAREHOUSE_ACCESS_MISSING_EXCEPTION
+      :value: "WAREHOUSE_ACCESS_MISSING_EXCEPTION"
+
+   .. py:attribute:: WAREHOUSE_NOT_FOUND_EXCEPTION
+      :value: "WAREHOUSE_NOT_FOUND_EXCEPTION"
+
+.. py:class:: MessageStatus
+
+   MessageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data sources. * `FILTERING_CONTEXT`: Running smart context step to determine relevant context. * `ASKING_AI`: Waiting for the LLM to respond to the users question. * `EXECUTING_QUERY`: Executing AI provided SQL query. Get the SQL query result by calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message status will stay in the `EXECUTING_QUERY` until a client calls [getMessageQueryResult](:method:genie/getMessageQueryResult)**. * `FAILED`: Generating a response or executing the query failed. Please see `error` field. * `COMPLETED`: Message processing is completed. Results are in the `attachments` field. Get the SQL query result by calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `SUBMITTED`: Message has been submitted. * `QUERY_RESULT_EXPIRED`: SQL result is not available anymore. The user needs to execute the query again. * `CANCELLED`: Message has been cancelled.
+
+   .. py:attribute:: ASKING_AI
+      :value: "ASKING_AI"
+
+   .. py:attribute:: CANCELLED
+      :value: "CANCELLED"
+
+   .. py:attribute:: COMPLETED
+      :value: "COMPLETED"
+
+   .. py:attribute:: EXECUTING_QUERY
+      :value: "EXECUTING_QUERY"
+
+   .. py:attribute:: FAILED
+      :value: "FAILED"
+
+   .. py:attribute:: FETCHING_METADATA
+      :value: "FETCHING_METADATA"
+
+   .. py:attribute:: FILTERING_CONTEXT
+      :value: "FILTERING_CONTEXT"
+
+   .. py:attribute:: QUERY_RESULT_EXPIRED
+      :value: "QUERY_RESULT_EXPIRED"
+
+   .. py:attribute:: SUBMITTED
+      :value: "SUBMITTED"
+
 .. autoclass:: MigrateDashboardRequest
    :members:
    :undoc-members:
@@ -72,6 +251,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: QueryAttachment
+   :members:
+   :undoc-members:
+
+.. autoclass:: Result
+   :members:
+   :undoc-members:
+
 .. autoclass:: Schedule
    :members:
    :undoc-members:
@@ -100,6 +287,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: TextAttachment
+   :members:
+   :undoc-members:
+
 .. autoclass:: TrashDashboardResponse
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/iam.rst b/docs/dbdataclasses/iam.rst
index 9cafb78df..643da3d47 100644
--- a/docs/dbdataclasses/iam.rst
+++ b/docs/dbdataclasses/iam.rst
@@ -20,7 +20,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: DeleteWorkspaceAssignments
+.. autoclass:: DeleteWorkspacePermissionAssignmentResponse
    :members:
    :undoc-members:
 
@@ -82,6 +82,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: MigratePermissionsRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: MigratePermissionsResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: Name
    :members:
    :undoc-members:
@@ -191,6 +199,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: CAN_MANAGE_STAGING_VERSIONS
       :value: "CAN_MANAGE_STAGING_VERSIONS"
 
+   .. py:attribute:: CAN_MONITOR
+      :value: "CAN_MONITOR"
+
    .. py:attribute:: CAN_QUERY
       :value: "CAN_QUERY"
 
@@ -215,14 +226,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: IS_OWNER
       :value: "IS_OWNER"
 
-.. autoclass:: PermissionMigrationRequest
-   :members:
-   :undoc-members:
-
-.. autoclass:: PermissionMigrationResponse
-   :members:
-   :undoc-members:
-
 .. autoclass:: PermissionOutput
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/index.rst b/docs/dbdataclasses/index.rst
index 893e488d7..987bee7f5 100644
--- a/docs/dbdataclasses/index.rst
+++ b/docs/dbdataclasses/index.rst
@@ -5,6 +5,7 @@ Dataclasses
 .. toctree::
    :maxdepth: 1
    
+   apps
    billing
    catalog
    compute
diff --git a/docs/dbdataclasses/jobs.rst b/docs/dbdataclasses/jobs.rst
index 81d81020a..3aa0db043 100644
--- a/docs/dbdataclasses/jobs.rst
+++ b/docs/dbdataclasses/jobs.rst
@@ -111,6 +111,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: EnforcePolicyComplianceForJobResponseJobClusterSettingsChange
+   :members:
+   :undoc-members:
+
+.. autoclass:: EnforcePolicyComplianceRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: EnforcePolicyComplianceResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: ExportRunOutput
    :members:
    :undoc-members:
@@ -147,6 +159,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: GetPolicyComplianceResponse
+   :members:
+   :undoc-members:
+
 .. py:class:: GitProvider
 
    .. py:attribute:: AWS_CODE_COMMIT
@@ -197,6 +213,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: JobCompliance
+   :members:
+   :undoc-members:
+
 .. autoclass:: JobDeployment
    :members:
    :undoc-members:
@@ -329,6 +349,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: ListJobComplianceForPolicyResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: ListJobsResponse
    :members:
    :undoc-members:
@@ -365,9 +389,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: HOURS
       :value: "HOURS"
 
-   .. py:attribute:: TIME_UNIT_UNSPECIFIED
-      :value: "TIME_UNIT_UNSPECIFIED"
-
    .. py:attribute:: WEEKS
       :value: "WEEKS"
 
@@ -383,6 +404,23 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: QueueDetails
+   :members:
+   :undoc-members:
+
+.. py:class:: QueueDetailsCodeCode
+
+   The reason for queuing the run. * `ACTIVE_RUNS_LIMIT_REACHED`: The run was queued due to reaching the workspace limit of active task runs. * `MAX_CONCURRENT_RUNS_REACHED`: The run was queued due to reaching the per-job limit of concurrent job runs. * `ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED`: The run was queued due to reaching the workspace limit of active run job tasks.
+
+   .. py:attribute:: ACTIVE_RUNS_LIMIT_REACHED
+      :value: "ACTIVE_RUNS_LIMIT_REACHED"
+
+   .. py:attribute:: ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED
+      :value: "ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED"
+
+   .. py:attribute:: MAX_CONCURRENT_RUNS_REACHED
+      :value: "MAX_CONCURRENT_RUNS_REACHED"
+
 .. autoclass:: QueueSettings
    :members:
    :undoc-members:
@@ -523,6 +561,28 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: WAITING_FOR_RETRY
       :value: "WAITING_FOR_RETRY"
 
+.. py:class:: RunLifecycleStateV2State
+
+   The current state of the run.
+
+   .. py:attribute:: BLOCKED
+      :value: "BLOCKED"
+
+   .. py:attribute:: PENDING
+      :value: "PENDING"
+
+   .. py:attribute:: QUEUED
+      :value: "QUEUED"
+
+   .. py:attribute:: RUNNING
+      :value: "RUNNING"
+
+   .. py:attribute:: TERMINATED
+      :value: "TERMINATED"
+
+   .. py:attribute:: TERMINATING
+      :value: "TERMINATING"
+
 .. autoclass:: RunNow
    :members:
    :undoc-members:
@@ -541,11 +601,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: RunResultState
 
-   A value indicating the run's result. The possible values are: * `SUCCESS`: The task completed successfully. * `FAILED`: The task completed with an error. * `TIMEDOUT`: The run was stopped after reaching the timeout. * `CANCELED`: The run was canceled at user request. * `MAXIMUM_CONCURRENT_RUNS_REACHED`: The run was skipped because the maximum concurrent runs were reached. * `EXCLUDED`: The run was skipped because the necessary conditions were not met. * `SUCCESS_WITH_FAILURES`: The job run completed successfully with some failures; leaf tasks were successful. * `UPSTREAM_FAILED`: The run was skipped because of an upstream failure. * `UPSTREAM_CANCELED`: The run was skipped because an upstream task was canceled.
+   A value indicating the run's result. The possible values are: * `SUCCESS`: The task completed successfully. * `FAILED`: The task completed with an error. * `TIMEDOUT`: The run was stopped after reaching the timeout. * `CANCELED`: The run was canceled at user request. * `MAXIMUM_CONCURRENT_RUNS_REACHED`: The run was skipped because the maximum concurrent runs were reached. * `EXCLUDED`: The run was skipped because the necessary conditions were not met. * `SUCCESS_WITH_FAILURES`: The job run completed successfully with some failures; leaf tasks were successful. * `UPSTREAM_FAILED`: The run was skipped because of an upstream failure. * `UPSTREAM_CANCELED`: The run was skipped because an upstream task was canceled. * `DISABLED`: The run was skipped because it was disabled explicitly by the user.
 
    .. py:attribute:: CANCELED
       :value: "CANCELED"
 
+   .. py:attribute:: DISABLED
+      :value: "DISABLED"
+
    .. py:attribute:: EXCLUDED
       :value: "EXCLUDED"
 
@@ -574,6 +637,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: RunStatus
+   :members:
+   :undoc-members:
+
 .. autoclass:: RunTask
    :members:
    :undoc-members:
@@ -730,6 +797,98 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. py:class:: TerminationCodeCode
+
+   The code indicates why the run was terminated. Additional codes might be introduced in future releases. * `SUCCESS`: The run was completed successfully. * `USER_CANCELED`: The run was successfully canceled during execution by a user. * `CANCELED`: The run was canceled during execution by the Databricks platform; for example, if the maximum run duration was exceeded. * `SKIPPED`: Run was never executed, for example, if the upstream task run failed, the dependency type condition was not met, or there were no material tasks to execute. * `INTERNAL_ERROR`: The run encountered an unexpected error. Refer to the state message for further details. * `DRIVER_ERROR`: The run encountered an error while communicating with the Spark Driver. * `CLUSTER_ERROR`: The run failed due to a cluster error. Refer to the state message for further details. * `REPOSITORY_CHECKOUT_FAILED`: Failed to complete the checkout due to an error when communicating with the third party service. * `INVALID_CLUSTER_REQUEST`: The run failed because it issued an invalid request to start the cluster. * `WORKSPACE_RUN_LIMIT_EXCEEDED`: The workspace has reached the quota for the maximum number of concurrent active runs. Consider scheduling the runs over a larger time frame. * `FEATURE_DISABLED`: The run failed because it tried to access a feature unavailable for the workspace. * `CLUSTER_REQUEST_LIMIT_EXCEEDED`: The number of cluster creation, start, and upsize requests have exceeded the allotted rate limit. Consider spreading the run execution over a larger time frame. * `STORAGE_ACCESS_ERROR`: The run failed due to an error when accessing the customer blob storage. Refer to the state message for further details. * `RUN_EXECUTION_ERROR`: The run was completed with task failures. For more details, refer to the state message or run output. * `UNAUTHORIZED_ERROR`: The run failed due to a permission issue while accessing a resource. Refer to the state message for further details. * `LIBRARY_INSTALLATION_ERROR`: The run failed while installing the user-requested library. Refer to the state message for further details. The causes might include, but are not limited to: The provided library is invalid, there are insufficient permissions to install the library, and so forth. * `MAX_CONCURRENT_RUNS_EXCEEDED`: The scheduled run exceeds the limit of maximum concurrent runs set for the job. * `MAX_SPARK_CONTEXTS_EXCEEDED`: The run is scheduled on a cluster that has already reached the maximum number of contexts it is configured to create. See: [Link]. * `RESOURCE_NOT_FOUND`: A resource necessary for run execution does not exist. Refer to the state message for further details. * `INVALID_RUN_CONFIGURATION`: The run failed due to an invalid configuration. Refer to the state message for further details. * `CLOUD_FAILURE`: The run failed due to a cloud provider issue. Refer to the state message for further details. * `MAX_JOB_QUEUE_SIZE_EXCEEDED`: The run was skipped due to reaching the job level queue size limit.
+   [Link]: https://kb.databricks.com/en_US/notebooks/too-many-execution-contexts-are-open-right-now
+
+   .. py:attribute:: CANCELED
+      :value: "CANCELED"
+
+   .. py:attribute:: CLOUD_FAILURE
+      :value: "CLOUD_FAILURE"
+
+   .. py:attribute:: CLUSTER_ERROR
+      :value: "CLUSTER_ERROR"
+
+   .. py:attribute:: CLUSTER_REQUEST_LIMIT_EXCEEDED
+      :value: "CLUSTER_REQUEST_LIMIT_EXCEEDED"
+
+   .. py:attribute:: DRIVER_ERROR
+      :value: "DRIVER_ERROR"
+
+   .. py:attribute:: FEATURE_DISABLED
+      :value: "FEATURE_DISABLED"
+
+   .. py:attribute:: INTERNAL_ERROR
+      :value: "INTERNAL_ERROR"
+
+   .. py:attribute:: INVALID_CLUSTER_REQUEST
+      :value: "INVALID_CLUSTER_REQUEST"
+
+   .. py:attribute:: INVALID_RUN_CONFIGURATION
+      :value: "INVALID_RUN_CONFIGURATION"
+
+   .. py:attribute:: LIBRARY_INSTALLATION_ERROR
+      :value: "LIBRARY_INSTALLATION_ERROR"
+
+   .. py:attribute:: MAX_CONCURRENT_RUNS_EXCEEDED
+      :value: "MAX_CONCURRENT_RUNS_EXCEEDED"
+
+   .. py:attribute:: MAX_JOB_QUEUE_SIZE_EXCEEDED
+      :value: "MAX_JOB_QUEUE_SIZE_EXCEEDED"
+
+   .. py:attribute:: MAX_SPARK_CONTEXTS_EXCEEDED
+      :value: "MAX_SPARK_CONTEXTS_EXCEEDED"
+
+   .. py:attribute:: REPOSITORY_CHECKOUT_FAILED
+      :value: "REPOSITORY_CHECKOUT_FAILED"
+
+   .. py:attribute:: RESOURCE_NOT_FOUND
+      :value: "RESOURCE_NOT_FOUND"
+
+   .. py:attribute:: RUN_EXECUTION_ERROR
+      :value: "RUN_EXECUTION_ERROR"
+
+   .. py:attribute:: SKIPPED
+      :value: "SKIPPED"
+
+   .. py:attribute:: STORAGE_ACCESS_ERROR
+      :value: "STORAGE_ACCESS_ERROR"
+
+   .. py:attribute:: SUCCESS
+      :value: "SUCCESS"
+
+   .. py:attribute:: UNAUTHORIZED_ERROR
+      :value: "UNAUTHORIZED_ERROR"
+
+   .. py:attribute:: USER_CANCELED
+      :value: "USER_CANCELED"
+
+   .. py:attribute:: WORKSPACE_RUN_LIMIT_EXCEEDED
+      :value: "WORKSPACE_RUN_LIMIT_EXCEEDED"
+
+.. autoclass:: TerminationDetails
+   :members:
+   :undoc-members:
+
+.. py:class:: TerminationTypeType
+
+   * `SUCCESS`: The run terminated without any issues * `INTERNAL_ERROR`: An error occurred in the Databricks platform. Please look at the [status page] or contact support if the issue persists. * `CLIENT_ERROR`: The run was terminated because of an error caused by user input or the job configuration. * `CLOUD_FAILURE`: The run was terminated because of an issue with your cloud provider.
+   [status page]: https://status.databricks.com/
+
+   .. py:attribute:: CLIENT_ERROR
+      :value: "CLIENT_ERROR"
+
+   .. py:attribute:: CLOUD_FAILURE
+      :value: "CLOUD_FAILURE"
+
+   .. py:attribute:: INTERNAL_ERROR
+      :value: "INTERNAL_ERROR"
+
+   .. py:attribute:: SUCCESS
+      :value: "SUCCESS"
+
 .. autoclass:: TriggerInfo
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/marketplace.rst b/docs/dbdataclasses/marketplace.rst
index 5204dd1ee..bb48967db 100644
--- a/docs/dbdataclasses/marketplace.rst
+++ b/docs/dbdataclasses/marketplace.rst
@@ -29,9 +29,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: ASSET_TYPE_NOTEBOOK
       :value: "ASSET_TYPE_NOTEBOOK"
 
-   .. py:attribute:: ASSET_TYPE_UNSPECIFIED
-      :value: "ASSET_TYPE_UNSPECIFIED"
-
 .. autoclass:: BatchGetListingsResponse
    :members:
    :undoc-members:
@@ -288,11 +285,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: FILE_STATUS_STAGING
       :value: "FILE_STATUS_STAGING"
 
-.. py:class:: FilterType
-
-   .. py:attribute:: METASTORE
-      :value: "METASTORE"
-
 .. py:class:: FulfillmentType
 
    .. py:attribute:: INSTALL
@@ -453,9 +445,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: LISTING_TAG_TYPE_TASK
       :value: "LISTING_TAG_TYPE_TASK"
 
-   .. py:attribute:: LISTING_TAG_TYPE_UNSPECIFIED
-      :value: "LISTING_TAG_TYPE_UNSPECIFIED"
-
 .. py:class:: ListingType
 
    .. py:attribute:: PERSONALIZED
@@ -494,29 +483,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: ProviderIconFile
-   :members:
-   :undoc-members:
-
-.. py:class:: ProviderIconType
-
-   .. py:attribute:: DARK
-      :value: "DARK"
-
-   .. py:attribute:: PRIMARY
-      :value: "PRIMARY"
-
-   .. py:attribute:: PROVIDER_ICON_TYPE_UNSPECIFIED
-      :value: "PROVIDER_ICON_TYPE_UNSPECIFIED"
-
 .. autoclass:: ProviderInfo
    :members:
    :undoc-members:
 
-.. autoclass:: ProviderListingSummaryInfo
-   :members:
-   :undoc-members:
-
 .. autoclass:: RegionInfo
    :members:
    :undoc-members:
@@ -545,20 +515,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. py:class:: SortBy
-
-   .. py:attribute:: SORT_BY_DATE
-      :value: "SORT_BY_DATE"
-
-   .. py:attribute:: SORT_BY_RELEVANCE
-      :value: "SORT_BY_RELEVANCE"
-
-   .. py:attribute:: SORT_BY_TITLE
-      :value: "SORT_BY_TITLE"
-
-   .. py:attribute:: SORT_BY_UNSPECIFIED
-      :value: "SORT_BY_UNSPECIFIED"
-
 .. autoclass:: TokenDetail
    :members:
    :undoc-members:
@@ -630,7 +586,3 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
    .. py:attribute:: PUBLIC
       :value: "PUBLIC"
-
-.. autoclass:: VisibilityFilter
-   :members:
-   :undoc-members:
diff --git a/docs/dbdataclasses/pipelines.rst b/docs/dbdataclasses/pipelines.rst
index 385bf2021..9f419f160 100644
--- a/docs/dbdataclasses/pipelines.rst
+++ b/docs/dbdataclasses/pipelines.rst
@@ -97,19 +97,19 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: ListPipelineEventsResponse
+.. autoclass:: IngestionPipelineDefinition
    :members:
    :undoc-members:
 
-.. autoclass:: ListPipelinesResponse
+.. autoclass:: ListPipelineEventsResponse
    :members:
    :undoc-members:
 
-.. autoclass:: ListUpdatesResponse
+.. autoclass:: ListPipelinesResponse
    :members:
    :undoc-members:
 
-.. autoclass:: ManagedIngestionPipelineDefinition
+.. autoclass:: ListUpdatesResponse
    :members:
    :undoc-members:
 
@@ -251,10 +251,24 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. py:class:: PipelineStateInfoHealth
+
+   The health of a pipeline.
+
+   .. py:attribute:: HEALTHY
+      :value: "HEALTHY"
+
+   .. py:attribute:: UNHEALTHY
+      :value: "UNHEALTHY"
+
 .. autoclass:: PipelineTrigger
    :members:
    :undoc-members:
 
+.. autoclass:: ReportSpec
+   :members:
+   :undoc-members:
+
 .. autoclass:: SchemaSpec
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/serving.rst b/docs/dbdataclasses/serving.rst
index 46cfe6a35..3deefc873 100644
--- a/docs/dbdataclasses/serving.rst
+++ b/docs/dbdataclasses/serving.rst
@@ -8,105 +8,82 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: AmazonBedrockConfig
+.. autoclass:: AiGatewayConfig
    :members:
    :undoc-members:
 
-.. py:class:: AmazonBedrockConfigBedrockProvider
-
-   The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: Anthropic, Cohere, AI21Labs, Amazon.
+.. autoclass:: AiGatewayGuardrailParameters
+   :members:
+   :undoc-members:
 
-   .. py:attribute:: AI21LABS
-      :value: "AI21LABS"
+.. autoclass:: AiGatewayGuardrailPiiBehavior
+   :members:
+   :undoc-members:
 
-   .. py:attribute:: AMAZON
-      :value: "AMAZON"
+.. py:class:: AiGatewayGuardrailPiiBehaviorBehavior
 
-   .. py:attribute:: ANTHROPIC
-      :value: "ANTHROPIC"
+   Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input guardrail and the request contains PII, the request is not sent to the model server and 400 status code is returned; if 'BLOCK' is set for the output guardrail and the model response contains PII, the PII info in the response is redacted and 400 status code is returned.
 
-   .. py:attribute:: COHERE
-      :value: "COHERE"
+   .. py:attribute:: BLOCK
+      :value: "BLOCK"
 
-.. autoclass:: AnthropicConfig
-   :members:
-   :undoc-members:
+   .. py:attribute:: NONE
+      :value: "NONE"
 
-.. autoclass:: App
+.. autoclass:: AiGatewayGuardrails
    :members:
    :undoc-members:
 
-.. autoclass:: AppDeployment
+.. autoclass:: AiGatewayInferenceTableConfig
    :members:
    :undoc-members:
 
-.. autoclass:: AppDeploymentArtifacts
+.. autoclass:: AiGatewayRateLimit
    :members:
    :undoc-members:
 
-.. py:class:: AppDeploymentMode
-
-   .. py:attribute:: AUTO_SYNC
-      :value: "AUTO_SYNC"
+.. py:class:: AiGatewayRateLimitKey
 
-   .. py:attribute:: MODE_UNSPECIFIED
-      :value: "MODE_UNSPECIFIED"
+   Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.
 
-   .. py:attribute:: SNAPSHOT
-      :value: "SNAPSHOT"
-
-.. py:class:: AppDeploymentState
-
-   .. py:attribute:: FAILED
-      :value: "FAILED"
+   .. py:attribute:: ENDPOINT
+      :value: "ENDPOINT"
 
-   .. py:attribute:: IN_PROGRESS
-      :value: "IN_PROGRESS"
+   .. py:attribute:: USER
+      :value: "USER"
 
-   .. py:attribute:: STATE_UNSPECIFIED
-      :value: "STATE_UNSPECIFIED"
+.. py:class:: AiGatewayRateLimitRenewalPeriod
 
-   .. py:attribute:: STOPPED
-      :value: "STOPPED"
+   Renewal period field for a rate limit. Currently, only 'minute' is supported.
 
-   .. py:attribute:: SUCCEEDED
-      :value: "SUCCEEDED"
+   .. py:attribute:: MINUTE
+      :value: "MINUTE"
 
-.. autoclass:: AppDeploymentStatus
+.. autoclass:: AiGatewayUsageTrackingConfig
    :members:
    :undoc-members:
 
-.. autoclass:: AppEnvironment
+.. autoclass:: AmazonBedrockConfig
    :members:
    :undoc-members:
 
-.. py:class:: AppState
-
-   .. py:attribute:: CREATING
-      :value: "CREATING"
-
-   .. py:attribute:: DELETED
-      :value: "DELETED"
-
-   .. py:attribute:: DELETING
-      :value: "DELETING"
+.. py:class:: AmazonBedrockConfigBedrockProvider
 
-   .. py:attribute:: ERROR
-      :value: "ERROR"
+   The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: Anthropic, Cohere, AI21Labs, Amazon.
 
-   .. py:attribute:: IDLE
-      :value: "IDLE"
+   .. py:attribute:: AI21LABS
+      :value: "AI21LABS"
 
-   .. py:attribute:: RUNNING
-      :value: "RUNNING"
+   .. py:attribute:: AMAZON
+      :value: "AMAZON"
 
-   .. py:attribute:: STARTING
-      :value: "STARTING"
+   .. py:attribute:: ANTHROPIC
+      :value: "ANTHROPIC"
 
-   .. py:attribute:: STATE_UNSPECIFIED
-      :value: "STATE_UNSPECIFIED"
+   .. py:attribute:: COHERE
+      :value: "COHERE"
 
-.. autoclass:: AppStatus
+.. autoclass:: AnthropicConfig
    :members:
    :undoc-members:
 
@@ -147,14 +124,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: CreateAppDeploymentRequest
-   :members:
-   :undoc-members:
-
-.. autoclass:: CreateAppRequest
-   :members:
-   :undoc-members:
-
 .. autoclass:: CreateServingEndpoint
    :members:
    :undoc-members:
@@ -212,6 +181,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: NOT_UPDATING
       :value: "NOT_UPDATING"
 
+   .. py:attribute:: UPDATE_CANCELED
+      :value: "UPDATE_CANCELED"
+
    .. py:attribute:: UPDATE_FAILED
       :value: "UPDATE_FAILED"
 
@@ -229,10 +201,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: EnvVariable
-   :members:
-   :undoc-members:
-
 .. autoclass:: ExportMetricsResponse
    :members:
    :undoc-members:
@@ -243,7 +211,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: ExternalModelProvider
 
-   The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'openai', and 'palm'.",
+   The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.",
 
    .. py:attribute:: AI21LABS
       :value: "AI21LABS"
@@ -260,6 +228,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: DATABRICKS_MODEL_SERVING
       :value: "DATABRICKS_MODEL_SERVING"
 
+   .. py:attribute:: GOOGLE_CLOUD_VERTEX_AI
+      :value: "GOOGLE_CLOUD_VERTEX_AI"
+
    .. py:attribute:: OPENAI
       :value: "OPENAI"
 
@@ -282,11 +253,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: ListAppDeploymentsResponse
-   :members:
-   :undoc-members:
-
-.. autoclass:: ListAppsResponse
+.. autoclass:: GoogleCloudVertexAiConfig
    :members:
    :undoc-members:
 
@@ -314,6 +281,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: PutAiGatewayResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: PutResponse
    :members:
    :undoc-members:
@@ -506,26 +477,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: StartAppRequest
-   :members:
-   :undoc-members:
-
-.. autoclass:: StopAppRequest
-   :members:
-   :undoc-members:
-
-.. autoclass:: StopAppResponse
-   :members:
-   :undoc-members:
-
 .. autoclass:: TrafficConfig
    :members:
    :undoc-members:
 
-.. autoclass:: UpdateAppRequest
-   :members:
-   :undoc-members:
-
 .. autoclass:: V1ResponseChoiceElement
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/settings.rst b/docs/dbdataclasses/settings.rst
index cc142abf3..12043e3c5 100644
--- a/docs/dbdataclasses/settings.rst
+++ b/docs/dbdataclasses/settings.rst
@@ -8,6 +8,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: BooleanMessage
+   :members:
+   :undoc-members:
+
 .. autoclass:: ClusterAutoRestartMessage
    :members:
    :undoc-members:
@@ -22,9 +26,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: ClusterAutoRestartMessageMaintenanceWindowDayOfWeek
 
-   .. py:attribute:: DAY_OF_WEEK_UNSPECIFIED
-      :value: "DAY_OF_WEEK_UNSPECIFIED"
-
    .. py:attribute:: FRIDAY
       :value: "FRIDAY"
 
@@ -73,9 +74,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: THIRD_OF_MONTH
       :value: "THIRD_OF_MONTH"
 
-   .. py:attribute:: WEEK_DAY_FREQUENCY_UNSPECIFIED
-      :value: "WEEK_DAY_FREQUENCY_UNSPECIFIED"
-
 .. autoclass:: ClusterAutoRestartMessageMaintenanceWindowWindowStartTime
    :members:
    :undoc-members:
@@ -92,8 +90,8 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
    Compliance stardard for SHIELD customers
 
-   .. py:attribute:: COMPLIANCE_STANDARD_UNSPECIFIED
-      :value: "COMPLIANCE_STANDARD_UNSPECIFIED"
+   .. py:attribute:: CANADA_PROTECTED_B
+      :value: "CANADA_PROTECTED_B"
 
    .. py:attribute:: CYBER_ESSENTIAL_PLUS
       :value: "CYBER_ESSENTIAL_PLUS"
@@ -122,6 +120,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: PCI_DSS
       :value: "PCI_DSS"
 
+.. autoclass:: Config
+   :members:
+   :undoc-members:
+
 .. autoclass:: CreateIpAccessList
    :members:
    :undoc-members:
@@ -134,6 +136,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: CreateNotificationDestinationRequest
+   :members:
+   :undoc-members:
+
 .. autoclass:: CreateOboTokenRequest
    :members:
    :undoc-members:
@@ -186,6 +192,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: DeleteDisableLegacyAccessResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: DeleteDisableLegacyDbfsResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: DeleteDisableLegacyFeaturesResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: DeleteNetworkConnectivityConfigurationResponse
    :members:
    :undoc-members:
@@ -202,6 +220,43 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. py:class:: DestinationType
+
+   .. py:attribute:: EMAIL
+      :value: "EMAIL"
+
+   .. py:attribute:: MICROSOFT_TEAMS
+      :value: "MICROSOFT_TEAMS"
+
+   .. py:attribute:: PAGERDUTY
+      :value: "PAGERDUTY"
+
+   .. py:attribute:: SLACK
+      :value: "SLACK"
+
+   .. py:attribute:: WEBHOOK
+      :value: "WEBHOOK"
+
+.. autoclass:: DisableLegacyAccess
+   :members:
+   :undoc-members:
+
+.. autoclass:: DisableLegacyDbfs
+   :members:
+   :undoc-members:
+
+.. autoclass:: DisableLegacyFeatures
+   :members:
+   :undoc-members:
+
+.. autoclass:: EmailConfig
+   :members:
+   :undoc-members:
+
+.. autoclass:: Empty
+   :members:
+   :undoc-members:
+
 .. autoclass:: EnhancedSecurityMonitoring
    :members:
    :undoc-members:
@@ -234,6 +289,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: GenericWebhookConfig
+   :members:
+   :undoc-members:
+
 .. autoclass:: GetIpAccessListResponse
    :members:
    :undoc-members:
@@ -266,6 +325,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: ListNotificationDestinationsResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: ListNotificationDestinationsResult
+   :members:
+   :undoc-members:
+
 .. autoclass:: ListPublicTokensResponse
    :members:
    :undoc-members:
@@ -285,6 +352,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: BLOCK
       :value: "BLOCK"
 
+.. autoclass:: MicrosoftTeamsConfig
+   :members:
+   :undoc-members:
+
 .. autoclass:: NccAwsStableIpRule
    :members:
    :undoc-members:
@@ -349,6 +420,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: NotificationDestination
+   :members:
+   :undoc-members:
+
+.. autoclass:: PagerdutyConfig
+   :members:
+   :undoc-members:
+
 .. autoclass:: PartitionId
    :members:
    :undoc-members:
@@ -395,9 +474,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: RESTRICT_TOKENS_AND_JOB_RUN_AS
       :value: "RESTRICT_TOKENS_AND_JOB_RUN_AS"
 
-   .. py:attribute:: STATUS_UNSPECIFIED
-      :value: "STATUS_UNSPECIFIED"
-
 .. autoclass:: RestrictWorkspaceAdminsSetting
    :members:
    :undoc-members:
@@ -414,6 +490,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: SlackConfig
+   :members:
+   :undoc-members:
+
 .. autoclass:: StringMessage
    :members:
    :undoc-members:
@@ -457,6 +537,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
    The type of token request. As of now, only `AZURE_ACTIVE_DIRECTORY_TOKEN` is supported.
 
+   .. py:attribute:: ARCLIGHT_AZURE_EXCHANGE_TOKEN
+      :value: "ARCLIGHT_AZURE_EXCHANGE_TOKEN"
+
    .. py:attribute:: AZURE_ACTIVE_DIRECTORY_TOKEN
       :value: "AZURE_ACTIVE_DIRECTORY_TOKEN"
 
@@ -476,6 +559,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: UpdateDisableLegacyAccessRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: UpdateDisableLegacyDbfsRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: UpdateDisableLegacyFeaturesRequest
+   :members:
+   :undoc-members:
+
 .. autoclass:: UpdateEnhancedSecurityMonitoringSettingRequest
    :members:
    :undoc-members:
@@ -488,6 +583,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: UpdateNotificationDestinationRequest
+   :members:
+   :undoc-members:
+
 .. autoclass:: UpdatePersonalComputeSettingRequest
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/sharing.rst b/docs/dbdataclasses/sharing.rst
index f25f3f575..ded587fe5 100644
--- a/docs/dbdataclasses/sharing.rst
+++ b/docs/dbdataclasses/sharing.rst
@@ -265,6 +265,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: EXECUTE
       :value: "EXECUTE"
 
+   .. py:attribute:: MANAGE
+      :value: "MANAGE"
+
    .. py:attribute:: MANAGE_ALLOWLIST
       :value: "MANAGE_ALLOWLIST"
 
diff --git a/docs/dbdataclasses/sql.rst b/docs/dbdataclasses/sql.rst
index adf3ced56..1657146c3 100644
--- a/docs/dbdataclasses/sql.rst
+++ b/docs/dbdataclasses/sql.rst
@@ -12,6 +12,49 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: AlertCondition
+   :members:
+   :undoc-members:
+
+.. autoclass:: AlertConditionOperand
+   :members:
+   :undoc-members:
+
+.. autoclass:: AlertConditionThreshold
+   :members:
+   :undoc-members:
+
+.. autoclass:: AlertOperandColumn
+   :members:
+   :undoc-members:
+
+.. autoclass:: AlertOperandValue
+   :members:
+   :undoc-members:
+
+.. py:class:: AlertOperator
+
+   .. py:attribute:: EQUAL
+      :value: "EQUAL"
+
+   .. py:attribute:: GREATER_THAN
+      :value: "GREATER_THAN"
+
+   .. py:attribute:: GREATER_THAN_OR_EQUAL
+      :value: "GREATER_THAN_OR_EQUAL"
+
+   .. py:attribute:: IS_NULL
+      :value: "IS_NULL"
+
+   .. py:attribute:: LESS_THAN
+      :value: "LESS_THAN"
+
+   .. py:attribute:: LESS_THAN_OR_EQUAL
+      :value: "LESS_THAN_OR_EQUAL"
+
+   .. py:attribute:: NOT_EQUAL
+      :value: "NOT_EQUAL"
+
 .. autoclass:: AlertOptions
    :members:
    :undoc-members:
@@ -35,8 +78,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: AlertState
 
-   State of the alert. Possible values are: `unknown` (yet to be evaluated), `triggered` (evaluated and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions).
-
    .. py:attribute:: OK
       :value: "OK"
 
@@ -73,9 +114,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: CHANNEL_NAME_PREVIEW
       :value: "CHANNEL_NAME_PREVIEW"
 
-   .. py:attribute:: CHANNEL_NAME_PREVIOUS
-      :value: "CHANNEL_NAME_PREVIOUS"
-
    .. py:attribute:: CHANNEL_NAME_UNSPECIFIED
       :value: "CHANNEL_NAME_UNSPECIFIED"
 
@@ -148,6 +186,30 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: CreateAlertRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: CreateAlertRequestAlert
+   :members:
+   :undoc-members:
+
+.. autoclass:: CreateQueryRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: CreateQueryRequestQuery
+   :members:
+   :undoc-members:
+
+.. autoclass:: CreateVisualizationRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: CreateVisualizationRequestVisualization
+   :members:
+   :undoc-members:
+
 .. autoclass:: CreateWarehouseRequest
    :members:
    :undoc-members:
@@ -193,6 +255,90 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. py:class:: DatePrecision
+
+   .. py:attribute:: DAY_PRECISION
+      :value: "DAY_PRECISION"
+
+   .. py:attribute:: MINUTE_PRECISION
+      :value: "MINUTE_PRECISION"
+
+   .. py:attribute:: SECOND_PRECISION
+      :value: "SECOND_PRECISION"
+
+.. autoclass:: DateRange
+   :members:
+   :undoc-members:
+
+.. autoclass:: DateRangeValue
+   :members:
+   :undoc-members:
+
+.. py:class:: DateRangeValueDynamicDateRange
+
+   .. py:attribute:: LAST_12_MONTHS
+      :value: "LAST_12_MONTHS"
+
+   .. py:attribute:: LAST_14_DAYS
+      :value: "LAST_14_DAYS"
+
+   .. py:attribute:: LAST_24_HOURS
+      :value: "LAST_24_HOURS"
+
+   .. py:attribute:: LAST_30_DAYS
+      :value: "LAST_30_DAYS"
+
+   .. py:attribute:: LAST_60_DAYS
+      :value: "LAST_60_DAYS"
+
+   .. py:attribute:: LAST_7_DAYS
+      :value: "LAST_7_DAYS"
+
+   .. py:attribute:: LAST_8_HOURS
+      :value: "LAST_8_HOURS"
+
+   .. py:attribute:: LAST_90_DAYS
+      :value: "LAST_90_DAYS"
+
+   .. py:attribute:: LAST_HOUR
+      :value: "LAST_HOUR"
+
+   .. py:attribute:: LAST_MONTH
+      :value: "LAST_MONTH"
+
+   .. py:attribute:: LAST_WEEK
+      :value: "LAST_WEEK"
+
+   .. py:attribute:: LAST_YEAR
+      :value: "LAST_YEAR"
+
+   .. py:attribute:: THIS_MONTH
+      :value: "THIS_MONTH"
+
+   .. py:attribute:: THIS_WEEK
+      :value: "THIS_WEEK"
+
+   .. py:attribute:: THIS_YEAR
+      :value: "THIS_YEAR"
+
+   .. py:attribute:: TODAY
+      :value: "TODAY"
+
+   .. py:attribute:: YESTERDAY
+      :value: "YESTERDAY"
+
+.. autoclass:: DateValue
+   :members:
+   :undoc-members:
+
+.. py:class:: DateValueDynamicDate
+
+   .. py:attribute:: NOW
+      :value: "NOW"
+
+   .. py:attribute:: YESTERDAY
+      :value: "YESTERDAY"
+
 .. autoclass:: DeleteResponse
    :members:
    :undoc-members:
@@ -203,13 +349,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: Disposition
 
-   The fetch disposition provides two modes of fetching results: `INLINE` and `EXTERNAL_LINKS`.
-   Statements executed with `INLINE` disposition will return result data inline, in `JSON_ARRAY` format, in a series of chunks. If a given statement produces a result set with a size larger than 25 MiB, that statement execution is aborted, and no result set will be available.
-   **NOTE** Byte limits are computed based upon internal representations of the result set data, and might not match the sizes visible in JSON responses.
-   Statements executed with `EXTERNAL_LINKS` disposition will return result data as external links: URLs that point to cloud storage internal to the workspace. Using `EXTERNAL_LINKS` disposition allows statements to generate arbitrarily sized result sets for fetching up to 100 GiB. The resulting links have two important properties:
-   1. They point to resources _external_ to the Databricks compute; therefore any associated authentication information (typically a personal access token, OAuth token, or similar) _must be removed_ when fetching from these links.
-   2. These are presigned URLs with a specific expiration, indicated in the response. The behavior when attempting to use an expired link is cloud specific.
-
    .. py:attribute:: EXTERNAL_LINKS
       :value: "EXTERNAL_LINKS"
 
@@ -241,6 +380,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: Empty
+   :members:
+   :undoc-members:
+
 .. autoclass:: EndpointConfPair
    :members:
    :undoc-members:
@@ -274,6 +417,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: EnumValue
+   :members:
+   :undoc-members:
+
 .. autoclass:: ExecuteStatementRequest
    :members:
    :undoc-members:
@@ -288,10 +435,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: CONTINUE
       :value: "CONTINUE"
 
-.. autoclass:: ExecuteStatementResponse
-   :members:
-   :undoc-members:
-
 .. autoclass:: ExternalLink
    :members:
    :undoc-members:
@@ -311,10 +454,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: GetStatementResponse
-   :members:
-   :undoc-members:
-
 .. autoclass:: GetWarehousePermissionLevelsResponse
    :members:
    :undoc-members:
@@ -353,6 +492,47 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: PASSTHROUGH
       :value: "PASSTHROUGH"
 
+.. autoclass:: LegacyAlert
+   :members:
+   :undoc-members:
+
+.. py:class:: LegacyAlertState
+
+   State of the alert. Possible values are: `unknown` (yet to be evaluated), `triggered` (evaluated and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions).
+
+   .. py:attribute:: OK
+      :value: "OK"
+
+   .. py:attribute:: TRIGGERED
+      :value: "TRIGGERED"
+
+   .. py:attribute:: UNKNOWN
+      :value: "UNKNOWN"
+
+.. autoclass:: LegacyQuery
+   :members:
+   :undoc-members:
+
+.. autoclass:: LegacyVisualization
+   :members:
+   :undoc-members:
+
+.. py:class:: LifecycleState
+
+   .. py:attribute:: ACTIVE
+      :value: "ACTIVE"
+
+   .. py:attribute:: TRASHED
+      :value: "TRASHED"
+
+.. autoclass:: ListAlertsResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: ListAlertsResponseAlert
+   :members:
+   :undoc-members:
+
 .. py:class:: ListOrder
 
    .. py:attribute:: CREATED_AT
@@ -365,10 +545,22 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: ListQueryObjectsResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: ListQueryObjectsResponseQuery
+   :members:
+   :undoc-members:
+
 .. autoclass:: ListResponse
    :members:
    :undoc-members:
 
+.. autoclass:: ListVisualizationsForQueryResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: ListWarehousesResponse
    :members:
    :undoc-members:
@@ -377,6 +569,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: NumericValue
+   :members:
+   :undoc-members:
+
 .. py:class:: ObjectType
 
    A singular noun object type.
@@ -467,7 +663,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: PlansState
 
-   Whether plans exist for the execution, or the reason why they are missing
+   Possible Reasons for which we have not saved plans in the database
 
    .. py:attribute:: EMPTY
       :value: "EMPTY"
@@ -491,6 +687,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: QueryBackedValue
+   :members:
+   :undoc-members:
+
 .. autoclass:: QueryEditContent
    :members:
    :undoc-members:
@@ -515,14 +715,16 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: QueryParameter
+   :members:
+   :undoc-members:
+
 .. autoclass:: QueryPostContent
    :members:
    :undoc-members:
 
 .. py:class:: QueryStatementType
 
-   Type of statement for this query
-
    .. py:attribute:: ALTER
       :value: "ALTER"
 
@@ -591,11 +793,17 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: QueryStatus
 
-   Query status with one the following values: * `QUEUED`: Query has been received and queued. * `RUNNING`: Query has started. * `CANCELED`: Query has been cancelled by the user. * `FAILED`: Query has failed. * `FINISHED`: Query has completed.
+   Statuses which are also used by OperationStatus in runtime
 
    .. py:attribute:: CANCELED
       :value: "CANCELED"
 
+   .. py:attribute:: COMPILED
+      :value: "COMPILED"
+
+   .. py:attribute:: COMPILING
+      :value: "COMPILING"
+
    .. py:attribute:: FAILED
       :value: "FAILED"
 
@@ -608,6 +816,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: RUNNING
       :value: "RUNNING"
 
+   .. py:attribute:: STARTED
+      :value: "STARTED"
+
 .. autoclass:: RepeatedEndpointConfPairs
    :members:
    :undoc-members:
@@ -628,6 +839,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. py:class:: RunAsMode
+
+   .. py:attribute:: OWNER
+      :value: "OWNER"
+
+   .. py:attribute:: VIEWER
+      :value: "VIEWER"
+
 .. py:class:: RunAsRole
 
    Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior)
@@ -754,6 +973,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: StatementResponse
+   :members:
+   :undoc-members:
+
 .. py:class:: StatementState
 
    Statement execution state: - `PENDING`: waiting for warehouse - `RUNNING`: running - `SUCCEEDED`: execution was successful, result data available for fetch - `FAILED`: execution failed; reason for failure described in accomanying error message - `CANCELED`: user canceled; can come from explicit cancel call, or timeout with `on_wait_timeout=CANCEL` - `CLOSED`: execution successful, and statement closed; result no longer available for fetch
@@ -1070,6 +1293,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: SUCCESS
       :value: "SUCCESS"
 
+.. autoclass:: TextValue
+   :members:
+   :undoc-members:
+
 .. autoclass:: TimeRange
    :members:
    :undoc-members:
@@ -1078,10 +1305,34 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: UpdateAlertRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: UpdateAlertRequestAlert
+   :members:
+   :undoc-members:
+
+.. autoclass:: UpdateQueryRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: UpdateQueryRequestQuery
+   :members:
+   :undoc-members:
+
 .. autoclass:: UpdateResponse
    :members:
    :undoc-members:
 
+.. autoclass:: UpdateVisualizationRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: UpdateVisualizationRequestVisualization
+   :members:
+   :undoc-members:
+
 .. autoclass:: User
    :members:
    :undoc-members:
@@ -1109,6 +1360,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: CAN_MANAGE
       :value: "CAN_MANAGE"
 
+   .. py:attribute:: CAN_MONITOR
+      :value: "CAN_MONITOR"
+
    .. py:attribute:: CAN_USE
       :value: "CAN_USE"
 
diff --git a/docs/dbdataclasses/workspace.rst b/docs/dbdataclasses/workspace.rst
index eaf70f9e0..9ff3eb66b 100644
--- a/docs/dbdataclasses/workspace.rst
+++ b/docs/dbdataclasses/workspace.rst
@@ -23,7 +23,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: CreateCredentials
+.. autoclass:: CreateCredentialsRequest
    :members:
    :undoc-members:
 
@@ -31,7 +31,11 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: CreateRepo
+.. autoclass:: CreateRepoRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: CreateRepoResponse
    :members:
    :undoc-members:
 
@@ -59,6 +63,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: DeleteCredentialsResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: DeleteRepoResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: DeleteResponse
    :members:
    :undoc-members:
@@ -111,6 +123,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: GetRepoResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: GetSecretResponse
    :members:
    :undoc-members:
@@ -171,6 +187,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: ListCredentialsResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: ListReposResponse
    :members:
    :undoc-members:
@@ -306,15 +326,19 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: UpdateCredentials
+.. autoclass:: UpdateCredentialsRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: UpdateCredentialsResponse
    :members:
    :undoc-members:
 
-.. autoclass:: UpdateRepo
+.. autoclass:: UpdateRepoRequest
    :members:
    :undoc-members:
 
-.. autoclass:: UpdateResponse
+.. autoclass:: UpdateRepoResponse
    :members:
    :undoc-members:
 
diff --git a/docs/gen-client-docs.py b/docs/gen-client-docs.py
index e6070b0f9..5c32beffe 100644
--- a/docs/gen-client-docs.py
+++ b/docs/gen-client-docs.py
@@ -247,6 +247,7 @@ class Generator:
         Package("vectorsearch", "Vector Search", "Create and query Vector Search indexes"),
         Package("dashboards", "Dashboards", "Manage Lakeview dashboards"),
         Package("marketplace", "Marketplace", "Manage AI and analytics assets such as ML models, notebooks, applications in an open marketplace"),
+        Package("apps", "Apps", "Build custom applications on Databricks"),
     ]
 
     def __init__(self):
@@ -258,7 +259,7 @@ def _openapi_spec(self) -> str:
                 return f.read()
         with open(f'{__dir__}/../.codegen/_openapi_sha') as f:
             sha = f.read().strip()
-        return subprocess.check_output(['deco', 'openapi', 'get', sha]).decode('utf-8')
+        return subprocess.check_output(['genkit', 'get', sha]).decode('utf-8')
 
     def _load_mapping(self) -> dict[str, Tag]:
         mapping = {}
@@ -341,8 +342,15 @@ def service_docs(self, client_inst, client_prefix: str) -> list[ServiceDoc]:
                 continue
             if service_name in ignore_client_fields:
                 continue
-            class_doc = service_inst.__doc__
+
             class_name = service_inst.__class__.__name__
+
+            # Use original class docstring for mixin classes
+            if class_name.endswith('Ext'):
+                class_doc = service_inst.__class__.__base__.__doc__
+            else:
+                class_doc = service_inst.__doc__
+
             print(f'Processing service {client_prefix}.{service_name}')
             all += self.service_docs(service_inst, client_prefix + "." + service_name)
 
diff --git a/docs/index.rst b/docs/index.rst
index a4873c43e..3d3a5dfc5 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -17,6 +17,7 @@ We are keen to hear feedback from you on these SDKs. Please `file GitHub issues
    pagination
    logging
    dbutils
+   dataplane
    clients/workspace
    workspace/index
    clients/account
diff --git a/docs/workspace/apps/apps.rst b/docs/workspace/apps/apps.rst
new file mode 100644
index 000000000..774e75b8b
--- /dev/null
+++ b/docs/workspace/apps/apps.rst
@@ -0,0 +1,234 @@
+``w.apps``: Apps
+================
+.. currentmodule:: databricks.sdk.service.apps
+
+.. py:class:: AppsAPI
+
+    Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend
+    Databricks services, and enable users to interact through single sign-on.
+
+    .. py:method:: create(name: str [, description: Optional[str], resources: Optional[List[AppResource]]]) -> Wait[App]
+
+        Create an app.
+        
+        Creates a new app.
+        
+        :param name: str
+          The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It
+          must be unique within the workspace.
+        :param description: str (optional)
+          The description of the app.
+        :param resources: List[:class:`AppResource`] (optional)
+          Resources for the app.
+        
+        :returns:
+          Long-running operation waiter for :class:`App`.
+          See :method:wait_get_app_active for more details.
+        
+
+    .. py:method:: create_and_wait(name: str [, description: Optional[str], resources: Optional[List[AppResource]], timeout: datetime.timedelta = 0:20:00]) -> App
+
+
+    .. py:method:: delete(name: str) -> App
+
+        Delete an app.
+        
+        Deletes an app.
+        
+        :param name: str
+          The name of the app.
+        
+        :returns: :class:`App`
+        
+
+    .. py:method:: deploy(app_name: str [, deployment_id: Optional[str], mode: Optional[AppDeploymentMode], source_code_path: Optional[str]]) -> Wait[AppDeployment]
+
+        Create an app deployment.
+        
+        Creates an app deployment for the app with the supplied name.
+        
+        :param app_name: str
+          The name of the app.
+        :param deployment_id: str (optional)
+          The unique id of the deployment.
+        :param mode: :class:`AppDeploymentMode` (optional)
+          The mode of which the deployment will manage the source code.
+        :param source_code_path: str (optional)
+          The workspace file system path of the source code used to create the app deployment. This is
+          different from `deployment_artifacts.source_code_path`, which is the path used by the deployed app.
+          The former refers to the original source code location of the app in the workspace during deployment
+          creation, whereas the latter provides a system generated stable snapshotted source code path used by
+          the deployment.
+        
+        :returns:
+          Long-running operation waiter for :class:`AppDeployment`.
+          See :method:wait_get_deployment_app_succeeded for more details.
+        
+
+    .. py:method:: deploy_and_wait(app_name: str [, deployment_id: Optional[str], mode: Optional[AppDeploymentMode], source_code_path: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> AppDeployment
+
+
+    .. py:method:: get(name: str) -> App
+
+        Get an app.
+        
+        Retrieves information for the app with the supplied name.
+        
+        :param name: str
+          The name of the app.
+        
+        :returns: :class:`App`
+        
+
+    .. py:method:: get_deployment(app_name: str, deployment_id: str) -> AppDeployment
+
+        Get an app deployment.
+        
+        Retrieves information for the app deployment with the supplied name and deployment id.
+        
+        :param app_name: str
+          The name of the app.
+        :param deployment_id: str
+          The unique id of the deployment.
+        
+        :returns: :class:`AppDeployment`
+        
+
+    .. py:method:: get_permission_levels(app_name: str) -> GetAppPermissionLevelsResponse
+
+        Get app permission levels.
+        
+        Gets the permission levels that a user can have on an object.
+        
+        :param app_name: str
+          The app for which to get or manage permissions.
+        
+        :returns: :class:`GetAppPermissionLevelsResponse`
+        
+
+    .. py:method:: get_permissions(app_name: str) -> AppPermissions
+
+        Get app permissions.
+        
+        Gets the permissions of an app. Apps can inherit permissions from their root object.
+        
+        :param app_name: str
+          The app for which to get or manage permissions.
+        
+        :returns: :class:`AppPermissions`
+        
+
+    .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[App]
+
+        List apps.
+        
+        Lists all apps in the workspace.
+        
+        :param page_size: int (optional)
+          Upper bound for items returned.
+        :param page_token: str (optional)
+          Pagination token to go to the next page of apps. Requests first page if absent.
+        
+        :returns: Iterator over :class:`App`
+        
+
+    .. py:method:: list_deployments(app_name: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[AppDeployment]
+
+        List app deployments.
+        
+        Lists all app deployments for the app with the supplied name.
+        
+        :param app_name: str
+          The name of the app.
+        :param page_size: int (optional)
+          Upper bound for items returned.
+        :param page_token: str (optional)
+          Pagination token to go to the next page of apps. Requests first page if absent.
+        
+        :returns: Iterator over :class:`AppDeployment`
+        
+
+    .. py:method:: set_permissions(app_name: str [, access_control_list: Optional[List[AppAccessControlRequest]]]) -> AppPermissions
+
+        Set app permissions.
+        
+        Sets permissions on an app. Apps can inherit permissions from their root object.
+        
+        :param app_name: str
+          The app for which to get or manage permissions.
+        :param access_control_list: List[:class:`AppAccessControlRequest`] (optional)
+        
+        :returns: :class:`AppPermissions`
+        
+
+    .. py:method:: start(name: str) -> Wait[App]
+
+        Start an app.
+        
+        Start the last active deployment of the app in the workspace.
+        
+        :param name: str
+          The name of the app.
+        
+        :returns:
+          Long-running operation waiter for :class:`App`.
+          See :method:wait_get_app_active for more details.
+        
+
+    .. py:method:: start_and_wait(name: str, timeout: datetime.timedelta = 0:20:00) -> App
+
+
+    .. py:method:: stop(name: str) -> Wait[App]
+
+        Stop an app.
+        
+        Stops the active deployment of the app in the workspace.
+        
+        :param name: str
+          The name of the app.
+        
+        :returns:
+          Long-running operation waiter for :class:`App`.
+          See :method:wait_get_app_stopped for more details.
+        
+
+    .. py:method:: stop_and_wait(name: str, timeout: datetime.timedelta = 0:20:00) -> App
+
+
+    .. py:method:: update(name: str [, description: Optional[str], resources: Optional[List[AppResource]]]) -> App
+
+        Update an app.
+        
+        Updates the app with the supplied name.
+        
+        :param name: str
+          The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It
+          must be unique within the workspace.
+        :param description: str (optional)
+          The description of the app.
+        :param resources: List[:class:`AppResource`] (optional)
+          Resources for the app.
+        
+        :returns: :class:`App`
+        
+
+    .. py:method:: update_permissions(app_name: str [, access_control_list: Optional[List[AppAccessControlRequest]]]) -> AppPermissions
+
+        Update app permissions.
+        
+        Updates the permissions on an app. Apps can inherit permissions from their root object.
+        
+        :param app_name: str
+          The app for which to get or manage permissions.
+        :param access_control_list: List[:class:`AppAccessControlRequest`] (optional)
+        
+        :returns: :class:`AppPermissions`
+        
+
+    .. py:method:: wait_get_app_active(name: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[App], None]]) -> App
+
+
+    .. py:method:: wait_get_app_stopped(name: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[App], None]]) -> App
+
+
+    .. py:method:: wait_get_deployment_app_succeeded(app_name: str, deployment_id: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[AppDeployment], None]]) -> AppDeployment
diff --git a/docs/workspace/apps/index.rst b/docs/workspace/apps/index.rst
new file mode 100644
index 000000000..bd21c93a5
--- /dev/null
+++ b/docs/workspace/apps/index.rst
@@ -0,0 +1,10 @@
+
+Apps
+====
+
+Build custom applications on Databricks
+
+.. toctree::
+   :maxdepth: 1
+
+   apps
\ No newline at end of file
diff --git a/docs/workspace/catalog/endpoints.rst b/docs/workspace/catalog/endpoints.rst
new file mode 100644
index 000000000..8c6efba40
--- /dev/null
+++ b/docs/workspace/catalog/endpoints.rst
@@ -0,0 +1,35 @@
+``w.endpoints``: Online Endpoints
+=================================
+.. currentmodule:: databricks.sdk.service.catalog
+
+.. py:class:: EndpointsAPI
+
+    Endpoints are used to connect to PG clusters.
+
+    .. py:method:: create( [, endpoint: Optional[Endpoint]]) -> Endpoint
+
+        Create an Endpoint.
+        
+        :param endpoint: :class:`Endpoint` (optional)
+          Endpoint
+        
+        :returns: :class:`Endpoint`
+        
+
+    .. py:method:: delete(name: str)
+
+        Delete an Endpoint.
+        
+        :param name: str
+        
+        
+        
+
+    .. py:method:: get(name: str) -> Endpoint
+
+        Get an Endpoint.
+        
+        :param name: str
+        
+        :returns: :class:`Endpoint`
+        
\ No newline at end of file
diff --git a/docs/workspace/catalog/external_locations.rst b/docs/workspace/catalog/external_locations.rst
index 3f6114f18..365007b09 100644
--- a/docs/workspace/catalog/external_locations.rst
+++ b/docs/workspace/catalog/external_locations.rst
@@ -15,7 +15,7 @@
     To create external locations, you must be a metastore admin or a user with the
     **CREATE_EXTERNAL_LOCATION** privilege.
 
-    .. py:method:: create(name: str, url: str, credential_name: str [, access_point: Optional[str], comment: Optional[str], encryption_details: Optional[EncryptionDetails], read_only: Optional[bool], skip_validation: Optional[bool]]) -> ExternalLocationInfo
+    .. py:method:: create(name: str, url: str, credential_name: str [, access_point: Optional[str], comment: Optional[str], encryption_details: Optional[EncryptionDetails], fallback: Optional[bool], read_only: Optional[bool], skip_validation: Optional[bool]]) -> ExternalLocationInfo
 
 
         Usage:
@@ -63,6 +63,10 @@
           User-provided free-form text description.
         :param encryption_details: :class:`EncryptionDetails` (optional)
           Encryption options that apply to clients connecting to cloud storage.
+        :param fallback: bool (optional)
+          Indicates whether fallback mode is enabled for this external location. When fallback mode is
+          enabled, the access to the location falls back to cluster credentials if UC credentials are not
+          sufficient.
         :param read_only: bool (optional)
           Indicates whether the external location is read-only.
         :param skip_validation: bool (optional)
@@ -163,7 +167,7 @@
         :returns: Iterator over :class:`ExternalLocationInfo`
         
 
-    .. py:method:: update(name: str [, access_point: Optional[str], comment: Optional[str], credential_name: Optional[str], encryption_details: Optional[EncryptionDetails], force: Optional[bool], isolation_mode: Optional[IsolationMode], new_name: Optional[str], owner: Optional[str], read_only: Optional[bool], skip_validation: Optional[bool], url: Optional[str]]) -> ExternalLocationInfo
+    .. py:method:: update(name: str [, access_point: Optional[str], comment: Optional[str], credential_name: Optional[str], encryption_details: Optional[EncryptionDetails], fallback: Optional[bool], force: Optional[bool], isolation_mode: Optional[IsolationMode], new_name: Optional[str], owner: Optional[str], read_only: Optional[bool], skip_validation: Optional[bool], url: Optional[str]]) -> ExternalLocationInfo
 
 
         Usage:
@@ -210,6 +214,10 @@
           Name of the storage credential used with this location.
         :param encryption_details: :class:`EncryptionDetails` (optional)
           Encryption options that apply to clients connecting to cloud storage.
+        :param fallback: bool (optional)
+          Indicates whether fallback mode is enabled for this external location. When fallback mode is
+          enabled, the access to the location falls back to cluster credentials if UC credentials are not
+          sufficient.
         :param force: bool (optional)
           Force update even if changing url invalidates dependent external tables or mounts.
         :param isolation_mode: :class:`IsolationMode` (optional)
diff --git a/docs/workspace/catalog/index.rst b/docs/workspace/catalog/index.rst
index 935804016..1372ca5a1 100644
--- a/docs/workspace/catalog/index.rst
+++ b/docs/workspace/catalog/index.rst
@@ -18,10 +18,12 @@ Configure data governance with Unity Catalog for metastores, catalogs, schemas,
    online_tables
    quality_monitors
    registered_models
+   resource_quotas
    schemas
    storage_credentials
    system_schemas
    table_constraints
    tables
+   temporary_table_credentials
    volumes
    workspace_bindings
\ No newline at end of file
diff --git a/docs/workspace/catalog/metastores.rst b/docs/workspace/catalog/metastores.rst
index f8a3c2872..01a936e0b 100644
--- a/docs/workspace/catalog/metastores.rst
+++ b/docs/workspace/catalog/metastores.rst
@@ -52,7 +52,8 @@
         :param metastore_id: str
           The unique ID of the metastore.
         :param default_catalog_name: str
-          The name of the default catalog in the metastore.
+          The name of the default catalog in the metastore. This field is deprecated. Please use "Default
+          Namespace API" to configure the default catalog for a Databricks workspace.
         
         
         
@@ -305,7 +306,8 @@
         :param workspace_id: int
           A workspace ID.
         :param default_catalog_name: str (optional)
-          The name of the default catalog for the metastore.
+          The name of the default catalog in the metastore. This field is deprecated. Please use "Default
+          Namespace API" to configure the default catalog for a Databricks workspace.
         :param metastore_id: str (optional)
           The unique ID of the metastore.
         
diff --git a/docs/workspace/catalog/model_versions.rst b/docs/workspace/catalog/model_versions.rst
index 017a6aa15..bae6f25f8 100644
--- a/docs/workspace/catalog/model_versions.rst
+++ b/docs/workspace/catalog/model_versions.rst
@@ -30,7 +30,7 @@
         
         
 
-    .. py:method:: get(full_name: str, version: int [, include_browse: Optional[bool]]) -> RegisteredModelInfo
+    .. py:method:: get(full_name: str, version: int [, include_aliases: Optional[bool], include_browse: Optional[bool]]) -> ModelVersionInfo
 
         Get a Model Version.
         
@@ -44,14 +44,16 @@
           The three-level (fully qualified) name of the model version
         :param version: int
           The integer version number of the model version
+        :param include_aliases: bool (optional)
+          Whether to include aliases associated with the model version in the response
         :param include_browse: bool (optional)
           Whether to include model versions in the response for which the principal can only access selective
           metadata for
         
-        :returns: :class:`RegisteredModelInfo`
+        :returns: :class:`ModelVersionInfo`
         
 
-    .. py:method:: get_by_alias(full_name: str, alias: str) -> ModelVersionInfo
+    .. py:method:: get_by_alias(full_name: str, alias: str [, include_aliases: Optional[bool]]) -> ModelVersionInfo
 
         Get Model Version By Alias.
         
@@ -65,6 +67,8 @@
           The three-level (fully qualified) name of the registered model
         :param alias: str
           The name of the alias
+        :param include_aliases: bool (optional)
+          Whether to include aliases associated with the model version in the response
         
         :returns: :class:`ModelVersionInfo`
         
diff --git a/docs/workspace/catalog/quality_monitors.rst b/docs/workspace/catalog/quality_monitors.rst
index 030094049..93f05b69a 100644
--- a/docs/workspace/catalog/quality_monitors.rst
+++ b/docs/workspace/catalog/quality_monitors.rst
@@ -166,6 +166,29 @@
         :returns: :class:`MonitorRefreshListResponse`
         
 
+    .. py:method:: regenerate_dashboard(table_name: str [, warehouse_id: Optional[str]]) -> RegenerateDashboardResponse
+
+        Regenerate a monitoring dashboard.
+        
+        Regenerates the monitoring dashboard for the specified table.
+        
+        The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
+        table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
+        - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an
+        owner of the table
+        
+        The call must be made from the workspace where the monitor was created. The dashboard will be
+        regenerated in the assets directory that was specified when the monitor was created.
+        
+        :param table_name: str
+          Full name of the table.
+        :param warehouse_id: str (optional)
+          Optional argument to specify the warehouse for dashboard regeneration. If not specified, the first
+          running warehouse will be used.
+        
+        :returns: :class:`RegenerateDashboardResponse`
+        
+
     .. py:method:: run_refresh(table_name: str) -> MonitorRefreshInfo
 
         Queue a metric refresh for a monitor.
diff --git a/docs/workspace/catalog/registered_models.rst b/docs/workspace/catalog/registered_models.rst
index 6a60c4f6d..b05a702b5 100644
--- a/docs/workspace/catalog/registered_models.rst
+++ b/docs/workspace/catalog/registered_models.rst
@@ -91,7 +91,7 @@
         
         
 
-    .. py:method:: get(full_name: str [, include_browse: Optional[bool]]) -> RegisteredModelInfo
+    .. py:method:: get(full_name: str [, include_aliases: Optional[bool], include_browse: Optional[bool]]) -> RegisteredModelInfo
 
         Get a Registered Model.
         
@@ -103,6 +103,8 @@
         
         :param full_name: str
           The three-level (fully qualified) name of the registered model
+        :param include_aliases: bool (optional)
+          Whether to include registered model aliases in the response
         :param include_browse: bool (optional)
           Whether to include registered models in the response for which the principal can only access
           selective metadata for
diff --git a/docs/workspace/catalog/resource_quotas.rst b/docs/workspace/catalog/resource_quotas.rst
new file mode 100644
index 000000000..3396011f0
--- /dev/null
+++ b/docs/workspace/catalog/resource_quotas.rst
@@ -0,0 +1,45 @@
+``w.resource_quotas``: Resource Quotas
+======================================
+.. currentmodule:: databricks.sdk.service.catalog
+
+.. py:class:: ResourceQuotasAPI
+
+    Unity Catalog enforces resource quotas on all securable objects, which limits the number of resources that
+    can be created. Quotas are expressed in terms of a resource type and a parent (for example, tables per
+    metastore or schemas per catalog). The resource quota APIs enable you to monitor your current usage and
+    limits. For more information on resource quotas see the [Unity Catalog documentation].
+    
+    [Unity Catalog documentation]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#resource-quotas
+
+    .. py:method:: get_quota(parent_securable_type: str, parent_full_name: str, quota_name: str) -> GetQuotaResponse
+
+        Get information for a single resource quota.
+        
+        The GetQuota API returns usage information for a single resource quota, defined as a child-parent
+        pair. This API also refreshes the quota count if it is out of date. Refreshes are triggered
+        asynchronously. The updated count might not be returned in the first call.
+        
+        :param parent_securable_type: str
+          Securable type of the quota parent.
+        :param parent_full_name: str
+          Full name of the parent resource. Provide the metastore ID if the parent is a metastore.
+        :param quota_name: str
+          Name of the quota. Follows the pattern of the quota type, with "-quota" added as a suffix.
+        
+        :returns: :class:`GetQuotaResponse`
+        
+
+    .. py:method:: list_quotas( [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[QuotaInfo]
+
+        List all resource quotas under a metastore.
+        
+        ListQuotas returns all quota values under the metastore. There are no SLAs on the freshness of the
+        counts returned. This API does not trigger a refresh of quota counts.
+        
+        :param max_results: int (optional)
+          The number of quotas to return.
+        :param page_token: str (optional)
+          Opaque token for the next page of results.
+        
+        :returns: Iterator over :class:`QuotaInfo`
+        
\ No newline at end of file
diff --git a/docs/workspace/catalog/schemas.rst b/docs/workspace/catalog/schemas.rst
index 1c9fcbbd0..feaf7c7a0 100644
--- a/docs/workspace/catalog/schemas.rst
+++ b/docs/workspace/catalog/schemas.rst
@@ -49,7 +49,7 @@
         :returns: :class:`SchemaInfo`
         
 
-    .. py:method:: delete(full_name: str)
+    .. py:method:: delete(full_name: str [, force: Optional[bool]])
 
         Delete a schema.
         
@@ -58,6 +58,8 @@
         
         :param full_name: str
           Full name of the schema.
+        :param force: bool (optional)
+          Force deletion even if the schema is not empty.
         
         
         
diff --git a/docs/workspace/catalog/system_schemas.rst b/docs/workspace/catalog/system_schemas.rst
index b9ab3b0f9..2028a3623 100644
--- a/docs/workspace/catalog/system_schemas.rst
+++ b/docs/workspace/catalog/system_schemas.rst
@@ -37,7 +37,7 @@
         
         
 
-    .. py:method:: list(metastore_id: str) -> Iterator[SystemSchemaInfo]
+    .. py:method:: list(metastore_id: str [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[SystemSchemaInfo]
 
         List system schemas.
         
@@ -46,6 +46,13 @@
         
         :param metastore_id: str
           The ID for the metastore in which the system schema resides.
+        :param max_results: int (optional)
+          Maximum number of schemas to return. - When set to 0, the page length is set to a server configured
+          value (recommended); - When set to a value greater than 0, the page length is the minimum of this
+          value and a server configured value; - When set to a value less than 0, an invalid parameter error
+          is returned; - If not set, all the schemas are returned (not recommended).
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
         
         :returns: Iterator over :class:`SystemSchemaInfo`
         
\ No newline at end of file
diff --git a/docs/workspace/catalog/tables.rst b/docs/workspace/catalog/tables.rst
index 6249f0da1..4cb458b46 100644
--- a/docs/workspace/catalog/tables.rst
+++ b/docs/workspace/catalog/tables.rst
@@ -45,7 +45,7 @@
         :returns: :class:`TableExistsResponse`
         
 
-    .. py:method:: get(full_name: str [, include_browse: Optional[bool], include_delta_metadata: Optional[bool]]) -> TableInfo
+    .. py:method:: get(full_name: str [, include_browse: Optional[bool], include_delta_metadata: Optional[bool], include_manifest_capabilities: Optional[bool]]) -> TableInfo
 
 
         Usage:
@@ -94,11 +94,13 @@
           for
         :param include_delta_metadata: bool (optional)
           Whether delta metadata should be included in the response.
+        :param include_manifest_capabilities: bool (optional)
+          Whether to include a manifest containing capabilities the table has.
         
         :returns: :class:`TableInfo`
         
 
-    .. py:method:: list(catalog_name: str, schema_name: str [, include_browse: Optional[bool], include_delta_metadata: Optional[bool], max_results: Optional[int], omit_columns: Optional[bool], omit_properties: Optional[bool], page_token: Optional[str]]) -> Iterator[TableInfo]
+    .. py:method:: list(catalog_name: str, schema_name: str [, include_browse: Optional[bool], include_delta_metadata: Optional[bool], include_manifest_capabilities: Optional[bool], max_results: Optional[int], omit_columns: Optional[bool], omit_properties: Optional[bool], page_token: Optional[str]]) -> Iterator[TableInfo]
 
 
         Usage:
@@ -138,6 +140,8 @@
           for
         :param include_delta_metadata: bool (optional)
           Whether delta metadata should be included in the response.
+        :param include_manifest_capabilities: bool (optional)
+          Whether to include a manifest containing capabilities the table has.
         :param max_results: int (optional)
           Maximum number of tables to return. If not set, all the tables are returned (not recommended). -
           when set to a value greater than 0, the page length is the minimum of this value and a server
@@ -153,7 +157,7 @@
         :returns: Iterator over :class:`TableInfo`
         
 
-    .. py:method:: list_summaries(catalog_name: str [, max_results: Optional[int], page_token: Optional[str], schema_name_pattern: Optional[str], table_name_pattern: Optional[str]]) -> Iterator[TableSummary]
+    .. py:method:: list_summaries(catalog_name: str [, include_manifest_capabilities: Optional[bool], max_results: Optional[int], page_token: Optional[str], schema_name_pattern: Optional[str], table_name_pattern: Optional[str]]) -> Iterator[TableSummary]
 
 
         Usage:
@@ -192,6 +196,8 @@
         
         :param catalog_name: str
           Name of parent catalog for tables of interest.
+        :param include_manifest_capabilities: bool (optional)
+          Whether to include a manifest containing capabilities the table has.
         :param max_results: int (optional)
           Maximum number of summaries for tables to return. If not set, the page length is set to a server
           configured value (10000, as of 1/5/2024). - when set to a value greater than 0, the page length is
diff --git a/docs/workspace/catalog/temporary_table_credentials.rst b/docs/workspace/catalog/temporary_table_credentials.rst
new file mode 100644
index 000000000..1acd462b7
--- /dev/null
+++ b/docs/workspace/catalog/temporary_table_credentials.rst
@@ -0,0 +1,36 @@
+``w.temporary_table_credentials``: Temporary Table Credentials
+==============================================================
+.. currentmodule:: databricks.sdk.service.catalog
+
+.. py:class:: TemporaryTableCredentialsAPI
+
+    Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud storage
+    locations where table data is stored in Databricks. These credentials are employed to provide secure and
+    time-limited access to data in cloud environments such as AWS, Azure, and Google Cloud. Each cloud
+    provider has its own type of credentials: AWS uses temporary session tokens via AWS Security Token
+    Service (STS), Azure utilizes Shared Access Signatures (SAS) for its data storage services, and Google
+    Cloud supports temporary credentials through OAuth 2.0. Temporary table credentials ensure that data
+    access is limited in scope and duration, reducing the risk of unauthorized access or misuse. To use the
+    temporary table credentials API, a metastore admin needs to enable the external_access_enabled flag (off
+    by default) at the metastore level, and a user needs to be granted the EXTERNAL USE SCHEMA permission at
+    the schema level by the catalog admin. Note that EXTERNAL USE SCHEMA is a schema-level permission that
+    can only be granted by the catalog admin explicitly and is not included in schema ownership or ALL
+    PRIVILEGES on the schema, for security reasons.
+
+    .. py:method:: generate_temporary_table_credentials( [, operation: Optional[TableOperation], table_id: Optional[str]]) -> GenerateTemporaryTableCredentialResponse
+
+        Generate a temporary table credential.
+        
+        Get a short-lived credential for directly accessing the table data on cloud storage. The metastore
+        must have external_access_enabled flag set to true (default false). The caller must have
+        EXTERNAL_USE_SCHEMA privilege on the parent schema and this privilege can only be granted by catalog
+        owners.
+        
+        :param operation: :class:`TableOperation` (optional)
+          The operation performed against the table data, either READ or READ_WRITE. If READ_WRITE is
+          specified, the credentials returned will have write permissions, otherwise, it will be read only.
+        :param table_id: str (optional)
+          UUID of the table to read or write.
+        
+        :returns: :class:`GenerateTemporaryTableCredentialResponse`
+        
\ No newline at end of file
diff --git a/docs/workspace/catalog/workspace_bindings.rst b/docs/workspace/catalog/workspace_bindings.rst
index e1ec753d4..08a74b29e 100644
--- a/docs/workspace/catalog/workspace_bindings.rst
+++ b/docs/workspace/catalog/workspace_bindings.rst
@@ -17,7 +17,7 @@
     the new path (/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}) which introduces the
     ability to bind a securable in READ_ONLY mode (catalogs only).
     
-    Securables that support binding: - catalog
+    Securable types that support binding: - catalog - storage_credential - external_location
 
     .. py:method:: get(name: str) -> CurrentWorkspaceBindings
 
@@ -50,19 +50,26 @@
         :returns: :class:`CurrentWorkspaceBindings`
         
 
-    .. py:method:: get_bindings(securable_type: str, securable_name: str) -> WorkspaceBindingsResponse
+    .. py:method:: get_bindings(securable_type: GetBindingsSecurableType, securable_name: str [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[WorkspaceBinding]
 
         Get securable workspace bindings.
         
         Gets workspace bindings of the securable. The caller must be a metastore admin or an owner of the
         securable.
         
-        :param securable_type: str
-          The type of the securable.
+        :param securable_type: :class:`GetBindingsSecurableType`
+          The type of the securable to bind to a workspace.
         :param securable_name: str
           The name of the securable.
+        :param max_results: int (optional)
+          Maximum number of workspace bindings to return. - When set to 0, the page length is set to a server
+          configured value (recommended); - When set to a value greater than 0, the page length is the minimum
+          of this value and a server configured value; - When set to a value less than 0, an invalid parameter
+          error is returned; - If not set, all the workspace bindings are returned (not recommended).
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
         
-        :returns: :class:`WorkspaceBindingsResponse`
+        :returns: Iterator over :class:`WorkspaceBinding`
         
 
     .. py:method:: update(name: str [, assign_workspaces: Optional[List[int]], unassign_workspaces: Optional[List[int]]]) -> CurrentWorkspaceBindings
@@ -103,15 +110,15 @@
         :returns: :class:`CurrentWorkspaceBindings`
         
 
-    .. py:method:: update_bindings(securable_type: str, securable_name: str [, add: Optional[List[WorkspaceBinding]], remove: Optional[List[WorkspaceBinding]]]) -> WorkspaceBindingsResponse
+    .. py:method:: update_bindings(securable_type: UpdateBindingsSecurableType, securable_name: str [, add: Optional[List[WorkspaceBinding]], remove: Optional[List[WorkspaceBinding]]]) -> WorkspaceBindingsResponse
 
         Update securable workspace bindings.
         
         Updates workspace bindings of the securable. The caller must be a metastore admin or an owner of the
         securable.
         
-        :param securable_type: str
-          The type of the securable.
+        :param securable_type: :class:`UpdateBindingsSecurableType`
+          The type of the securable to bind to a workspace.
         :param securable_name: str
           The name of the securable.
         :param add: List[:class:`WorkspaceBinding`] (optional)
diff --git a/docs/workspace/compute/cluster_policies.rst b/docs/workspace/compute/cluster_policies.rst
index b6e67acff..1cefc8ca6 100644
--- a/docs/workspace/compute/cluster_policies.rst
+++ b/docs/workspace/compute/cluster_policies.rst
@@ -22,7 +22,7 @@
     If no policies exist in the workspace, the Policy drop-down doesn't appear. Only admin users can create,
     edit, and delete policies. Admin users also have access to all policies.
 
-    .. py:method:: create(name: str [, definition: Optional[str], description: Optional[str], libraries: Optional[List[Library]], max_clusters_per_user: Optional[int], policy_family_definition_overrides: Optional[str], policy_family_id: Optional[str]]) -> CreatePolicyResponse
+    .. py:method:: create( [, definition: Optional[str], description: Optional[str], libraries: Optional[List[Library]], max_clusters_per_user: Optional[int], name: Optional[str], policy_family_definition_overrides: Optional[str], policy_family_id: Optional[str]]) -> CreatePolicyResponse
 
 
         Usage:
@@ -51,9 +51,6 @@
         
         Creates a new policy with prescribed settings.
         
-        :param name: str
-          Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100
-          characters.
         :param definition: str (optional)
           Policy definition document expressed in [Databricks Cluster Policy Definition Language].
           
@@ -66,6 +63,9 @@
         :param max_clusters_per_user: int (optional)
           Max number of clusters per user that can be active using this policy. If not present, there is no
           max limit.
+        :param name: str (optional)
+          Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100
+          characters.
         :param policy_family_definition_overrides: str (optional)
           Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON
           document must be passed as a string and cannot be embedded in the requests.
@@ -96,7 +96,7 @@
         
         
 
-    .. py:method:: edit(policy_id: str, name: str [, definition: Optional[str], description: Optional[str], libraries: Optional[List[Library]], max_clusters_per_user: Optional[int], policy_family_definition_overrides: Optional[str], policy_family_id: Optional[str]])
+    .. py:method:: edit(policy_id: str [, definition: Optional[str], description: Optional[str], libraries: Optional[List[Library]], max_clusters_per_user: Optional[int], name: Optional[str], policy_family_definition_overrides: Optional[str], policy_family_id: Optional[str]])
 
 
         Usage:
@@ -140,9 +140,6 @@
         
         :param policy_id: str
           The ID of the policy to update.
-        :param name: str
-          Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100
-          characters.
         :param definition: str (optional)
           Policy definition document expressed in [Databricks Cluster Policy Definition Language].
           
@@ -155,6 +152,9 @@
         :param max_clusters_per_user: int (optional)
           Max number of clusters per user that can be active using this policy. If not present, there is no
           max limit.
+        :param name: str (optional)
+          Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100
+          characters.
         :param policy_family_definition_overrides: str (optional)
           Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON
           document must be passed as a string and cannot be embedded in the requests.
@@ -205,7 +205,7 @@
         Get a cluster policy entity. Creation and editing is available to admins only.
         
         :param policy_id: str
-          Canonical unique identifier for the cluster policy.
+          Canonical unique identifier for the Cluster Policy.
         
         :returns: :class:`Policy`
         
diff --git a/docs/workspace/compute/clusters.rst b/docs/workspace/compute/clusters.rst
index 58362d05e..ac52edecb 100644
--- a/docs/workspace/compute/clusters.rst
+++ b/docs/workspace/compute/clusters.rst
@@ -21,9 +21,8 @@
     restart an all-purpose cluster. Multiple users can share such clusters to do collaborative interactive
     analysis.
     
-    IMPORTANT: Databricks retains cluster configuration information for up to 200 all-purpose clusters
-    terminated in the last 30 days and up to 30 job clusters recently terminated by the job scheduler. To keep
-    an all-purpose cluster configuration even after it has been terminated for more than 30 days, an
+    IMPORTANT: Databricks retains cluster configuration information for terminated clusters for 30 days. To
+    keep an all-purpose cluster configuration even after it has been terminated for more than 30 days, an
     administrator can pin a cluster to the cluster list.
 
     .. py:method:: change_owner(cluster_id: str, owner_username: str)
@@ -108,6 +107,11 @@
         If Databricks acquires at least 85% of the requested on-demand nodes, cluster creation will succeed.
         Otherwise the cluster will terminate with an informative error message.
         
+        Rather than authoring the cluster's JSON definition from scratch, Databricks recommends filling out
+        the [create compute UI] and then copying the generated JSON definition from the UI.
+        
+        [create compute UI]: https://docs.databricks.com/compute/configure.html
+        
         :param spark_version: str
           The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can be
           retrieved by using the :method:clusters/sparkVersions API call.
@@ -203,8 +207,13 @@
         :param policy_id: str (optional)
           The ID of the cluster policy used to create the cluster if applicable.
         :param runtime_engine: :class:`RuntimeEngine` (optional)
-          Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime engine
-          is inferred from spark_version.
+          Determines the cluster's runtime engine, either standard or Photon.
+          
+          This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
+          `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+          
+          If left unspecified, the runtime engine defaults to standard unless the spark_version contains
+          -photon-, in which case Photon will be used.
         :param single_user_name: str (optional)
           Single user name if data_security_mode is `SINGLE_USER`
         :param spark_conf: Dict[str,str] (optional)
@@ -426,8 +435,13 @@
         :param policy_id: str (optional)
           The ID of the cluster policy used to create the cluster if applicable.
         :param runtime_engine: :class:`RuntimeEngine` (optional)
-          Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime engine
-          is inferred from spark_version.
+          Determines the cluster's runtime engine, either standard or Photon.
+          
+          This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
+          `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+          
+          If left unspecified, the runtime engine defaults to standard unless the spark_version contains
+          -photon-, in which case Photon will be used.
         :param single_user_name: str (optional)
           Single user name if data_security_mode is `SINGLE_USER`
         :param spark_conf: Dict[str,str] (optional)
@@ -604,7 +618,7 @@
         :returns: :class:`ClusterPermissions`
         
 
-    .. py:method:: list( [, can_use_client: Optional[str]]) -> Iterator[ClusterDetails]
+    .. py:method:: list( [, filter_by: Optional[ListClustersFilterBy], page_size: Optional[int], page_token: Optional[str], sort_by: Optional[ListClustersSortBy]]) -> Iterator[ClusterDetails]
 
 
         Usage:
@@ -618,21 +632,21 @@
             
             all = w.clusters.list(compute.ListClustersRequest())
 
-        List all clusters.
-        
-        Return information about all pinned clusters, active clusters, up to 200 of the most recently
-        terminated all-purpose clusters in the past 30 days, and up to 30 of the most recently terminated job
-        clusters in the past 30 days.
+        List clusters.
         
-        For example, if there is 1 pinned cluster, 4 active clusters, 45 terminated all-purpose clusters in
-        the past 30 days, and 50 terminated job clusters in the past 30 days, then this API returns the 1
-        pinned cluster, 4 active clusters, all 45 terminated all-purpose clusters, and the 30 most recently
-        terminated job clusters.
+        Return information about all pinned and active clusters, and all clusters terminated within the last
+        30 days. Clusters terminated prior to this period are not included.
         
-        :param can_use_client: str (optional)
-          Filter clusters based on what type of client it can be used for. Could be either NOTEBOOKS or JOBS.
-          No input for this field will get all clusters in the workspace without filtering on its supported
-          client
+        :param filter_by: :class:`ListClustersFilterBy` (optional)
+          Filters to apply to the list of clusters.
+        :param page_size: int (optional)
+          Use this field to specify the maximum number of results to be returned by the server. The server may
+          further constrain the maximum number of results returned in a single page.
+        :param page_token: str (optional)
+          Use next_page_token or prev_page_token returned from the previous request to list the next or
+          previous page of clusters respectively.
+        :param sort_by: :class:`ListClustersSortBy` (optional)
+          Sort the list of clusters by a specific criteria.
         
         :returns: Iterator over :class:`ClusterDetails`
         
@@ -1000,6 +1014,37 @@
         
         
 
+    .. py:method:: update(cluster_id: str, update_mask: str [, cluster: Optional[UpdateClusterResource]]) -> Wait[ClusterDetails]
+
+        Update cluster configuration (partial).
+        
+        Updates the configuration of a cluster to match the partial set of attributes and size. Denote which
+        fields to update using the `update_mask` field in the request body. A cluster can be updated if it is
+        in a `RUNNING` or `TERMINATED` state. If a cluster is updated while in a `RUNNING` state, it will be
+        restarted so that the new attributes can take effect. If a cluster is updated while in a `TERMINATED`
+        state, it will remain `TERMINATED`. The updated attributes will take effect the next time the cluster
+        is started using the `clusters/start` API. Attempts to update a cluster in any other state will be
+        rejected with an `INVALID_STATE` error code. Clusters created by the Databricks Jobs service cannot be
+        updated.
+        
+        :param cluster_id: str
+          ID of the cluster.
+        :param update_mask: str
+          Specifies which fields of the cluster will be updated. This is required in the POST request. The
+          update mask should be supplied as a single string. To specify multiple fields, separate them with
+          commas (no spaces). To delete a field from a cluster configuration, add it to the `update_mask`
+          string but omit it from the `cluster` object.
+        :param cluster: :class:`UpdateClusterResource` (optional)
+          The cluster to be updated.
+        
+        :returns:
+          Long-running operation waiter for :class:`ClusterDetails`.
+          See :method:wait_get_cluster_running for more details.
+        
+
+    .. py:method:: update_and_wait(cluster_id: str, update_mask: str [, cluster: Optional[UpdateClusterResource], timeout: datetime.timedelta = 0:20:00]) -> ClusterDetails
+
+
     .. py:method:: update_permissions(cluster_id: str [, access_control_list: Optional[List[ClusterAccessControlRequest]]]) -> ClusterPermissions
 
         Update cluster permissions.
diff --git a/docs/workspace/compute/command_execution.rst b/docs/workspace/compute/command_execution.rst
index a5b94b5a5..916a48ba5 100644
--- a/docs/workspace/compute/command_execution.rst
+++ b/docs/workspace/compute/command_execution.rst
@@ -4,7 +4,8 @@
 
 .. py:class:: CommandExecutionAPI
 
-    This API allows execution of Python, Scala, SQL, or R commands on running Databricks Clusters.
+    This API allows execution of Python, Scala, SQL, or R commands on running Databricks Clusters. This API
+    only supports (classic) all-purpose clusters. Serverless compute is not supported.
 
     .. py:method:: cancel( [, cluster_id: Optional[str], command_id: Optional[str], context_id: Optional[str]]) -> Wait[CommandStatusResponse]
 
diff --git a/docs/workspace/compute/index.rst b/docs/workspace/compute/index.rst
index b13a21610..858cf70ff 100644
--- a/docs/workspace/compute/index.rst
+++ b/docs/workspace/compute/index.rst
@@ -14,4 +14,5 @@ Use and configure compute for Databricks
    instance_pools
    instance_profiles
    libraries
+   policy_compliance_for_clusters
    policy_families
\ No newline at end of file
diff --git a/docs/workspace/compute/policy_compliance_for_clusters.rst b/docs/workspace/compute/policy_compliance_for_clusters.rst
new file mode 100644
index 000000000..90c3aeb98
--- /dev/null
+++ b/docs/workspace/compute/policy_compliance_for_clusters.rst
@@ -0,0 +1,71 @@
+``w.policy_compliance_for_clusters``: Policy compliance for clusters
+====================================================================
+.. currentmodule:: databricks.sdk.service.compute
+
+.. py:class:: PolicyComplianceForClustersAPI
+
+    The policy compliance APIs allow you to view and manage the policy compliance status of clusters in your
+    workspace.
+    
+    A cluster is compliant with its policy if its configuration satisfies all its policy rules. Clusters could
+    be out of compliance if their policy was updated after the cluster was last edited.
+    
+    The get and list compliance APIs allow you to view the policy compliance status of a cluster. The enforce
+    compliance API allows you to update a cluster to be compliant with the current version of its policy.
+
+    .. py:method:: enforce_compliance(cluster_id: str [, validate_only: Optional[bool]]) -> EnforceClusterComplianceResponse
+
+        Enforce cluster policy compliance.
+        
+        Updates a cluster to be compliant with the current version of its policy. A cluster can be updated if
+        it is in a `RUNNING` or `TERMINATED` state.
+        
+        If a cluster is updated while in a `RUNNING` state, it will be restarted so that the new attributes
+        can take effect.
+        
+        If a cluster is updated while in a `TERMINATED` state, it will remain `TERMINATED`. The next time the
+        cluster is started, the new attributes will take effect.
+        
+        Clusters created by the Databricks Jobs, DLT, or Models services cannot be enforced by this API.
+        Instead, use the "Enforce job policy compliance" API to enforce policy compliance on jobs.
+        
+        :param cluster_id: str
+          The ID of the cluster you want to enforce policy compliance on.
+        :param validate_only: bool (optional)
+          If set, previews the changes that would be made to a cluster to enforce compliance but does not
+          update the cluster.
+        
+        :returns: :class:`EnforceClusterComplianceResponse`
+        
+
+    .. py:method:: get_compliance(cluster_id: str) -> GetClusterComplianceResponse
+
+        Get cluster policy compliance.
+        
+        Returns the policy compliance status of a cluster. Clusters could be out of compliance if their policy
+        was updated after the cluster was last edited.
+        
+        :param cluster_id: str
+          The ID of the cluster to get the compliance status
+        
+        :returns: :class:`GetClusterComplianceResponse`
+        
+
+    .. py:method:: list_compliance(policy_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ClusterCompliance]
+
+        List cluster policy compliance.
+        
+        Returns the policy compliance status of all clusters that use a given policy. Clusters could be out of
+        compliance if their policy was updated after the cluster was last edited.
+        
+        :param policy_id: str
+          Canonical unique identifier for the cluster policy.
+        :param page_size: int (optional)
+          Use this field to specify the maximum number of results to be returned by the server. The server may
+          further constrain the maximum number of results returned in a single page.
+        :param page_token: str (optional)
+          A page token that can be used to navigate to the next page or previous page as returned by
+          `next_page_token` or `prev_page_token`.
+        
+        :returns: Iterator over :class:`ClusterCompliance`
+        
\ No newline at end of file
diff --git a/docs/workspace/compute/policy_families.rst b/docs/workspace/compute/policy_families.rst
index 43194ef01..56e4f4275 100644
--- a/docs/workspace/compute/policy_families.rst
+++ b/docs/workspace/compute/policy_families.rst
@@ -14,7 +14,7 @@
     policy family. Cluster policies created using a policy family inherit the policy family's policy
     definition.
 
-    .. py:method:: get(policy_family_id: str) -> PolicyFamily
+    .. py:method:: get(policy_family_id: str [, version: Optional[int]]) -> PolicyFamily
 
 
         Usage:
@@ -32,9 +32,12 @@
 
         Get policy family information.
         
-        Retrieve the information for an policy family based on its identifier.
+        Retrieve the information for a policy family based on its identifier and version.
         
         :param policy_family_id: str
+          The family ID about which to retrieve information.
+        :param version: int (optional)
+          The version number for the family to fetch. Defaults to the latest version.
         
         :returns: :class:`PolicyFamily`
         
@@ -55,10 +58,11 @@
 
         List policy families.
         
-        Retrieve a list of policy families. This API is paginated.
+        Returns the list of policy definition types available to use at their latest version. This API is
+        paginated.
         
         :param max_results: int (optional)
-          The max number of policy families to return.
+          Maximum number of policy families to return.
         :param page_token: str (optional)
           A token that can be used to get the next page of results.
         
diff --git a/docs/workspace/dashboards/genie.rst b/docs/workspace/dashboards/genie.rst
new file mode 100644
index 000000000..5581870b9
--- /dev/null
+++ b/docs/workspace/dashboards/genie.rst
@@ -0,0 +1,102 @@
+``w.genie``: Genie
+==================
+.. currentmodule:: databricks.sdk.service.dashboards
+
+.. py:class:: GenieAPI
+
+    Genie provides a no-code experience for business users, powered by AI/BI. Analysts set up spaces that
+    business users can use to ask questions using natural language. Genie uses data registered to Unity
+    Catalog and requires at least CAN USE permission on a Pro or Serverless SQL warehouse. Also, Databricks
+    Assistant must be enabled.
+
+    .. py:method:: create_message(space_id: str, conversation_id: str, content: str) -> Wait[GenieMessage]
+
+        Create conversation message.
+        
+        Create new message in [conversation](:method:genie/startconversation). The AI response uses all
+        previously created messages in the conversation to respond.
+        
+        :param space_id: str
+          The ID associated with the Genie space where the conversation is started.
+        :param conversation_id: str
+          The ID associated with the conversation.
+        :param content: str
+          User message content.
+        
+        :returns:
+          Long-running operation waiter for :class:`GenieMessage`.
+          See :method:wait_get_message_genie_completed for more details.
+        
+
+    .. py:method:: create_message_and_wait(space_id: str, conversation_id: str, content: str, timeout: datetime.timedelta = 0:20:00) -> GenieMessage
+
+
+    .. py:method:: execute_message_query(space_id: str, conversation_id: str, message_id: str) -> GenieGetMessageQueryResultResponse
+
+        Execute SQL query in a conversation message.
+        
+        Execute the SQL query in the message.
+        
+        :param space_id: str
+          Genie space ID
+        :param conversation_id: str
+          Conversation ID
+        :param message_id: str
+          Message ID
+        
+        :returns: :class:`GenieGetMessageQueryResultResponse`
+        
+
+    .. py:method:: get_message(space_id: str, conversation_id: str, message_id: str) -> GenieMessage
+
+        Get conversation message.
+        
+        Get message from conversation.
+        
+        :param space_id: str
+          The ID associated with the Genie space where the target conversation is located.
+        :param conversation_id: str
+          The ID associated with the target conversation.
+        :param message_id: str
+          The ID associated with the target message from the identified conversation.
+        
+        :returns: :class:`GenieMessage`
+        
+
+    .. py:method:: get_message_query_result(space_id: str, conversation_id: str, message_id: str) -> GenieGetMessageQueryResultResponse
+
+        Get conversation message SQL query result.
+        
+        Get the result of SQL query if the message has a query attachment. This is only available if a message
+        has a query attachment and the message status is `EXECUTING_QUERY`.
+        
+        :param space_id: str
+          Genie space ID
+        :param conversation_id: str
+          Conversation ID
+        :param message_id: str
+          Message ID
+        
+        :returns: :class:`GenieGetMessageQueryResultResponse`
+        
+
+    .. py:method:: start_conversation(space_id: str, content: str) -> Wait[GenieMessage]
+
+        Start conversation.
+        
+        Start a new conversation.
+        
+        :param space_id: str
+          The ID associated with the Genie space where you want to start a conversation.
+        :param content: str
+          The text of the message that starts the conversation.
+        
+        :returns:
+          Long-running operation waiter for :class:`GenieMessage`.
+          See :method:wait_get_message_genie_completed for more details.
+        
+
+    .. py:method:: start_conversation_and_wait(space_id: str, content: str, timeout: datetime.timedelta = 0:20:00) -> GenieMessage
+
+
+    .. py:method:: wait_get_message_genie_completed(conversation_id: str, message_id: str, space_id: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[GenieMessage], None]]) -> GenieMessage
diff --git a/docs/workspace/dashboards/index.rst b/docs/workspace/dashboards/index.rst
index 756c9b549..6d1565bb6 100644
--- a/docs/workspace/dashboards/index.rst
+++ b/docs/workspace/dashboards/index.rst
@@ -7,4 +7,5 @@ Manage Lakeview dashboards
 .. toctree::
    :maxdepth: 1
 
+   genie
    lakeview
\ No newline at end of file
diff --git a/docs/workspace/dashboards/lakeview.rst b/docs/workspace/dashboards/lakeview.rst
index 17f82960e..fe358063c 100644
--- a/docs/workspace/dashboards/lakeview.rst
+++ b/docs/workspace/dashboards/lakeview.rst
@@ -17,9 +17,14 @@
           The display name of the dashboard.
         :param parent_path: str (optional)
           The workspace path of the folder containing the dashboard. Includes leading slash and no trailing
-          slash.
+          slash. This field is excluded in List Dashboards responses.
         :param serialized_dashboard: str (optional)
-          The contents of the dashboard in serialized string form.
+          The contents of the dashboard in serialized string form. This field is excluded in List Dashboards
+          responses. Use the [get dashboard API] to retrieve an example response, which includes the
+          `serialized_dashboard` field. This field provides the structure of the JSON string that represents
+          the dashboard's layout and components.
+          
+          [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get
         :param warehouse_id: str (optional)
           The warehouse ID used to run the dashboard.
         
@@ -151,8 +156,7 @@
           The flag to include dashboards located in the trash. If unspecified, only active dashboards will be
           returned.
         :param view: :class:`DashboardView` (optional)
-          Indicates whether to include all metadata from the dashboard in the response. If unset, the response
-          defaults to `DASHBOARD_VIEW_BASIC` which only includes summary metadata from the dashboard.
+          `DASHBOARD_VIEW_BASIC` only includes summary metadata from the dashboard.
         
         :returns: Iterator over :class:`Dashboard`
         
@@ -258,9 +262,14 @@
           The display name of the dashboard.
         :param etag: str (optional)
           The etag for the dashboard. Can be optionally provided on updates to ensure that the dashboard has
-          not been modified since the last read.
+          not been modified since the last read. This field is excluded in List Dashboards responses.
         :param serialized_dashboard: str (optional)
-          The contents of the dashboard in serialized string form.
+          The contents of the dashboard in serialized string form. This field is excluded in List Dashboards
+          responses. Use the [get dashboard API] to retrieve an example response, which includes the
+          `serialized_dashboard` field. This field provides the structure of the JSON string that represents
+          the dashboard's layout and components.
+          
+          [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get
         :param warehouse_id: str (optional)
           The warehouse ID used to run the dashboard.
         
diff --git a/docs/workspace/iam/permission_migration.rst b/docs/workspace/iam/permission_migration.rst
index 16d15f734..8eef6e0e1 100644
--- a/docs/workspace/iam/permission_migration.rst
+++ b/docs/workspace/iam/permission_migration.rst
@@ -1,20 +1,17 @@
-``w.permission_migration``: Permission Migration
-================================================
+``w.permission_migration``: PermissionMigration
+===============================================
 .. currentmodule:: databricks.sdk.service.iam
 
 .. py:class:: PermissionMigrationAPI
 
-    This spec contains undocumented permission migration APIs used in https://github.com/databrickslabs/ucx.
+    APIs for migrating acl permissions, used only by the ucx tool: https://github.com/databrickslabs/ucx
 
-    .. py:method:: migrate_permissions(workspace_id: int, from_workspace_group_name: str, to_account_group_name: str [, size: Optional[int]]) -> PermissionMigrationResponse
+    .. py:method:: migrate_permissions(workspace_id: int, from_workspace_group_name: str, to_account_group_name: str [, size: Optional[int]]) -> MigratePermissionsResponse
 
         Migrate Permissions.
         
-        Migrate a batch of permissions from a workspace local group to an account group.
-        
         :param workspace_id: int
-          WorkspaceId of the associated workspace where the permission migration will occur. Both workspace
-          group and account group must be in this workspace.
+          WorkspaceId of the associated workspace where the permission migration will occur.
         :param from_workspace_group_name: str
           The name of the workspace group that permissions will be migrated from.
         :param to_account_group_name: str
@@ -22,5 +19,5 @@
         :param size: int (optional)
           The maximum number of permissions that will be migrated.
         
-        :returns: :class:`PermissionMigrationResponse`
+        :returns: :class:`MigratePermissionsResponse`
         
\ No newline at end of file
diff --git a/docs/workspace/iam/permissions.rst b/docs/workspace/iam/permissions.rst
index 47ff4f37f..1f2fd2851 100644
--- a/docs/workspace/iam/permissions.rst
+++ b/docs/workspace/iam/permissions.rst
@@ -7,6 +7,8 @@
     Permissions API are used to create read, write, edit, update and manage access for various users on
     different objects and endpoints.
     
+    * **[Apps permissions](:service:apps)** — Manage which users can manage or use apps.
+    
     * **[Cluster permissions](:service:clusters)** — Manage which users can manage, restart, or attach to
     clusters.
     
@@ -42,7 +44,7 @@
     * **[Token permissions](:service:tokenmanagement)** — Manage which users can create or use tokens.
     
     * **[Workspace object permissions](:service:workspace)** — Manage which users can read, run, edit, or
-    manage directories, files, and notebooks.
+    manage alerts, dbsql-dashboards, directories, files, notebooks and queries.
     
     For the mapping of the required permissions for specific actions or abilities and other important
     information, see [Access Control].
@@ -78,9 +80,9 @@
         object.
         
         :param request_object_type: str
-          The type of the request object. Can be one of the following: authorization, clusters,
-          cluster-policies, directories, experiments, files, instance-pools, jobs, notebooks, pipelines,
-          registered-models, repos, serving-endpoints, or warehouses.
+          The type of the request object. Can be one of the following: alerts, authorization, clusters,
+          cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools,
+          jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.
         :param request_object_id: str
           The id of the request object.
         
@@ -155,9 +157,9 @@
         object.
         
         :param request_object_type: str
-          The type of the request object. Can be one of the following: authorization, clusters,
-          cluster-policies, directories, experiments, files, instance-pools, jobs, notebooks, pipelines,
-          registered-models, repos, serving-endpoints, or warehouses.
+          The type of the request object. Can be one of the following: alerts, authorization, clusters,
+          cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools,
+          jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.
         :param request_object_id: str
           The id of the request object.
         :param access_control_list: List[:class:`AccessControlRequest`] (optional)
@@ -173,9 +175,9 @@
         root object.
         
         :param request_object_type: str
-          The type of the request object. Can be one of the following: authorization, clusters,
-          cluster-policies, directories, experiments, files, instance-pools, jobs, notebooks, pipelines,
-          registered-models, repos, serving-endpoints, or warehouses.
+          The type of the request object. Can be one of the following: alerts, authorization, clusters,
+          cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools,
+          jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.
         :param request_object_id: str
           The id of the request object.
         :param access_control_list: List[:class:`AccessControlRequest`] (optional)
diff --git a/docs/workspace/index.rst b/docs/workspace/index.rst
index 4d7eabff8..1b6c5708c 100644
--- a/docs/workspace/index.rst
+++ b/docs/workspace/index.rst
@@ -7,6 +7,7 @@ These APIs are available from WorkspaceClient
 .. toctree::
    :maxdepth: 1
 
+   apps/index
    catalog/index
    compute/index
    dashboards/index
diff --git a/docs/workspace/jobs/index.rst b/docs/workspace/jobs/index.rst
index a8f242ea2..0729f8dce 100644
--- a/docs/workspace/jobs/index.rst
+++ b/docs/workspace/jobs/index.rst
@@ -7,4 +7,5 @@ Schedule automated jobs on Databricks Workspaces
 .. toctree::
    :maxdepth: 1
 
-   jobs
\ No newline at end of file
+   jobs
+   policy_compliance_for_jobs
\ No newline at end of file
diff --git a/docs/workspace/jobs/jobs.rst b/docs/workspace/jobs/jobs.rst
index 773f6fb85..b097c94c8 100644
--- a/docs/workspace/jobs/jobs.rst
+++ b/docs/workspace/jobs/jobs.rst
@@ -120,7 +120,7 @@
     .. py:method:: cancel_run_and_wait(run_id: int, timeout: datetime.timedelta = 0:20:00) -> Run
 
 
-    .. py:method:: create( [, access_control_list: Optional[List[iam.AccessControlRequest]], continuous: Optional[Continuous], deployment: Optional[JobDeployment], description: Optional[str], edit_mode: Optional[JobEditMode], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], format: Optional[Format], git_source: Optional[GitSource], health: Optional[JobsHealthRules], job_clusters: Optional[List[JobCluster]], max_concurrent_runs: Optional[int], name: Optional[str], notification_settings: Optional[JobNotificationSettings], parameters: Optional[List[JobParameterDefinition]], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], schedule: Optional[CronSchedule], tags: Optional[Dict[str, str]], tasks: Optional[List[Task]], timeout_seconds: Optional[int], trigger: Optional[TriggerSettings], webhook_notifications: Optional[WebhookNotifications]]) -> CreateResponse
+    .. py:method:: create( [, access_control_list: Optional[List[JobAccessControlRequest]], budget_policy_id: Optional[str], continuous: Optional[Continuous], deployment: Optional[JobDeployment], description: Optional[str], edit_mode: Optional[JobEditMode], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], format: Optional[Format], git_source: Optional[GitSource], health: Optional[JobsHealthRules], job_clusters: Optional[List[JobCluster]], max_concurrent_runs: Optional[int], name: Optional[str], notification_settings: Optional[JobNotificationSettings], parameters: Optional[List[JobParameterDefinition]], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], schedule: Optional[CronSchedule], tags: Optional[Dict[str, str]], tasks: Optional[List[Task]], timeout_seconds: Optional[int], trigger: Optional[TriggerSettings], webhook_notifications: Optional[WebhookNotifications]]) -> CreateResponse
 
 
         Usage:
@@ -156,15 +156,19 @@
         
         Create a new job.
         
-        :param access_control_list: List[:class:`AccessControlRequest`] (optional)
+        :param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
           List of permissions to set on the job.
+        :param budget_policy_id: str (optional)
+          The id of the user specified budget policy to use for this job. If not specified, a default budget
+          policy may be applied when creating or modifying the job. See `effective_budget_policy_id` for the
+          budget policy used by this workload.
         :param continuous: :class:`Continuous` (optional)
           An optional continuous property for this job. The continuous property will ensure that there is
           always one run executing. Only one of `schedule` and `continuous` can be used.
         :param deployment: :class:`JobDeployment` (optional)
           Deployment information for jobs managed by external sources.
         :param description: str (optional)
-          An optional description for the job. The maximum length is 1024 characters in UTF-8 encoding.
+          An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding.
         :param edit_mode: :class:`JobEditMode` (optional)
           Edit mode of the job.
           
@@ -174,7 +178,10 @@
           An optional set of email addresses that is notified when runs of this job begin or complete as well
           as when this job is deleted.
         :param environments: List[:class:`JobEnvironment`] (optional)
-          A list of task execution environment specifications that can be referenced by tasks of this job.
+          A list of task execution environment specifications that can be referenced by serverless tasks of
+          this job. An environment is required to be present for serverless tasks. For serverless notebook
+          tasks, the environment is accessible in the notebook environment panel. For other serverless tasks,
+          the task environment is required to be specified using environment_key in the task settings.
         :param format: :class:`Format` (optional)
           Used to tell what is the format of the job. This field is ignored in Create/Update/Reset calls. When
           using the Jobs API 2.1 this value is always set to `"MULTI_TASK"`.
@@ -211,12 +218,11 @@
         :param queue: :class:`QueueSettings` (optional)
           The queue settings of the job.
         :param run_as: :class:`JobRunAs` (optional)
-          Write-only setting, available only in Create/Update/Reset and Submit calls. Specifies the user or
-          service principal that the job runs as. If not specified, the job runs as the user who created the
-          job.
+          Write-only setting. Specifies the user, service principal or group that the job/pipeline runs as. If
+          not specified, the job/pipeline runs as the user who created the job/pipeline.
           
-          Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is
-          thrown.
+          Exactly one of `user_name`, `service_principal_name`, `group_name` should be specified. If not, an
+          error is thrown.
         :param schedule: :class:`CronSchedule` (optional)
           An optional periodic schedule for this job. The default behavior is that the job only runs when
           triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.
@@ -376,7 +382,7 @@
         :returns: :class:`JobPermissions`
         
 
-    .. py:method:: get_run(run_id: int [, include_history: Optional[bool], include_resolved_values: Optional[bool]]) -> Run
+    .. py:method:: get_run(run_id: int [, include_history: Optional[bool], include_resolved_values: Optional[bool], page_token: Optional[str]]) -> Run
 
 
         Usage:
@@ -418,6 +424,9 @@
           Whether to include the repair history in the response.
         :param include_resolved_values: bool (optional)
           Whether to include resolved parameter values in the response.
+        :param page_token: str (optional)
+          To list the next page or the previous page of job tasks, set this field to the value of the
+          `next_page_token` or `prev_page_token` returned in the GetJob response.
         
         :returns: :class:`Run`
         
@@ -676,6 +685,7 @@
           [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
           [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
         :param pipeline_params: :class:`PipelineParams` (optional)
+          Controls whether the pipeline should perform a full refresh
         :param python_named_params: Dict[str,str] (optional)
         :param python_params: List[str] (optional)
           A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe", "35"]`.
@@ -865,6 +875,7 @@
           [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
           [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
         :param pipeline_params: :class:`PipelineParams` (optional)
+          Controls whether the pipeline should perform a full refresh
         :param python_named_params: Dict[str,str] (optional)
         :param python_params: List[str] (optional)
           A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe", "35"]`.
@@ -924,7 +935,7 @@
         :returns: :class:`JobPermissions`
         
 
-    .. py:method:: submit( [, access_control_list: Optional[List[iam.AccessControlRequest]], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], git_source: Optional[GitSource], health: Optional[JobsHealthRules], idempotency_token: Optional[str], notification_settings: Optional[JobNotificationSettings], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], run_name: Optional[str], tasks: Optional[List[SubmitTask]], timeout_seconds: Optional[int], webhook_notifications: Optional[WebhookNotifications]]) -> Wait[Run]
+    .. py:method:: submit( [, access_control_list: Optional[List[JobAccessControlRequest]], budget_policy_id: Optional[str], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], git_source: Optional[GitSource], health: Optional[JobsHealthRules], idempotency_token: Optional[str], notification_settings: Optional[JobNotificationSettings], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], run_name: Optional[str], tasks: Optional[List[SubmitTask]], timeout_seconds: Optional[int], webhook_notifications: Optional[WebhookNotifications]]) -> Wait[Run]
 
 
         Usage:
@@ -960,8 +971,11 @@
         Runs submitted using this endpoint don’t display in the UI. Use the `jobs/runs/get` API to check the
         run state after the job is submitted.
         
-        :param access_control_list: List[:class:`AccessControlRequest`] (optional)
+        :param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
           List of permissions to set on the job.
+        :param budget_policy_id: str (optional)
+          The user specified id of the budget policy to use for this one-time run. If not specified, the run
+          will not be attributed to any budget policy.
         :param email_notifications: :class:`JobEmailNotifications` (optional)
           An optional set of email addresses notified when the run begins or completes.
         :param environments: List[:class:`JobEnvironment`] (optional)
@@ -1011,7 +1025,7 @@
           See :method:wait_get_run_job_terminated_or_skipped for more details.
         
 
-    .. py:method:: submit_and_wait( [, access_control_list: Optional[List[iam.AccessControlRequest]], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], git_source: Optional[GitSource], health: Optional[JobsHealthRules], idempotency_token: Optional[str], notification_settings: Optional[JobNotificationSettings], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], run_name: Optional[str], tasks: Optional[List[SubmitTask]], timeout_seconds: Optional[int], webhook_notifications: Optional[WebhookNotifications], timeout: datetime.timedelta = 0:20:00]) -> Run
+    .. py:method:: submit_and_wait( [, access_control_list: Optional[List[JobAccessControlRequest]], budget_policy_id: Optional[str], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], git_source: Optional[GitSource], health: Optional[JobsHealthRules], idempotency_token: Optional[str], notification_settings: Optional[JobNotificationSettings], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], run_name: Optional[str], tasks: Optional[List[SubmitTask]], timeout_seconds: Optional[int], webhook_notifications: Optional[WebhookNotifications], timeout: datetime.timedelta = 0:20:00]) -> Run
 
 
     .. py:method:: update(job_id: int [, fields_to_remove: Optional[List[str]], new_settings: Optional[JobSettings]])
diff --git a/docs/workspace/jobs/policy_compliance_for_jobs.rst b/docs/workspace/jobs/policy_compliance_for_jobs.rst
new file mode 100644
index 000000000..69f211552
--- /dev/null
+++ b/docs/workspace/jobs/policy_compliance_for_jobs.rst
@@ -0,0 +1,66 @@
+``w.policy_compliance_for_jobs``: Policy compliance for jobs
+============================================================
+.. currentmodule:: databricks.sdk.service.jobs
+
+.. py:class:: PolicyComplianceForJobsAPI
+
+    The compliance APIs allow you to view and manage the policy compliance status of jobs in your workspace.
+    This API currently only supports compliance controls for cluster policies.
+    
+    A job is in compliance if its cluster configurations satisfy the rules of all their respective cluster
+    policies. A job could be out of compliance if a cluster policy it uses was updated after the job was last
+    edited. The job is considered out of compliance if any of its clusters no longer comply with their updated
+    policies.
+    
+    The get and list compliance APIs allow you to view the policy compliance status of a job. The enforce
+    compliance API allows you to update a job so that it becomes compliant with all of its policies.
+
+    .. py:method:: enforce_compliance(job_id: int [, validate_only: Optional[bool]]) -> EnforcePolicyComplianceResponse
+
+        Enforce job policy compliance.
+        
+        Updates a job so the job clusters that are created when running the job (specified in `new_cluster`)
+        are compliant with the current versions of their respective cluster policies. All-purpose clusters
+        used in the job will not be updated.
+        
+        :param job_id: int
+          The ID of the job you want to enforce policy compliance on.
+        :param validate_only: bool (optional)
+          If set, previews changes made to the job to comply with its policy, but does not update the job.
+        
+        :returns: :class:`EnforcePolicyComplianceResponse`
+        
+
+    .. py:method:: get_compliance(job_id: int) -> GetPolicyComplianceResponse
+
+        Get job policy compliance.
+        
+        Returns the policy compliance status of a job. Jobs could be out of compliance if a cluster policy
+        they use was updated after the job was last edited and some of its job clusters no longer comply with
+        their updated policies.
+        
+        :param job_id: int
+          The ID of the job whose compliance status you are requesting.
+        
+        :returns: :class:`GetPolicyComplianceResponse`
+        
+
+    .. py:method:: list_compliance(policy_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[JobCompliance]
+
+        List job policy compliance.
+        
+        Returns the policy compliance status of all jobs that use a given policy. Jobs could be out of
+        compliance if a cluster policy they use was updated after the job was last edited and its job clusters
+        no longer comply with the updated policy.
+        
+        :param policy_id: str
+          Canonical unique identifier for the cluster policy.
+        :param page_size: int (optional)
+          Use this field to specify the maximum number of results to be returned by the server. The server may
+          further constrain the maximum number of results returned in a single page.
+        :param page_token: str (optional)
+          A page token that can be used to navigate to the next page or previous page as returned by
+          `next_page_token` or `prev_page_token`.
+        
+        :returns: Iterator over :class:`JobCompliance`
+        
\ No newline at end of file
diff --git a/docs/workspace/marketplace/consumer_listings.rst b/docs/workspace/marketplace/consumer_listings.rst
index 654fe82d4..242a8fce7 100644
--- a/docs/workspace/marketplace/consumer_listings.rst
+++ b/docs/workspace/marketplace/consumer_listings.rst
@@ -29,7 +29,7 @@
         :returns: :class:`GetListingResponse`
         
 
-    .. py:method:: list( [, assets: Optional[List[AssetType]], categories: Optional[List[Category]], is_ascending: Optional[bool], is_free: Optional[bool], is_private_exchange: Optional[bool], is_staff_pick: Optional[bool], page_size: Optional[int], page_token: Optional[str], provider_ids: Optional[List[str]], sort_by: Optional[SortBy], tags: Optional[List[ListingTag]]]) -> Iterator[Listing]
+    .. py:method:: list( [, assets: Optional[List[AssetType]], categories: Optional[List[Category]], is_free: Optional[bool], is_private_exchange: Optional[bool], is_staff_pick: Optional[bool], page_size: Optional[int], page_token: Optional[str], provider_ids: Optional[List[str]], tags: Optional[List[ListingTag]]]) -> Iterator[Listing]
 
         List listings.
         
@@ -39,7 +39,6 @@
           Matches any of the following asset types
         :param categories: List[:class:`Category`] (optional)
           Matches any of the following categories
-        :param is_ascending: bool (optional)
         :param is_free: bool (optional)
           Filters each listing based on if it is free.
         :param is_private_exchange: bool (optional)
@@ -50,15 +49,13 @@
         :param page_token: str (optional)
         :param provider_ids: List[str] (optional)
           Matches any of the following provider ids
-        :param sort_by: :class:`SortBy` (optional)
-          Criteria for sorting the resulting set of listings.
         :param tags: List[:class:`ListingTag`] (optional)
           Matches any of the following tags
         
         :returns: Iterator over :class:`Listing`
         
 
-    .. py:method:: search(query: str [, assets: Optional[List[AssetType]], categories: Optional[List[Category]], is_ascending: Optional[bool], is_free: Optional[bool], is_private_exchange: Optional[bool], page_size: Optional[int], page_token: Optional[str], provider_ids: Optional[List[str]], sort_by: Optional[SortBy]]) -> Iterator[Listing]
+    .. py:method:: search(query: str [, assets: Optional[List[AssetType]], categories: Optional[List[Category]], is_free: Optional[bool], is_private_exchange: Optional[bool], page_size: Optional[int], page_token: Optional[str], provider_ids: Optional[List[str]]]) -> Iterator[Listing]
 
         Search listings.
         
@@ -71,14 +68,12 @@
           Matches any of the following asset types
         :param categories: List[:class:`Category`] (optional)
           Matches any of the following categories
-        :param is_ascending: bool (optional)
         :param is_free: bool (optional)
         :param is_private_exchange: bool (optional)
         :param page_size: int (optional)
         :param page_token: str (optional)
         :param provider_ids: List[str] (optional)
           Matches any of the following provider ids
-        :param sort_by: :class:`SortBy` (optional)
         
         :returns: Iterator over :class:`Listing`
         
\ No newline at end of file
diff --git a/docs/workspace/ml/experiments.rst b/docs/workspace/ml/experiments.rst
index 1ada6b1e5..c09cfe353 100644
--- a/docs/workspace/ml/experiments.rst
+++ b/docs/workspace/ml/experiments.rst
@@ -270,10 +270,16 @@
         Get all artifacts.
         
         List artifacts for a run. Takes an optional `artifact_path` prefix. If it is specified, the response
-        contains only artifacts with the specified prefix.",
+        contains only artifacts with the specified prefix. This API does not support pagination when listing
+        artifacts in UC Volumes. A maximum of 1000 artifacts will be retrieved for UC Volumes. Please call
+        `/api/2.0/fs/directories{directory_path}` for listing artifacts in UC Volumes, which supports
+        pagination. See [List directory contents | Files API](/api/workspace/files/listdirectorycontents).
         
         :param page_token: str (optional)
-          Token indicating the page of artifact results to fetch
+          Token indicating the page of artifact results to fetch. `page_token` is not supported when listing
+          artifacts in UC Volumes. A maximum of 1000 artifacts will be retrieved for UC Volumes. Please call
+          `/api/2.0/fs/directories{directory_path}` for listing artifacts in UC Volumes, which supports
+          pagination. See [List directory contents | Files API](/api/workspace/files/listdirectorycontents).
         :param path: str (optional)
           Filter artifacts matching this path (a relative path from the root artifact directory).
         :param run_id: str (optional)
diff --git a/docs/workspace/pipelines/pipelines.rst b/docs/workspace/pipelines/pipelines.rst
index a80e7c799..9801a200e 100644
--- a/docs/workspace/pipelines/pipelines.rst
+++ b/docs/workspace/pipelines/pipelines.rst
@@ -15,7 +15,7 @@
     also enforce data quality with Delta Live Tables expectations. Expectations allow you to define expected
     data quality and specify how to handle records that fail those expectations.
 
-    .. py:method:: create( [, allow_duplicate_names: Optional[bool], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], dry_run: Optional[bool], edition: Optional[str], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[ManagedIngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]]) -> CreatePipelineResponse
+    .. py:method:: create( [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], dry_run: Optional[bool], edition: Optional[str], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]]) -> CreatePipelineResponse
 
 
         Usage:
@@ -55,6 +55,8 @@
         
         :param allow_duplicate_names: bool (optional)
           If false, deployment will fail if name conflicts with that of another pipeline.
+        :param budget_policy_id: str (optional)
+          Budget policy of this pipeline.
         :param catalog: str (optional)
           A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified, tables
           in this pipeline are published to a `target` schema inside `catalog` (for example,
@@ -80,7 +82,7 @@
           The definition of a gateway pipeline to support CDC.
         :param id: str (optional)
           Unique identifier for this pipeline.
-        :param ingestion_definition: :class:`ManagedIngestionPipelineDefinition` (optional)
+        :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional)
           The configuration for a managed ingestion pipeline. These settings cannot be used with the
           'libraries', 'target' or 'catalog' settings.
         :param libraries: List[:class:`PipelineLibrary`] (optional)
@@ -91,6 +93,9 @@
           List of notification settings for this pipeline.
         :param photon: bool (optional)
           Whether Photon is enabled for this pipeline.
+        :param schema: str (optional)
+          The default schema (database) where tables are read from or published to. The presence of this field
+          implies that the pipeline is in direct publishing mode.
         :param serverless: bool (optional)
           Whether serverless compute is enabled for this pipeline.
         :param storage: str (optional)
@@ -371,7 +376,7 @@
     .. py:method:: stop_and_wait(pipeline_id: str, timeout: datetime.timedelta = 0:20:00) -> GetPipelineResponse
 
 
-    .. py:method:: update(pipeline_id: str [, allow_duplicate_names: Optional[bool], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], edition: Optional[str], expected_last_modified: Optional[int], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[ManagedIngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]])
+    .. py:method:: update(pipeline_id: str [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], edition: Optional[str], expected_last_modified: Optional[int], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]])
 
 
         Usage:
@@ -425,6 +430,8 @@
           Unique identifier for this pipeline.
         :param allow_duplicate_names: bool (optional)
           If false, deployment will fail if name has changed and conflicts the name of another pipeline.
+        :param budget_policy_id: str (optional)
+          Budget policy of this pipeline.
         :param catalog: str (optional)
           A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified, tables
           in this pipeline are published to a `target` schema inside `catalog` (for example,
@@ -452,7 +459,7 @@
           The definition of a gateway pipeline to support CDC.
         :param id: str (optional)
           Unique identifier for this pipeline.
-        :param ingestion_definition: :class:`ManagedIngestionPipelineDefinition` (optional)
+        :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional)
           The configuration for a managed ingestion pipeline. These settings cannot be used with the
           'libraries', 'target' or 'catalog' settings.
         :param libraries: List[:class:`PipelineLibrary`] (optional)
@@ -463,6 +470,9 @@
           List of notification settings for this pipeline.
         :param photon: bool (optional)
           Whether Photon is enabled for this pipeline.
+        :param schema: str (optional)
+          The default schema (database) where tables are read from or published to. The presence of this field
+          implies that the pipeline is in direct publishing mode.
         :param serverless: bool (optional)
           Whether serverless compute is enabled for this pipeline.
         :param storage: str (optional)
diff --git a/docs/workspace/serving/index.rst b/docs/workspace/serving/index.rst
index ce3d216ff..7a39a4043 100644
--- a/docs/workspace/serving/index.rst
+++ b/docs/workspace/serving/index.rst
@@ -7,5 +7,5 @@ Use real-time inference for machine learning
 .. toctree::
    :maxdepth: 1
 
-   apps
-   serving_endpoints
\ No newline at end of file
+   serving_endpoints
+   serving_endpoints_data_plane
\ No newline at end of file
diff --git a/docs/workspace/serving/serving_endpoints.rst b/docs/workspace/serving/serving_endpoints.rst
index 9244f333a..cbcbca964 100644
--- a/docs/workspace/serving/serving_endpoints.rst
+++ b/docs/workspace/serving/serving_endpoints.rst
@@ -2,7 +2,7 @@
 ==========================================
 .. currentmodule:: databricks.sdk.service.serving
 
-.. py:class:: ServingEndpointsAPI
+.. py:class:: ServingEndpointsExt
 
     The Serving Endpoints API allows you to create, update, and delete model serving endpoints.
     
@@ -29,7 +29,7 @@
         :returns: :class:`BuildLogsResponse`
         
 
-    .. py:method:: create(name: str, config: EndpointCoreConfigInput [, rate_limits: Optional[List[RateLimit]], route_optimized: Optional[bool], tags: Optional[List[EndpointTag]]]) -> Wait[ServingEndpointDetailed]
+    .. py:method:: create(name: str, config: EndpointCoreConfigInput [, ai_gateway: Optional[AiGatewayConfig], rate_limits: Optional[List[RateLimit]], route_optimized: Optional[bool], tags: Optional[List[EndpointTag]]]) -> Wait[ServingEndpointDetailed]
 
         Create a new serving endpoint.
         
@@ -38,9 +38,12 @@
           workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores.
         :param config: :class:`EndpointCoreConfigInput`
           The core config of the serving endpoint.
+        :param ai_gateway: :class:`AiGatewayConfig` (optional)
+          The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are
+          supported as of now.
         :param rate_limits: List[:class:`RateLimit`] (optional)
-          Rate limits to be applied to the serving endpoint. NOTE: only external and foundation model
-          endpoints are supported as of now.
+          Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI
+          Gateway to manage rate limits.
         :param route_optimized: bool (optional)
           Enable route optimization for the serving endpoint.
         :param tags: List[:class:`EndpointTag`] (optional)
@@ -51,7 +54,7 @@
           See :method:wait_get_serving_endpoint_not_updating for more details.
         
 
-    .. py:method:: create_and_wait(name: str, config: EndpointCoreConfigInput [, rate_limits: Optional[List[RateLimit]], route_optimized: Optional[bool], tags: Optional[List[EndpointTag]], timeout: datetime.timedelta = 0:20:00]) -> ServingEndpointDetailed
+    .. py:method:: create_and_wait(name: str, config: EndpointCoreConfigInput [, ai_gateway: Optional[AiGatewayConfig], rate_limits: Optional[List[RateLimit]], route_optimized: Optional[bool], tags: Optional[List[EndpointTag]], timeout: datetime.timedelta = 0:20:00]) -> ServingEndpointDetailed
 
 
     .. py:method:: delete(name: str)
@@ -89,6 +92,12 @@
         :returns: :class:`ServingEndpointDetailed`
         
 
+    .. py:method:: get_langchain_chat_open_ai_client(model)
+
+
+    .. py:method:: get_open_ai_client()
+
+
     .. py:method:: get_open_api(name: str)
 
         Get the schema for a serving endpoint.
@@ -168,8 +177,8 @@
 
         Update rate limits of a serving endpoint.
         
-        Used to update the rate limits of a serving endpoint. NOTE: only external and foundation model
-        endpoints are supported as of now.
+        Used to update the rate limits of a serving endpoint. NOTE: Only foundation model endpoints are
+        currently supported. For external models, use AI Gateway to manage rate limits.
         
         :param name: str
           The name of the serving endpoint whose rate limits are being updated. This field is required.
@@ -179,6 +188,29 @@
         :returns: :class:`PutResponse`
         
 
+    .. py:method:: put_ai_gateway(name: str [, guardrails: Optional[AiGatewayGuardrails], inference_table_config: Optional[AiGatewayInferenceTableConfig], rate_limits: Optional[List[AiGatewayRateLimit]], usage_tracking_config: Optional[AiGatewayUsageTrackingConfig]]) -> PutAiGatewayResponse
+
+        Update AI Gateway of a serving endpoint.
+        
+        Used to update the AI Gateway of a serving endpoint. NOTE: Only external model endpoints are currently
+        supported.
+        
+        :param name: str
+          The name of the serving endpoint whose AI Gateway is being updated. This field is required.
+        :param guardrails: :class:`AiGatewayGuardrails` (optional)
+          Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and responses.
+        :param inference_table_config: :class:`AiGatewayInferenceTableConfig` (optional)
+          Configuration for payload logging using inference tables. Use these tables to monitor and audit data
+          being sent to and received from model APIs and to improve model quality.
+        :param rate_limits: List[:class:`AiGatewayRateLimit`] (optional)
+          Configuration for rate limits which can be set to limit endpoint traffic.
+        :param usage_tracking_config: :class:`AiGatewayUsageTrackingConfig` (optional)
+          Configuration to enable usage tracking using system tables. These tables allow you to monitor
+          operational usage on endpoints and their associated costs.
+        
+        :returns: :class:`PutAiGatewayResponse`
+        
+
     .. py:method:: query(name: str [, dataframe_records: Optional[List[Any]], dataframe_split: Optional[DataframeSplitInput], extra_params: Optional[Dict[str, str]], input: Optional[Any], inputs: Optional[Any], instances: Optional[List[Any]], max_tokens: Optional[int], messages: Optional[List[ChatMessage]], n: Optional[int], prompt: Optional[Any], stop: Optional[List[str]], stream: Optional[bool], temperature: Optional[float]]) -> QueryEndpointResponse
 
         Query a serving endpoint.
diff --git a/docs/workspace/serving/serving_endpoints_data_plane.rst b/docs/workspace/serving/serving_endpoints_data_plane.rst
new file mode 100644
index 000000000..8fb09e7ff
--- /dev/null
+++ b/docs/workspace/serving/serving_endpoints_data_plane.rst
@@ -0,0 +1,59 @@
+``w.serving_endpoints_data_plane``: Serving endpoints DataPlane
+===============================================================
+.. currentmodule:: databricks.sdk.service.serving
+
+.. py:class:: ServingEndpointsDataPlaneAPI
+
+    Serving endpoints DataPlane provides a set of operations to interact with data plane endpoints for Serving
+    endpoints service.
+
+    .. py:method:: query(name: str [, dataframe_records: Optional[List[Any]], dataframe_split: Optional[DataframeSplitInput], extra_params: Optional[Dict[str, str]], input: Optional[Any], inputs: Optional[Any], instances: Optional[List[Any]], max_tokens: Optional[int], messages: Optional[List[ChatMessage]], n: Optional[int], prompt: Optional[Any], stop: Optional[List[str]], stream: Optional[bool], temperature: Optional[float]]) -> QueryEndpointResponse
+
+        Query a serving endpoint.
+        
+        :param name: str
+          The name of the serving endpoint. This field is required.
+        :param dataframe_records: List[Any] (optional)
+          Pandas Dataframe input in the records orientation.
+        :param dataframe_split: :class:`DataframeSplitInput` (optional)
+          Pandas Dataframe input in the split orientation.
+        :param extra_params: Dict[str,str] (optional)
+          The extra parameters field used ONLY for __completions, chat,__ and __embeddings external &
+          foundation model__ serving endpoints. This is a map of strings and should only be used with other
+          external/foundation model query fields.
+        :param input: Any (optional)
+          The input string (or array of strings) field used ONLY for __embeddings external & foundation
+          model__ serving endpoints and is the only field (along with extra_params if needed) used by
+          embeddings queries.
+        :param inputs: Any (optional)
+          Tensor-based input in columnar format.
+        :param instances: List[Any] (optional)
+          Tensor-based input in row format.
+        :param max_tokens: int (optional)
+          The max tokens field used ONLY for __completions__ and __chat external & foundation model__ serving
+          endpoints. This is an integer and should only be used with other chat/completions query fields.
+        :param messages: List[:class:`ChatMessage`] (optional)
+          The messages field used ONLY for __chat external & foundation model__ serving endpoints. This is a
+          map of strings and should only be used with other chat query fields.
+        :param n: int (optional)
+          The n (number of candidates) field used ONLY for __completions__ and __chat external & foundation
+          model__ serving endpoints. This is an integer between 1 and 5 with a default of 1 and should only be
+          used with other chat/completions query fields.
+        :param prompt: Any (optional)
+          The prompt string (or array of strings) field used ONLY for __completions external & foundation
+          model__ serving endpoints and should only be used with other completions query fields.
+        :param stop: List[str] (optional)
+          The stop sequences field used ONLY for __completions__ and __chat external & foundation model__
+          serving endpoints. This is a list of strings and should only be used with other chat/completions
+          query fields.
+        :param stream: bool (optional)
+          The stream field used ONLY for __completions__ and __chat external & foundation model__ serving
+          endpoints. This is a boolean defaulting to false and should only be used with other chat/completions
+          query fields.
+        :param temperature: float (optional)
+          The temperature field used ONLY for __completions__ and __chat external & foundation model__ serving
+          endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be used with
+          other chat/completions query fields.
+        
+        :returns: :class:`QueryEndpointResponse`
+        
\ No newline at end of file
diff --git a/docs/workspace/settings/disable_legacy_access.rst b/docs/workspace/settings/disable_legacy_access.rst
new file mode 100644
index 000000000..c8baba3a7
--- /dev/null
+++ b/docs/workspace/settings/disable_legacy_access.rst
@@ -0,0 +1,61 @@
+``w.settings.disable_legacy_access``: Disable Legacy Access
+===========================================================
+.. currentmodule:: databricks.sdk.service.settings
+
+.. py:class:: DisableLegacyAccessAPI
+
+    'Disabling legacy access' has the following impacts:
+    
+    1. Disables direct access to the Hive Metastore. However, you can still access Hive Metastore through HMS
+    Federation. 2. Disables Fallback Mode (docs link) on any External Location access from the workspace. 3.
+    Alters DBFS path access to use External Location permissions in place of legacy credentials. 4. Enforces
+    Unity Catalog access on all path based access.
+
+    .. py:method:: delete( [, etag: Optional[str]]) -> DeleteDisableLegacyAccessResponse
+
+        Delete Legacy Access Disablement Status.
+        
+        Deletes legacy access disablement status.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DeleteDisableLegacyAccessResponse`
+        
+
+    .. py:method:: get( [, etag: Optional[str]]) -> DisableLegacyAccess
+
+        Retrieve Legacy Access Disablement Status.
+        
+        Retrieves legacy access disablement status.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DisableLegacyAccess`
+        
+
+    .. py:method:: update(allow_missing: bool, setting: DisableLegacyAccess, field_mask: str) -> DisableLegacyAccess
+
+        Update Legacy Access Disablement Status.
+        
+        Updates legacy access disablement status.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`DisableLegacyAccess`
+        :param field_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as a single string. To specify
+          multiple fields in the field mask, use comma as the separator (no space).
+        
+        :returns: :class:`DisableLegacyAccess`
+        
\ No newline at end of file
diff --git a/docs/workspace/settings/disable_legacy_dbfs.rst b/docs/workspace/settings/disable_legacy_dbfs.rst
new file mode 100644
index 000000000..ad11fa606
--- /dev/null
+++ b/docs/workspace/settings/disable_legacy_dbfs.rst
@@ -0,0 +1,57 @@
+``w.settings.disable_legacy_dbfs``: Disable Legacy DBFS
+=======================================================
+.. currentmodule:: databricks.sdk.service.settings
+
+.. py:class:: DisableLegacyDbfsAPI
+
+    When this setting is on, access to DBFS root and DBFS mounts is disallowed (as well as creation of new
+    mounts). When the setting is off, all DBFS functionality is enabled
+
+    .. py:method:: delete( [, etag: Optional[str]]) -> DeleteDisableLegacyDbfsResponse
+
+        Delete the disable legacy DBFS setting.
+        
+        Deletes the disable legacy DBFS setting for a workspace, reverting back to the default.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DeleteDisableLegacyDbfsResponse`
+        
+
+    .. py:method:: get( [, etag: Optional[str]]) -> DisableLegacyDbfs
+
+        Get the disable legacy DBFS setting.
+        
+        Gets the disable legacy DBFS setting.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DisableLegacyDbfs`
+        
+
+    .. py:method:: update(allow_missing: bool, setting: DisableLegacyDbfs, field_mask: str) -> DisableLegacyDbfs
+
+        Update the disable legacy DBFS setting.
+        
+        Updates the disable legacy DBFS setting for the workspace.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`DisableLegacyDbfs`
+        :param field_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as a single string. To specify
+          multiple fields in the field mask, use comma as the separator (no space).
+        
+        :returns: :class:`DisableLegacyDbfs`
+        
\ No newline at end of file
diff --git a/docs/workspace/settings/index.rst b/docs/workspace/settings/index.rst
index 5b56652ec..22655853b 100644
--- a/docs/workspace/settings/index.rst
+++ b/docs/workspace/settings/index.rst
@@ -9,10 +9,13 @@ Manage security settings for Accounts and Workspaces
 
    credentials_manager
    ip_access_lists
+   notification_destinations
    settings
    automatic_cluster_update
    compliance_security_profile
    default_namespace
+   disable_legacy_access
+   disable_legacy_dbfs
    enhanced_security_monitoring
    restrict_workspace_admins
    token_management
diff --git a/docs/workspace/settings/notification_destinations.rst b/docs/workspace/settings/notification_destinations.rst
new file mode 100644
index 000000000..29d947f55
--- /dev/null
+++ b/docs/workspace/settings/notification_destinations.rst
@@ -0,0 +1,74 @@
+``w.notification_destinations``: Notification Destinations
+==========================================================
+.. currentmodule:: databricks.sdk.service.settings
+
+.. py:class:: NotificationDestinationsAPI
+
+    The notification destinations API lets you programmatically manage a workspace's notification
+    destinations. Notification destinations are used to send notifications for query alerts and jobs to
+    destinations outside of Databricks. Only workspace admins can create, update, and delete notification
+    destinations.
+
+    .. py:method:: create( [, config: Optional[Config], display_name: Optional[str]]) -> NotificationDestination
+
+        Create a notification destination.
+        
+        Creates a notification destination. Requires workspace admin permissions.
+        
+        :param config: :class:`Config` (optional)
+          The configuration for the notification destination. Must wrap EXACTLY one of the nested configs.
+        :param display_name: str (optional)
+          The display name for the notification destination.
+        
+        :returns: :class:`NotificationDestination`
+        
+
+    .. py:method:: delete(id: str)
+
+        Delete a notification destination.
+        
+        Deletes a notification destination. Requires workspace admin permissions.
+        
+        :param id: str
+        
+        
+        
+
+    .. py:method:: get(id: str) -> NotificationDestination
+
+        Get a notification destination.
+        
+        Gets a notification destination.
+        
+        :param id: str
+        
+        :returns: :class:`NotificationDestination`
+        
+
+    .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ListNotificationDestinationsResult]
+
+        List notification destinations.
+        
+        Lists notification destinations.
+        
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`ListNotificationDestinationsResult`
+        
+
+    .. py:method:: update(id: str [, config: Optional[Config], display_name: Optional[str]]) -> NotificationDestination
+
+        Update a notification destination.
+        
+        Updates a notification destination. Requires workspace admin permissions. At least one field is
+        required in the request body.
+        
+        :param id: str
+        :param config: :class:`Config` (optional)
+          The configuration for the notification destination. Must wrap EXACTLY one of the nested configs.
+        :param display_name: str (optional)
+          The display name for the notification destination.
+        
+        :returns: :class:`NotificationDestination`
+        
\ No newline at end of file
diff --git a/docs/workspace/settings/settings.rst b/docs/workspace/settings/settings.rst
index 55f47dae0..588031926 100644
--- a/docs/workspace/settings/settings.rst
+++ b/docs/workspace/settings/settings.rst
@@ -34,6 +34,22 @@
         This setting requires a restart of clusters and SQL warehouses to take effect. Additionally, the default
         namespace only applies when using Unity Catalog-enabled compute.
 
+    .. py:property:: disable_legacy_access
+        :type: DisableLegacyAccessAPI
+
+        'Disabling legacy access' has the following impacts:
+        
+        1. Disables direct access to the Hive Metastore. However, you can still access Hive Metastore through HMS
+        Federation. 2. Disables Fallback Mode (docs link) on any External Location access from the workspace. 3.
+        Alters DBFS path access to use External Location permissions in place of legacy credentials. 4. Enforces
+        Unity Catalog access on all path based access.
+
+    .. py:property:: disable_legacy_dbfs
+        :type: DisableLegacyDbfsAPI
+
+        When this setting is on, access to DBFS root and DBFS mounts is disallowed (as well as creation of new
+        mounts). When the setting is off, all DBFS functionality is enabled
+
     .. py:property:: enhanced_security_monitoring
         :type: EnhancedSecurityMonitoringAPI
 
diff --git a/docs/workspace/sharing/providers.rst b/docs/workspace/sharing/providers.rst
index 1382b5a92..7cf398ac0 100644
--- a/docs/workspace/sharing/providers.rst
+++ b/docs/workspace/sharing/providers.rst
@@ -100,7 +100,7 @@
         :returns: :class:`ProviderInfo`
         
 
-    .. py:method:: list( [, data_provider_global_metastore_id: Optional[str]]) -> Iterator[ProviderInfo]
+    .. py:method:: list( [, data_provider_global_metastore_id: Optional[str], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[ProviderInfo]
 
 
         Usage:
@@ -123,11 +123,21 @@
         :param data_provider_global_metastore_id: str (optional)
           If not provided, all providers will be returned. If no providers exist with this ID, no results will
           be returned.
+        :param max_results: int (optional)
+          Maximum number of providers to return. - when set to 0, the page length is set to a server
+          configured value (recommended); - when set to a value greater than 0, the page length is the minimum
+          of this value and a server configured value; - when set to a value less than 0, an invalid parameter
+          error is returned; - If not set, all valid providers are returned (not recommended). - Note: The
+          number of returned providers might be less than the specified max_results size, even zero. The only
+          definitive indication that no further providers can be fetched is when the next_page_token is unset
+          from the response.
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
         
         :returns: Iterator over :class:`ProviderInfo`
         
 
-    .. py:method:: list_shares(name: str) -> Iterator[ProviderShare]
+    .. py:method:: list_shares(name: str [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[ProviderShare]
 
 
         Usage:
@@ -162,6 +172,16 @@
         
         :param name: str
           Name of the provider in which to list shares.
+        :param max_results: int (optional)
+          Maximum number of shares to return. - when set to 0, the page length is set to a server configured
+          value (recommended); - when set to a value greater than 0, the page length is the minimum of this
+          value and a server configured value; - when set to a value less than 0, an invalid parameter error
+          is returned; - If not set, all valid shares are returned (not recommended). - Note: The number of
+          returned shares might be less than the specified max_results size, even zero. The only definitive
+          indication that no further shares can be fetched is when the next_page_token is unset from the
+          response.
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
         
         :returns: Iterator over :class:`ProviderShare`
         
diff --git a/docs/workspace/sharing/recipients.rst b/docs/workspace/sharing/recipients.rst
index 86a004d36..44f2042bb 100644
--- a/docs/workspace/sharing/recipients.rst
+++ b/docs/workspace/sharing/recipients.rst
@@ -18,7 +18,7 @@
     recipient follows the activation link to download the credential file, and then uses the credential file
     to establish a secure connection to receive the shared data. This sharing mode is called **open sharing**.
 
-    .. py:method:: create(name: str, authentication_type: AuthenticationType [, comment: Optional[str], data_recipient_global_metastore_id: Optional[str], ip_access_list: Optional[IpAccessList], owner: Optional[str], properties_kvpairs: Optional[SecurablePropertiesKvPairs], sharing_code: Optional[str]]) -> RecipientInfo
+    .. py:method:: create(name: str, authentication_type: AuthenticationType [, comment: Optional[str], data_recipient_global_metastore_id: Optional[str], expiration_time: Optional[int], ip_access_list: Optional[IpAccessList], owner: Optional[str], properties_kvpairs: Optional[SecurablePropertiesKvPairs], sharing_code: Optional[str]]) -> RecipientInfo
 
 
         Usage:
@@ -51,6 +51,8 @@
           The global Unity Catalog metastore id provided by the data recipient. This field is required when
           the __authentication_type__ is **DATABRICKS**. The identifier is of format
           __cloud__:__region__:__metastore-uuid__.
+        :param expiration_time: int (optional)
+          Expiration timestamp of the token, in epoch milliseconds.
         :param ip_access_list: :class:`IpAccessList` (optional)
           IP Access List
         :param owner: str (optional)
@@ -108,7 +110,7 @@
         :returns: :class:`RecipientInfo`
         
 
-    .. py:method:: list( [, data_recipient_global_metastore_id: Optional[str]]) -> Iterator[RecipientInfo]
+    .. py:method:: list( [, data_recipient_global_metastore_id: Optional[str], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[RecipientInfo]
 
 
         Usage:
@@ -132,6 +134,16 @@
         :param data_recipient_global_metastore_id: str (optional)
           If not provided, all recipients will be returned. If no recipients exist with this ID, no results
           will be returned.
+        :param max_results: int (optional)
+          Maximum number of recipients to return. - when set to 0, the page length is set to a server
+          configured value (recommended); - when set to a value greater than 0, the page length is the minimum
+          of this value and a server configured value; - when set to a value less than 0, an invalid parameter
+          error is returned; - If not set, all valid recipients are returned (not recommended). - Note: The
+          number of returned recipients might be less than the specified max_results size, even zero. The only
+          definitive indication that no further recipients can be fetched is when the next_page_token is unset
+          from the response.
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
         
         :returns: Iterator over :class:`RecipientInfo`
         
@@ -171,7 +183,7 @@
         :returns: :class:`RecipientInfo`
         
 
-    .. py:method:: share_permissions(name: str) -> GetRecipientSharePermissionsResponse
+    .. py:method:: share_permissions(name: str [, max_results: Optional[int], page_token: Optional[str]]) -> GetRecipientSharePermissionsResponse
 
 
         Usage:
@@ -198,11 +210,21 @@
         
         :param name: str
           The name of the Recipient.
+        :param max_results: int (optional)
+          Maximum number of permissions to return. - when set to 0, the page length is set to a server
+          configured value (recommended); - when set to a value greater than 0, the page length is the minimum
+          of this value and a server configured value; - when set to a value less than 0, an invalid parameter
+          error is returned; - If not set, all valid permissions are returned (not recommended). - Note: The
+          number of returned permissions might be less than the specified max_results size, even zero. The
+          only definitive indication that no further permissions can be fetched is when the next_page_token is
+          unset from the response.
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
         
         :returns: :class:`GetRecipientSharePermissionsResponse`
         
 
-    .. py:method:: update(name: str [, comment: Optional[str], ip_access_list: Optional[IpAccessList], new_name: Optional[str], owner: Optional[str], properties_kvpairs: Optional[SecurablePropertiesKvPairs]])
+    .. py:method:: update(name: str [, comment: Optional[str], expiration_time: Optional[int], ip_access_list: Optional[IpAccessList], new_name: Optional[str], owner: Optional[str], properties_kvpairs: Optional[SecurablePropertiesKvPairs]])
 
 
         Usage:
@@ -232,6 +254,8 @@
           Name of the recipient.
         :param comment: str (optional)
           Description about the recipient.
+        :param expiration_time: int (optional)
+          Expiration timestamp of the token, in epoch milliseconds.
         :param ip_access_list: :class:`IpAccessList` (optional)
           IP Access List
         :param new_name: str (optional)
diff --git a/docs/workspace/sharing/shares.rst b/docs/workspace/sharing/shares.rst
index 82cdd4e6f..4d14b811d 100644
--- a/docs/workspace/sharing/shares.rst
+++ b/docs/workspace/sharing/shares.rst
@@ -87,7 +87,7 @@
         :returns: :class:`ShareInfo`
         
 
-    .. py:method:: list() -> Iterator[ShareInfo]
+    .. py:method:: list( [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[ShareInfo]
 
 
         Usage:
@@ -95,20 +95,32 @@
         .. code-block::
 
             from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service import sharing
             
             w = WorkspaceClient()
             
-            all = w.shares.list()
+            all = w.shares.list()
 
         List shares.
         
         Gets an array of data object shares from the metastore. The caller must be a metastore admin or the
         owner of the share. There is no guarantee of a specific ordering of the elements in the array.
         
+        :param max_results: int (optional)
+          Maximum number of shares to return. - when set to 0, the page length is set to a server configured
+          value (recommended); - when set to a value greater than 0, the page length is the minimum of this
+          value and a server configured value; - when set to a value less than 0, an invalid parameter error
+          is returned; - If not set, all valid shares are returned (not recommended). - Note: The number of
+          returned shares might be less than the specified max_results size, even zero. The only definitive
+          indication that no further shares can be fetched is when the next_page_token is unset from the
+          response.
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
+        
         :returns: Iterator over :class:`ShareInfo`
         
 
-    .. py:method:: share_permissions(name: str) -> catalog.PermissionsList
+    .. py:method:: share_permissions(name: str [, max_results: Optional[int], page_token: Optional[str]]) -> catalog.PermissionsList
 
         Get permissions.
         
@@ -117,6 +129,16 @@
         
         :param name: str
           The name of the share.
+        :param max_results: int (optional)
+          Maximum number of permissions to return. - when set to 0, the page length is set to a server
+          configured value (recommended); - when set to a value greater than 0, the page length is the minimum
+          of this value and a server configured value; - when set to a value less than 0, an invalid parameter
+          error is returned; - If not set, all valid permissions are returned (not recommended). - Note: The
+          number of returned permissions might be less than the specified max_results size, even zero. The
+          only definitive indication that no further permissions can be fetched is when the next_page_token is
+          unset from the response.
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
         
         :returns: :class:`PermissionsList`
         
@@ -200,7 +222,7 @@
         :returns: :class:`ShareInfo`
         
 
-    .. py:method:: update_permissions(name: str [, changes: Optional[List[catalog.PermissionsChange]]])
+    .. py:method:: update_permissions(name: str [, changes: Optional[List[catalog.PermissionsChange]], max_results: Optional[int], page_token: Optional[str]])
 
         Update permissions.
         
@@ -214,6 +236,16 @@
           The name of the share.
         :param changes: List[:class:`PermissionsChange`] (optional)
           Array of permission changes.
+        :param max_results: int (optional)
+          Maximum number of permissions to return. - when set to 0, the page length is set to a server
+          configured value (recommended); - when set to a value greater than 0, the page length is the minimum
+          of this value and a server configured value; - when set to a value less than 0, an invalid parameter
+          error is returned; - If not set, all valid permissions are returned (not recommended). - Note: The
+          number of returned permissions might be less than the specified max_results size, even zero. The
+          only definitive indication that no further permissions can be fetched is when the next_page_token is
+          unset from the response.
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
         
         
         
\ No newline at end of file
diff --git a/docs/workspace/sql/alerts.rst b/docs/workspace/sql/alerts.rst
index 26ae453a2..c552d5f80 100644
--- a/docs/workspace/sql/alerts.rst
+++ b/docs/workspace/sql/alerts.rst
@@ -8,12 +8,8 @@
     periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or
     notification destinations if the condition was met. Alerts can be scheduled using the `sql_task` type of
     the Jobs API, e.g. :method:jobs/create.
-    
-    **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
-    
-    [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
 
-    .. py:method:: create(name: str, options: AlertOptions, query_id: str [, parent: Optional[str], rearm: Optional[int]]) -> Alert
+    .. py:method:: create( [, alert: Optional[CreateAlertRequestAlert]]) -> Alert
 
 
         Usage:
@@ -29,60 +25,48 @@
             
             srcs = w.data_sources.list()
             
-            query = w.queries.create(name=f'sdk-{time.time_ns()}',
-                                     data_source_id=srcs[0].id,
-                                     description="test query from Go SDK",
-                                     query="SELECT 1")
-            
-            alert = w.alerts.create(options=sql.AlertOptions(column="1", op="==", value="1"),
-                                    name=f'sdk-{time.time_ns()}',
-                                    query_id=query.id)
+            query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}',
+                                                                       warehouse_id=srcs[0].warehouse_id,
+                                                                       description="test query from Python SDK",
+                                                                       query_text="SELECT 1"))
+            
+            alert = w.alerts.create(
+                alert=sql.CreateAlertRequestAlert(condition=sql.AlertCondition(operand=sql.AlertConditionOperand(
+                    column=sql.AlertOperandColumn(name="1")),
+                                                                               op=sql.AlertOperator.EQUAL,
+                                                                               threshold=sql.AlertConditionThreshold(
+                                                                                   value=sql.AlertOperandValue(
+                                                                                       double_value=1))),
+                                                  display_name=f'sdk-{time.time_ns()}',
+                                                  query_id=query.id))
             
             # cleanup
-            w.queries.delete(query_id=query.id)
-            w.alerts.delete(alert_id=alert.id)
+            w.queries.delete(id=query.id)
+            w.alerts.delete(id=alert.id)
 
         Create an alert.
         
-        Creates an alert. An alert is a Databricks SQL object that periodically runs a query, evaluates a
-        condition of its result, and notifies users or notification destinations if the condition was met.
-        
-        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+        Creates an alert.
         
-        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
-        
-        :param name: str
-          Name of the alert.
-        :param options: :class:`AlertOptions`
-          Alert configuration options.
-        :param query_id: str
-          Query ID.
-        :param parent: str (optional)
-          The identifier of the workspace folder containing the object.
-        :param rearm: int (optional)
-          Number of seconds after being triggered before the alert rearms itself and can be triggered again.
-          If `null`, alert will never be triggered again.
+        :param alert: :class:`CreateAlertRequestAlert` (optional)
         
         :returns: :class:`Alert`
         
 
-    .. py:method:: delete(alert_id: str)
+    .. py:method:: delete(id: str)
 
         Delete an alert.
         
-        Deletes an alert. Deleted alerts are no longer accessible and cannot be restored. **Note**: Unlike
-        queries and dashboards, alerts cannot be moved to the trash.
-        
-        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+        Moves an alert to the trash. Trashed alerts immediately disappear from searches and list views, and
+        can no longer trigger. You can restore a trashed alert through the UI. A trashed alert is permanently
+        deleted after 30 days.
         
-        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
-        
-        :param alert_id: str
+        :param id: str
         
         
         
 
-    .. py:method:: get(alert_id: str) -> Alert
+    .. py:method:: get(id: str) -> Alert
 
 
         Usage:
@@ -98,35 +82,37 @@
             
             srcs = w.data_sources.list()
             
-            query = w.queries.create(name=f'sdk-{time.time_ns()}',
-                                     data_source_id=srcs[0].id,
-                                     description="test query from Go SDK",
-                                     query="SELECT 1")
+            query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}',
+                                                                       warehouse_id=srcs[0].warehouse_id,
+                                                                       description="test query from Python SDK",
+                                                                       query_text="SELECT 1"))
             
-            alert = w.alerts.create(options=sql.AlertOptions(column="1", op="==", value="1"),
-                                    name=f'sdk-{time.time_ns()}',
-                                    query_id=query.id)
+            alert = w.alerts.create(
+                alert=sql.CreateAlertRequestAlert(condition=sql.AlertCondition(operand=sql.AlertConditionOperand(
+                    column=sql.AlertOperandColumn(name="1")),
+                                                                               op=sql.AlertOperator.EQUAL,
+                                                                               threshold=sql.AlertConditionThreshold(
+                                                                                   value=sql.AlertOperandValue(
+                                                                                       double_value=1))),
+                                                  display_name=f'sdk-{time.time_ns()}',
+                                                  query_id=query.id))
             
-            by_id = w.alerts.get(alert_id=alert.id)
+            by_id = w.alerts.get(id=alert.id)
             
             # cleanup
-            w.queries.delete(query_id=query.id)
-            w.alerts.delete(alert_id=alert.id)
+            w.queries.delete(id=query.id)
+            w.alerts.delete(id=alert.id)
 
         Get an alert.
         
         Gets an alert.
         
-        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
-        
-        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
-        
-        :param alert_id: str
+        :param id: str
         
         :returns: :class:`Alert`
         
 
-    .. py:method:: list() -> Iterator[Alert]
+    .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ListAlertsResponseAlert]
 
 
         Usage:
@@ -134,23 +120,24 @@
         .. code-block::
 
             from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service import sql
             
             w = WorkspaceClient()
             
-            all = w.alerts.list()
+            all = w.alerts.list()
 
-        Get alerts.
+        List alerts.
         
-        Gets a list of alerts.
+        Gets a list of alerts accessible to the user, ordered by creation time. **Warning:** Calling this API
+        concurrently 10 or more times could result in throttling, service degradation, or a temporary ban.
         
-        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+        :param page_size: int (optional)
+        :param page_token: str (optional)
         
-        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
-        
-        :returns: Iterator over :class:`Alert`
+        :returns: Iterator over :class:`ListAlertsResponseAlert`
         
 
-    .. py:method:: update(alert_id: str, name: str, options: AlertOptions, query_id: str [, rearm: Optional[int]])
+    .. py:method:: update(id: str, update_mask: str [, alert: Optional[UpdateAlertRequestAlert]]) -> Alert
 
 
         Usage:
@@ -166,42 +153,39 @@
             
             srcs = w.data_sources.list()
             
-            query = w.queries.create(name=f'sdk-{time.time_ns()}',
-                                     data_source_id=srcs[0].id,
-                                     description="test query from Go SDK",
-                                     query="SELECT 1")
-            
-            alert = w.alerts.create(options=sql.AlertOptions(column="1", op="==", value="1"),
-                                    name=f'sdk-{time.time_ns()}',
-                                    query_id=query.id)
-            
-            w.alerts.update(options=sql.AlertOptions(column="1", op="==", value="1"),
-                            alert_id=alert.id,
-                            name=f'sdk-{time.time_ns()}',
-                            query_id=query.id)
+            query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}',
+                                                                       warehouse_id=srcs[0].warehouse_id,
+                                                                       description="test query from Python SDK",
+                                                                       query_text="SELECT 1"))
+            
+            alert = w.alerts.create(
+                alert=sql.CreateAlertRequestAlert(condition=sql.AlertCondition(operand=sql.AlertConditionOperand(
+                    column=sql.AlertOperandColumn(name="1")),
+                                                                               op=sql.AlertOperator.EQUAL,
+                                                                               threshold=sql.AlertConditionThreshold(
+                                                                                   value=sql.AlertOperandValue(
+                                                                                       double_value=1))),
+                                                  display_name=f'sdk-{time.time_ns()}',
+                                                  query_id=query.id))
+            
+            _ = w.alerts.update(id=alert.id,
+                                alert=sql.UpdateAlertRequestAlert(display_name=f'sdk-{time.time_ns()}'),
+                                update_mask="display_name")
             
             # cleanup
-            w.queries.delete(query_id=query.id)
-            w.alerts.delete(alert_id=alert.id)
+            w.queries.delete(id=query.id)
+            w.alerts.delete(id=alert.id)
 
         Update an alert.
         
         Updates an alert.
         
-        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
-        
-        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
-        
-        :param alert_id: str
-        :param name: str
-          Name of the alert.
-        :param options: :class:`AlertOptions`
-          Alert configuration options.
-        :param query_id: str
-          Query ID.
-        :param rearm: int (optional)
-          Number of seconds after being triggered before the alert rearms itself and can be triggered again.
-          If `null`, alert will never be triggered again.
-        
+        :param id: str
+        :param update_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as single string. To specify
+          multiple fields in the field mask, use comma as the separator (no space).
+        :param alert: :class:`UpdateAlertRequestAlert` (optional)
         
+        :returns: :class:`Alert`
         
\ No newline at end of file
diff --git a/docs/workspace/sql/alerts_legacy.rst b/docs/workspace/sql/alerts_legacy.rst
new file mode 100644
index 000000000..6dfd96128
--- /dev/null
+++ b/docs/workspace/sql/alerts_legacy.rst
@@ -0,0 +1,114 @@
+``w.alerts_legacy``: Alerts (legacy)
+====================================
+.. currentmodule:: databricks.sdk.service.sql
+
+.. py:class:: AlertsLegacyAPI
+
+    The alerts API can be used to perform CRUD operations on alerts. An alert is a Databricks SQL object that
+    periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or
+    notification destinations if the condition was met. Alerts can be scheduled using the `sql_task` type of
+    the Jobs API, e.g. :method:jobs/create.
+    
+    **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn
+    more]
+    
+    [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+
+    .. py:method:: create(name: str, options: AlertOptions, query_id: str [, parent: Optional[str], rearm: Optional[int]]) -> LegacyAlert
+
+        Create an alert.
+        
+        Creates an alert. An alert is a Databricks SQL object that periodically runs a query, evaluates a
+        condition of its result, and notifies users or notification destinations if the condition was met.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/create
+        instead. [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
+        :param name: str
+          Name of the alert.
+        :param options: :class:`AlertOptions`
+          Alert configuration options.
+        :param query_id: str
+          Query ID.
+        :param parent: str (optional)
+          The identifier of the workspace folder containing the object.
+        :param rearm: int (optional)
+          Number of seconds after being triggered before the alert rearms itself and can be triggered again.
+          If `null`, alert will never be triggered again.
+        
+        :returns: :class:`LegacyAlert`
+        
+
+    .. py:method:: delete(alert_id: str)
+
+        Delete an alert.
+        
+        Deletes an alert. Deleted alerts are no longer accessible and cannot be restored. **Note**: Unlike
+        queries and dashboards, alerts cannot be moved to the trash.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/delete
+        instead. [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
+        :param alert_id: str
+        
+        
+        
+
+    .. py:method:: get(alert_id: str) -> LegacyAlert
+
+        Get an alert.
+        
+        Gets an alert.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/get
+        instead. [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
+        :param alert_id: str
+        
+        :returns: :class:`LegacyAlert`
+        
+
+    .. py:method:: list() -> Iterator[LegacyAlert]
+
+        Get alerts.
+        
+        Gets a list of alerts.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/list
+        instead. [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
+        :returns: Iterator over :class:`LegacyAlert`
+        
+
+    .. py:method:: update(alert_id: str, name: str, options: AlertOptions, query_id: str [, rearm: Optional[int]])
+
+        Update an alert.
+        
+        Updates an alert.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/update
+        instead. [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
+        :param alert_id: str
+        :param name: str
+          Name of the alert.
+        :param options: :class:`AlertOptions`
+          Alert configuration options.
+        :param query_id: str
+          Query ID.
+        :param rearm: int (optional)
+          Number of seconds after being triggered before the alert rearms itself and can be triggered again.
+          If `null`, alert will never be triggered again.
+        
+        
+        
\ No newline at end of file
diff --git a/docs/workspace/sql/data_sources.rst b/docs/workspace/sql/data_sources.rst
index dcab75063..8f7321fa0 100644
--- a/docs/workspace/sql/data_sources.rst
+++ b/docs/workspace/sql/data_sources.rst
@@ -1,5 +1,5 @@
-``w.data_sources``: Data Sources
-================================
+``w.data_sources``: Data Sources (legacy)
+=========================================
 .. currentmodule:: databricks.sdk.service.sql
 
 .. py:class:: DataSourcesAPI
@@ -12,9 +12,9 @@
     advise you to use any text editor, REST client, or `grep` to search the response from this API for the
     name of your SQL warehouse as it appears in Databricks SQL.
     
-    **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+    **Note**: A new version of the Databricks SQL API is now available. [Learn more]
     
-    [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+    [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
 
     .. py:method:: list() -> Iterator[DataSource]
 
@@ -35,9 +35,10 @@
         API response are enumerated for clarity. However, you need only a SQL warehouse's `id` to create new
         queries against it.
         
-        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:warehouses/list
+        instead. [Learn more]
         
-        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
         
         :returns: Iterator over :class:`DataSource`
         
\ No newline at end of file
diff --git a/docs/workspace/sql/dbsql_permissions.rst b/docs/workspace/sql/dbsql_permissions.rst
index fbf1aac2c..7f9e5d19c 100644
--- a/docs/workspace/sql/dbsql_permissions.rst
+++ b/docs/workspace/sql/dbsql_permissions.rst
@@ -16,9 +16,9 @@
     
     - `CAN_MANAGE`: Allows all actions: read, run, edit, delete, modify permissions (superset of `CAN_RUN`)
     
-    **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+    **Note**: A new version of the Databricks SQL API is now available. [Learn more]
     
-    [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+    [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
 
     .. py:method:: get(object_type: ObjectTypePlural, object_id: str) -> GetResponse
 
@@ -26,9 +26,10 @@
         
         Gets a JSON representation of the access control list (ACL) for a specified object.
         
-        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+        **Note**: A new version of the Databricks SQL API is now available. Please use
+        :method:workspace/getpermissions instead. [Learn more]
         
-        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
         
         :param object_type: :class:`ObjectTypePlural`
           The type of object permissions to check.
@@ -45,9 +46,10 @@
         Sets the access control list (ACL) for a specified object. This operation will complete rewrite the
         ACL.
         
-        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+        **Note**: A new version of the Databricks SQL API is now available. Please use
+        :method:workspace/setpermissions instead. [Learn more]
         
-        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
         
         :param object_type: :class:`ObjectTypePlural`
           The type of object permission to set.
@@ -64,9 +66,10 @@
         
         Transfers ownership of a dashboard, query, or alert to an active user. Requires an admin API key.
         
-        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+        **Note**: A new version of the Databricks SQL API is now available. For queries and alerts, please use
+        :method:queries/update and :method:alerts/update respectively instead. [Learn more]
         
-        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
         
         :param object_type: :class:`OwnableObjectType`
           The type of object on which to change ownership.
diff --git a/docs/workspace/sql/index.rst b/docs/workspace/sql/index.rst
index 397de5c72..728730209 100644
--- a/docs/workspace/sql/index.rst
+++ b/docs/workspace/sql/index.rst
@@ -8,12 +8,15 @@ Manage Databricks SQL assets, including warehouses, dashboards, queries and quer
    :maxdepth: 1
 
    alerts
+   alerts_legacy
    dashboard_widgets
    dashboards
    data_sources
    dbsql_permissions
    queries
+   queries_legacy
    query_history
    query_visualizations
+   query_visualizations_legacy
    statement_execution
    warehouses
\ No newline at end of file
diff --git a/docs/workspace/sql/queries.rst b/docs/workspace/sql/queries.rst
index d26ff2ba9..1f01c2f1d 100644
--- a/docs/workspace/sql/queries.rst
+++ b/docs/workspace/sql/queries.rst
@@ -1,18 +1,14 @@
-``w.queries``: Queries / Results
-================================
+``w.queries``: Queries
+======================
 .. currentmodule:: databricks.sdk.service.sql
 
 .. py:class:: QueriesAPI
 
-    These endpoints are used for CRUD operations on query definitions. Query definitions include the target
-    SQL warehouse, query text, name, description, tags, parameters, and visualizations. Queries can be
+    The queries API can be used to perform CRUD operations on queries. A query is a Databricks SQL object that
+    includes the target SQL warehouse, query text, name, description, tags, and parameters. Queries can be
     scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create.
-    
-    **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
-    
-    [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
 
-    .. py:method:: create( [, data_source_id: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], parent: Optional[str], query: Optional[str], run_as_role: Optional[RunAsRole], tags: Optional[List[str]]]) -> Query
+    .. py:method:: create( [, query: Optional[CreateQueryRequestQuery]]) -> Query
 
 
         Usage:
@@ -22,76 +18,43 @@
             import time
             
             from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service import sql
             
             w = WorkspaceClient()
             
             srcs = w.data_sources.list()
             
-            query = w.queries.create(name=f'sdk-{time.time_ns()}',
-                                     data_source_id=srcs[0].id,
-                                     description="test query from Go SDK",
-                                     query="SHOW TABLES")
+            query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}',
+                                                                       warehouse_id=srcs[0].warehouse_id,
+                                                                       description="test query from Python SDK",
+                                                                       query_text="SHOW TABLES"))
             
             # cleanup
-            w.queries.delete(query_id=query.id)
-
-        Create a new query definition.
-        
-        Creates a new query definition. Queries created with this endpoint belong to the authenticated user
-        making the request.
-        
-        The `data_source_id` field specifies the ID of the SQL warehouse to run this query against. You can
-        use the Data Sources API to see a complete list of available SQL warehouses. Or you can copy the
-        `data_source_id` from an existing query.
-        
-        **Note**: You cannot add a visualization until you create the query.
-        
-        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
-        
-        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
-        
-        :param data_source_id: str (optional)
-          Data source ID maps to the ID of the data source used by the resource and is distinct from the
-          warehouse ID. [Learn more]
-          
-          [Learn more]: https://docs.databricks.com/api/workspace/datasources/list
-        :param description: str (optional)
-          General description that conveys additional information about this query such as usage notes.
-        :param name: str (optional)
-          The title of this query that appears in list views, widget headings, and on the query page.
-        :param options: Any (optional)
-          Exclusively used for storing a list parameter definitions. A parameter is an object with `title`,
-          `name`, `type`, and `value` properties. The `value` field here is the default value. It can be
-          overridden at runtime.
-        :param parent: str (optional)
-          The identifier of the workspace folder containing the object.
-        :param query: str (optional)
-          The text of the query to be run.
-        :param run_as_role: :class:`RunAsRole` (optional)
-          Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
-          viewer" behavior) or `"owner"` (signifying "run as owner" behavior)
-        :param tags: List[str] (optional)
+            w.queries.delete(id=query.id)
+
+        Create a query.
+        
+        Creates a query.
+        
+        :param query: :class:`CreateQueryRequestQuery` (optional)
         
         :returns: :class:`Query`
         
 
-    .. py:method:: delete(query_id: str)
+    .. py:method:: delete(id: str)
 
         Delete a query.
         
         Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and
-        they cannot be used for alerts. The trash is deleted after 30 days.
-        
-        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
-        
-        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+        cannot be used for alerts. You can restore a trashed query through the UI. A trashed query is
+        permanently deleted after 30 days.
         
-        :param query_id: str
+        :param id: str
         
         
         
 
-    .. py:method:: get(query_id: str) -> Query
+    .. py:method:: get(id: str) -> Query
 
 
         Usage:
@@ -101,89 +64,58 @@
             import time
             
             from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service import sql
             
             w = WorkspaceClient()
             
             srcs = w.data_sources.list()
             
-            query = w.queries.create(name=f'sdk-{time.time_ns()}',
-                                     data_source_id=srcs[0].id,
-                                     description="test query from Go SDK",
-                                     query="SHOW TABLES")
+            query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}',
+                                                                       warehouse_id=srcs[0].warehouse_id,
+                                                                       description="test query from Python SDK",
+                                                                       query_text="SHOW TABLES"))
             
-            by_id = w.queries.get(query_id=query.id)
+            by_id = w.queries.get(id=query.id)
             
             # cleanup
-            w.queries.delete(query_id=query.id)
+            w.queries.delete(id=query.id)
 
-        Get a query definition.
+        Get a query.
         
-        Retrieve a query object definition along with contextual permissions information about the currently
-        authenticated user.
+        Gets a query.
         
-        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
-        
-        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
-        
-        :param query_id: str
+        :param id: str
         
         :returns: :class:`Query`
         
 
-    .. py:method:: list( [, order: Optional[str], page: Optional[int], page_size: Optional[int], q: Optional[str]]) -> Iterator[Query]
+    .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ListQueryObjectsResponseQuery]
 
-        Get a list of queries.
-        
-        Gets a list of queries. Optionally, this list can be filtered by a search term.
-        
-        **Warning**: Calling this API concurrently 10 or more times could result in throttling, service
-        degradation, or a temporary ban.
+        List queries.
         
-        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+        Gets a list of queries accessible to the user, ordered by creation time. **Warning:** Calling this API
+        concurrently 10 or more times could result in throttling, service degradation, or a temporary ban.
         
-        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
-        
-        :param order: str (optional)
-          Name of query attribute to order by. Default sort order is ascending. Append a dash (`-`) to order
-          descending instead.
-          
-          - `name`: The name of the query.
-          
-          - `created_at`: The timestamp the query was created.
-          
-          - `runtime`: The time it took to run this query. This is blank for parameterized queries. A blank
-          value is treated as the highest value for sorting.
-          
-          - `executed_at`: The timestamp when the query was last run.
-          
-          - `created_by`: The user name of the user that created the query.
-        :param page: int (optional)
-          Page number to retrieve.
         :param page_size: int (optional)
-          Number of queries to return per page.
-        :param q: str (optional)
-          Full text search term
+        :param page_token: str (optional)
         
-        :returns: Iterator over :class:`Query`
+        :returns: Iterator over :class:`ListQueryObjectsResponseQuery`
         
 
-    .. py:method:: restore(query_id: str)
+    .. py:method:: list_visualizations(id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[Visualization]
 
-        Restore a query.
-        
-        Restore a query that has been moved to the trash. A restored query appears in list views and searches.
-        You can use restored queries for alerts.
-        
-        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
-        
-        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
+        List visualizations on a query.
         
-        :param query_id: str
+        Gets a list of visualizations on a query.
         
+        :param id: str
+        :param page_size: int (optional)
+        :param page_token: str (optional)
         
+        :returns: Iterator over :class:`Visualization`
         
 
-    .. py:method:: update(query_id: str [, data_source_id: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], query: Optional[str], run_as_role: Optional[RunAsRole], tags: Optional[List[str]]]) -> Query
+    .. py:method:: update(id: str, update_mask: str [, query: Optional[UpdateQueryRequestQuery]]) -> Query
 
 
         Usage:
@@ -193,55 +125,36 @@
             import time
             
             from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service import sql
             
             w = WorkspaceClient()
             
             srcs = w.data_sources.list()
             
-            query = w.queries.create(name=f'sdk-{time.time_ns()}',
-                                     data_source_id=srcs[0].id,
-                                     description="test query from Go SDK",
-                                     query="SHOW TABLES")
+            query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}',
+                                                                       warehouse_id=srcs[0].warehouse_id,
+                                                                       description="test query from Python SDK",
+                                                                       query_text="SHOW TABLES"))
             
-            updated = w.queries.update(query_id=query.id,
-                                       name=f'sdk-{time.time_ns()}',
-                                       data_source_id=srcs[0].id,
-                                       description="UPDATED: test query from Go SDK",
-                                       query="SELECT 2+2")
+            updated = w.queries.update(id=query.id,
+                                       query=sql.UpdateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}',
+                                                                         description="UPDATED: test query from Python SDK",
+                                                                         query_text="SELECT 2+2"),
+                                       update_mask="display_name,description,query_text")
             
             # cleanup
-            w.queries.delete(query_id=query.id)
-
-        Change a query definition.
-        
-        Modify this query definition.
-        
-        **Note**: You cannot undo this operation.
-        
-        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
-        
-        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
-        
-        :param query_id: str
-        :param data_source_id: str (optional)
-          Data source ID maps to the ID of the data source used by the resource and is distinct from the
-          warehouse ID. [Learn more]
-          
-          [Learn more]: https://docs.databricks.com/api/workspace/datasources/list
-        :param description: str (optional)
-          General description that conveys additional information about this query such as usage notes.
-        :param name: str (optional)
-          The title of this query that appears in list views, widget headings, and on the query page.
-        :param options: Any (optional)
-          Exclusively used for storing a list parameter definitions. A parameter is an object with `title`,
-          `name`, `type`, and `value` properties. The `value` field here is the default value. It can be
-          overridden at runtime.
-        :param query: str (optional)
-          The text of the query to be run.
-        :param run_as_role: :class:`RunAsRole` (optional)
-          Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
-          viewer" behavior) or `"owner"` (signifying "run as owner" behavior)
-        :param tags: List[str] (optional)
+            w.queries.delete(id=query.id)
+
+        Update a query.
+        
+        Updates a query.
+        
+        :param id: str
+        :param update_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as a single string. To specify
+          multiple fields in the field mask, use a comma as the separator (no space).
+        :param query: :class:`UpdateQueryRequestQuery` (optional)
         
         :returns: :class:`Query`
         
\ No newline at end of file
diff --git a/docs/workspace/sql/queries_legacy.rst b/docs/workspace/sql/queries_legacy.rst
new file mode 100644
index 000000000..a7ab56836
--- /dev/null
+++ b/docs/workspace/sql/queries_legacy.rst
@@ -0,0 +1,183 @@
+``w.queries_legacy``: Queries (legacy)
+======================================
+.. currentmodule:: databricks.sdk.service.sql
+
+.. py:class:: QueriesLegacyAPI
+
+    These endpoints are used for CRUD operations on query definitions. Query definitions include the target
+    SQL warehouse, query text, name, description, tags, parameters, and visualizations. Queries can be
+    scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create.
+    
+    **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn
+    more]
+    
+    [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+
+    .. py:method:: create( [, data_source_id: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], parent: Optional[str], query: Optional[str], run_as_role: Optional[RunAsRole], tags: Optional[List[str]]]) -> LegacyQuery
+
+        Create a new query definition.
+        
+        Creates a new query definition. Queries created with this endpoint belong to the authenticated user
+        making the request.
+        
+        The `data_source_id` field specifies the ID of the SQL warehouse to run this query against. You can
+        use the Data Sources API to see a complete list of available SQL warehouses. Or you can copy the
+        `data_source_id` from an existing query.
+        
+        **Note**: You cannot add a visualization until you create the query.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/create
+        instead. [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
+        :param data_source_id: str (optional)
+          Data source ID maps to the ID of the data source used by the resource and is distinct from the
+          warehouse ID. [Learn more]
+          
+          [Learn more]: https://docs.databricks.com/api/workspace/datasources/list
+        :param description: str (optional)
+          General description that conveys additional information about this query such as usage notes.
+        :param name: str (optional)
+          The title of this query that appears in list views, widget headings, and on the query page.
+        :param options: Any (optional)
+          Exclusively used for storing a list parameter definitions. A parameter is an object with `title`,
+          `name`, `type`, and `value` properties. The `value` field here is the default value. It can be
+          overridden at runtime.
+        :param parent: str (optional)
+          The identifier of the workspace folder containing the object.
+        :param query: str (optional)
+          The text of the query to be run.
+        :param run_as_role: :class:`RunAsRole` (optional)
+          Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
+          viewer" behavior) or `"owner"` (signifying "run as owner" behavior)
+        :param tags: List[str] (optional)
+        
+        :returns: :class:`LegacyQuery`
+        
+
+    .. py:method:: delete(query_id: str)
+
+        Delete a query.
+        
+        Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and
+        they cannot be used for alerts. The trash is deleted after 30 days.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/delete
+        instead. [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
+        :param query_id: str
+        
+        
+        
+
+    .. py:method:: get(query_id: str) -> LegacyQuery
+
+        Get a query definition.
+        
+        Retrieve a query object definition along with contextual permissions information about the currently
+        authenticated user.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/get
+        instead. [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
+        :param query_id: str
+        
+        :returns: :class:`LegacyQuery`
+        
+
+    .. py:method:: list( [, order: Optional[str], page: Optional[int], page_size: Optional[int], q: Optional[str]]) -> Iterator[LegacyQuery]
+
+        Get a list of queries.
+        
+        Gets a list of queries. Optionally, this list can be filtered by a search term.
+        
+        **Warning**: Calling this API concurrently 10 or more times could result in throttling, service
+        degradation, or a temporary ban.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/list
+        instead. [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
+        :param order: str (optional)
+          Name of query attribute to order by. Default sort order is ascending. Append a dash (`-`) to order
+          descending instead.
+          
+          - `name`: The name of the query.
+          
+          - `created_at`: The timestamp the query was created.
+          
+          - `runtime`: The time it took to run this query. This is blank for parameterized queries. A blank
+          value is treated as the highest value for sorting.
+          
+          - `executed_at`: The timestamp when the query was last run.
+          
+          - `created_by`: The user name of the user that created the query.
+        :param page: int (optional)
+          Page number to retrieve.
+        :param page_size: int (optional)
+          Number of queries to return per page.
+        :param q: str (optional)
+          Full text search term
+        
+        :returns: Iterator over :class:`LegacyQuery`
+        
+
+    .. py:method:: restore(query_id: str)
+
+        Restore a query.
+        
+        Restore a query that has been moved to the trash. A restored query appears in list views and searches.
+        You can use restored queries for alerts.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please see the latest version.
+        [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
+        :param query_id: str
+        
+        
+        
+
+    .. py:method:: update(query_id: str [, data_source_id: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], query: Optional[str], run_as_role: Optional[RunAsRole], tags: Optional[List[str]]]) -> LegacyQuery
+
+        Change a query definition.
+        
+        Modify this query definition.
+        
+        **Note**: You cannot undo this operation.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/update
+        instead. [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
+        :param query_id: str
+        :param data_source_id: str (optional)
+          Data source ID maps to the ID of the data source used by the resource and is distinct from the
+          warehouse ID. [Learn more]
+          
+          [Learn more]: https://docs.databricks.com/api/workspace/datasources/list
+        :param description: str (optional)
+          General description that conveys additional information about this query such as usage notes.
+        :param name: str (optional)
+          The title of this query that appears in list views, widget headings, and on the query page.
+        :param options: Any (optional)
+          Exclusively used for storing a list parameter definitions. A parameter is an object with `title`,
+          `name`, `type`, and `value` properties. The `value` field here is the default value. It can be
+          overridden at runtime.
+        :param query: str (optional)
+          The text of the query to be run.
+        :param run_as_role: :class:`RunAsRole` (optional)
+          Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
+          viewer" behavior) or `"owner"` (signifying "run as owner" behavior)
+        :param tags: List[str] (optional)
+        
+        :returns: :class:`LegacyQuery`
+        
\ No newline at end of file
diff --git a/docs/workspace/sql/query_history.rst b/docs/workspace/sql/query_history.rst
index 6aacd3c78..2f5520cdf 100644
--- a/docs/workspace/sql/query_history.rst
+++ b/docs/workspace/sql/query_history.rst
@@ -4,9 +4,10 @@
 
 .. py:class:: QueryHistoryAPI
 
-    Access the history of queries through SQL warehouses.
+    A service responsible for storing and retrieving the list of queries run against SQL endpoints and
+    serverless compute.
 
-    .. py:method:: list( [, filter_by: Optional[QueryFilter], include_metrics: Optional[bool], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[QueryInfo]
+    .. py:method:: list( [, filter_by: Optional[QueryFilter], include_metrics: Optional[bool], max_results: Optional[int], page_token: Optional[str]]) -> ListQueriesResponse
 
 
         Usage:
@@ -23,20 +24,23 @@
 
         List Queries.
         
-        List the history of queries through SQL warehouses.
+        List the history of queries through SQL warehouses, and serverless compute.
         
-        You can filter by user ID, warehouse ID, status, and time range.
+        You can filter by user ID, warehouse ID, status, and time range. Most recently started queries are
+        returned first (up to max_results in request). The pagination token returned in response can be used
+        to list subsequent query statuses.
         
         :param filter_by: :class:`QueryFilter` (optional)
           A filter to limit query history results. This field is optional.
         :param include_metrics: bool (optional)
-          Whether to include metrics about query.
+          Whether to include the query metrics with each query. Only use this for a small subset of queries
+          (max_results). Defaults to false.
         :param max_results: int (optional)
-          Limit the number of results returned in one page. The default is 100.
+          Limit the number of results returned in one page. Must be less than 1000 and the default is 100.
         :param page_token: str (optional)
           A token that can be used to get the next page of results. The token can contains characters that
           need to be encoded before using it in a URL. For example, the character '+' needs to be replaced by
-          %2B.
+          %2B. This field is optional.
         
-        :returns: Iterator over :class:`QueryInfo`
+        :returns: :class:`ListQueriesResponse`
         
\ No newline at end of file
diff --git a/docs/workspace/sql/query_visualizations.rst b/docs/workspace/sql/query_visualizations.rst
index 53888cee7..95095fb20 100644
--- a/docs/workspace/sql/query_visualizations.rst
+++ b/docs/workspace/sql/query_visualizations.rst
@@ -4,56 +4,43 @@
 
 .. py:class:: QueryVisualizationsAPI
 
-    This is an evolving API that facilitates the addition and removal of vizualisations from existing queries
-    within the Databricks Workspace. Data structures may change over time.
+    This is an evolving API that facilitates the addition and removal of visualizations from existing queries
+    in the Databricks Workspace. Data structures can change over time.
 
-    .. py:method:: create(query_id: str, type: str, options: Any [, description: Optional[str], name: Optional[str]]) -> Visualization
+    .. py:method:: create( [, visualization: Optional[CreateVisualizationRequestVisualization]]) -> Visualization
 
-        Add visualization to a query.
+        Add a visualization to a query.
         
-        :param query_id: str
-          The identifier returned by :method:queries/create
-        :param type: str
-          The type of visualization: chart, table, pivot table, and so on.
-        :param options: Any
-          The options object varies widely from one visualization type to the next and is unsupported.
-          Databricks does not recommend modifying visualization settings in JSON.
-        :param description: str (optional)
-          A short description of this visualization. This is not displayed in the UI.
-        :param name: str (optional)
-          The name of the visualization that appears on dashboards and the query screen.
+        Adds a visualization to a query.
+        
+        :param visualization: :class:`CreateVisualizationRequestVisualization` (optional)
         
         :returns: :class:`Visualization`
         
 
     .. py:method:: delete(id: str)
 
-        Remove visualization.
+        Remove a visualization.
+        
+        Removes a visualization.
         
         :param id: str
-          Widget ID returned by :method:queryvizualisations/create
         
         
         
 
-    .. py:method:: update(id: str [, created_at: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], query: Optional[Query], type: Optional[str], updated_at: Optional[str]]) -> Visualization
+    .. py:method:: update(id: str, update_mask: str [, visualization: Optional[UpdateVisualizationRequestVisualization]]) -> Visualization
 
-        Edit existing visualization.
+        Update a visualization.
+        
+        Updates a visualization.
         
         :param id: str
-          The UUID for this visualization.
-        :param created_at: str (optional)
-        :param description: str (optional)
-          A short description of this visualization. This is not displayed in the UI.
-        :param name: str (optional)
-          The name of the visualization that appears on dashboards and the query screen.
-        :param options: Any (optional)
-          The options object varies widely from one visualization type to the next and is unsupported.
-          Databricks does not recommend modifying visualization settings in JSON.
-        :param query: :class:`Query` (optional)
-        :param type: str (optional)
-          The type of visualization: chart, table, pivot table, and so on.
-        :param updated_at: str (optional)
+        :param update_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as a single string. To specify
+          multiple fields in the field mask, use a comma as the separator (no space).
+        :param visualization: :class:`UpdateVisualizationRequestVisualization` (optional)
         
         :returns: :class:`Visualization`
         
\ No newline at end of file
diff --git a/docs/workspace/sql/query_visualizations_legacy.rst b/docs/workspace/sql/query_visualizations_legacy.rst
new file mode 100644
index 000000000..f56f78a5f
--- /dev/null
+++ b/docs/workspace/sql/query_visualizations_legacy.rst
@@ -0,0 +1,85 @@
+``w.query_visualizations_legacy``: Query Visualizations (legacy)
+================================================================
+.. currentmodule:: databricks.sdk.service.sql
+
+.. py:class:: QueryVisualizationsLegacyAPI
+
+    This is an evolving API that facilitates the addition and removal of visualizations from existing queries
+    within the Databricks Workspace. Data structures may change over time.
+    
+    **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn
+    more]
+    
+    [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+
+    .. py:method:: create(query_id: str, type: str, options: Any [, description: Optional[str], name: Optional[str]]) -> LegacyVisualization
+
+        Add visualization to a query.
+        
+        Creates a visualization in the query.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please use
+        :method:queryvisualizations/create instead. [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
+        :param query_id: str
+          The identifier returned by :method:queries/create
+        :param type: str
+          The type of visualization: chart, table, pivot table, and so on.
+        :param options: Any
+          The options object varies widely from one visualization type to the next and is unsupported.
+          Databricks does not recommend modifying visualization settings in JSON.
+        :param description: str (optional)
+          A short description of this visualization. This is not displayed in the UI.
+        :param name: str (optional)
+          The name of the visualization that appears on dashboards and the query screen.
+        
+        :returns: :class:`LegacyVisualization`
+        
+
+    .. py:method:: delete(id: str)
+
+        Remove visualization.
+        
+        Removes a visualization from the query.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please use
+        :method:queryvisualizations/delete instead. [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
+        :param id: str
+          Widget ID returned by :method:queryvizualisations/create
+        
+        
+        
+
+    .. py:method:: update(id: str [, created_at: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], query: Optional[LegacyQuery], type: Optional[str], updated_at: Optional[str]]) -> LegacyVisualization
+
+        Edit existing visualization.
+        
+        Updates a visualization in the query.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please use
+        :method:queryvisualizations/update instead. [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
+        :param id: str
+          The UUID for this visualization.
+        :param created_at: str (optional)
+        :param description: str (optional)
+          A short description of this visualization. This is not displayed in the UI.
+        :param name: str (optional)
+          The name of the visualization that appears on dashboards and the query screen.
+        :param options: Any (optional)
+          The options object varies widely from one visualization type to the next and is unsupported.
+          Databricks does not recommend modifying visualization settings in JSON.
+        :param query: :class:`LegacyQuery` (optional)
+        :param type: str (optional)
+          The type of visualization: chart, table, pivot table, and so on.
+        :param updated_at: str (optional)
+        
+        :returns: :class:`LegacyVisualization`
+        
\ No newline at end of file
diff --git a/docs/workspace/sql/statement_execution.rst b/docs/workspace/sql/statement_execution.rst
index 7914977c2..716fa4fdc 100644
--- a/docs/workspace/sql/statement_execution.rst
+++ b/docs/workspace/sql/statement_execution.rst
@@ -82,7 +82,9 @@
     are approximate, occur server-side, and cannot account for things such as caller delays and network
     latency from caller to service. - The system will auto-close a statement after one hour if the client
     stops polling and thus you must poll at least once an hour. - The results are only available for one hour
-    after success; polling does not extend this.
+    after success; polling does not extend this. - The SQL Execution API must be used for the entire lifecycle
+    of the statement. For example, you cannot use the Jobs API to execute the command, and then the SQL
+    Execution API to cancel it.
     
     [Apache Arrow Columnar]: https://arrow.apache.org/overview/
     [Databricks SQL Statement Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html
@@ -101,7 +103,7 @@
         
         
 
-    .. py:method:: execute_statement(statement: str, warehouse_id: str [, byte_limit: Optional[int], catalog: Optional[str], disposition: Optional[Disposition], format: Optional[Format], on_wait_timeout: Optional[ExecuteStatementRequestOnWaitTimeout], parameters: Optional[List[StatementParameterListItem]], row_limit: Optional[int], schema: Optional[str], wait_timeout: Optional[str]]) -> ExecuteStatementResponse
+    .. py:method:: execute_statement(statement: str, warehouse_id: str [, byte_limit: Optional[int], catalog: Optional[str], disposition: Optional[Disposition], format: Optional[Format], on_wait_timeout: Optional[ExecuteStatementRequestOnWaitTimeout], parameters: Optional[List[StatementParameterListItem]], row_limit: Optional[int], schema: Optional[str], wait_timeout: Optional[str]]) -> StatementResponse
 
         Execute a SQL statement.
         
@@ -122,26 +124,6 @@
           
           [`USE CATALOG`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-catalog.html
         :param disposition: :class:`Disposition` (optional)
-          The fetch disposition provides two modes of fetching results: `INLINE` and `EXTERNAL_LINKS`.
-          
-          Statements executed with `INLINE` disposition will return result data inline, in `JSON_ARRAY`
-          format, in a series of chunks. If a given statement produces a result set with a size larger than 25
-          MiB, that statement execution is aborted, and no result set will be available.
-          
-          **NOTE** Byte limits are computed based upon internal representations of the result set data, and
-          might not match the sizes visible in JSON responses.
-          
-          Statements executed with `EXTERNAL_LINKS` disposition will return result data as external links:
-          URLs that point to cloud storage internal to the workspace. Using `EXTERNAL_LINKS` disposition
-          allows statements to generate arbitrarily sized result sets for fetching up to 100 GiB. The
-          resulting links have two important properties:
-          
-          1. They point to resources _external_ to the Databricks compute; therefore any associated
-          authentication information (typically a personal access token, OAuth token, or similar) _must be
-          removed_ when fetching from these links.
-          
-          2. These are presigned URLs with a specific expiration, indicated in the response. The behavior when
-          attempting to use an expired link is cloud specific.
         :param format: :class:`Format` (optional)
           Statement execution supports three result formats: `JSON_ARRAY` (default), `ARROW_STREAM`, and
           `CSV`.
@@ -229,10 +211,10 @@
           the statement takes longer to execute, `on_wait_timeout` determines what should happen after the
           timeout is reached.
         
-        :returns: :class:`ExecuteStatementResponse`
+        :returns: :class:`StatementResponse`
         
 
-    .. py:method:: get_statement(statement_id: str) -> GetStatementResponse
+    .. py:method:: get_statement(statement_id: str) -> StatementResponse
 
         Get status, manifest, and result first chunk.
         
@@ -248,7 +230,7 @@
           The statement ID is returned upon successfully submitting a SQL statement, and is a required
           reference for all subsequent calls.
         
-        :returns: :class:`GetStatementResponse`
+        :returns: :class:`StatementResponse`
         
 
     .. py:method:: get_statement_result_chunk_n(statement_id: str, chunk_index: int) -> ResultData
diff --git a/docs/workspace/sql/warehouses.rst b/docs/workspace/sql/warehouses.rst
index 793852680..58b8a3fc0 100644
--- a/docs/workspace/sql/warehouses.rst
+++ b/docs/workspace/sql/warehouses.rst
@@ -17,13 +17,18 @@
             import time
             
             from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service import sql
             
             w = WorkspaceClient()
             
-            created = w.warehouses.create(name=f'sdk-{time.time_ns()}',
-                                          cluster_size="2X-Small",
-                                          max_num_clusters=1,
-                                          auto_stop_mins=10).result()
+            created = w.warehouses.create(
+                name=f'sdk-{time.time_ns()}',
+                cluster_size="2X-Small",
+                max_num_clusters=1,
+                auto_stop_mins=10,
+                tags=sql.EndpointTags(
+                    custom_tags=[sql.EndpointTagPair(key="Owner", value="eng-dev-ecosystem-team_at_databricks.com")
+                                 ])).result()
             
             # cleanup
             w.warehouses.delete(id=created.id)
@@ -36,7 +41,8 @@
           The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) before it
           is automatically stopped.
           
-          Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop.
+          Supported values: - Must be >= 0 mins for serverless warehouses - Must be == 0 or >= 10 mins for
+          non-serverless warehouses - 0 indicates no autostop.
           
           Defaults to 120 mins
         :param channel: :class:`Channel` (optional)
@@ -117,13 +123,18 @@
             import time
             
             from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service import sql
             
             w = WorkspaceClient()
             
-            created = w.warehouses.create(name=f'sdk-{time.time_ns()}',
-                                          cluster_size="2X-Small",
-                                          max_num_clusters=1,
-                                          auto_stop_mins=10).result()
+            created = w.warehouses.create(
+                name=f'sdk-{time.time_ns()}',
+                cluster_size="2X-Small",
+                max_num_clusters=1,
+                auto_stop_mins=10,
+                tags=sql.EndpointTags(
+                    custom_tags=[sql.EndpointTagPair(key="Owner", value="eng-dev-ecosystem-team_at_databricks.com")
+                                 ])).result()
             
             _ = w.warehouses.edit(id=created.id,
                                   name=f'sdk-{time.time_ns()}',
@@ -213,13 +224,18 @@
             import time
             
             from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service import sql
             
             w = WorkspaceClient()
             
-            created = w.warehouses.create(name=f'sdk-{time.time_ns()}',
-                                          cluster_size="2X-Small",
-                                          max_num_clusters=1,
-                                          auto_stop_mins=10).result()
+            created = w.warehouses.create(
+                name=f'sdk-{time.time_ns()}',
+                cluster_size="2X-Small",
+                max_num_clusters=1,
+                auto_stop_mins=10,
+                tags=sql.EndpointTags(
+                    custom_tags=[sql.EndpointTagPair(key="Owner", value="eng-dev-ecosystem-team_at_databricks.com")
+                                 ])).result()
             
             wh = w.warehouses.get(id=created.id)
             
diff --git a/docs/workspace/workspace/git_credentials.rst b/docs/workspace/workspace/git_credentials.rst
index 490cb16ea..34851e84a 100644
--- a/docs/workspace/workspace/git_credentials.rst
+++ b/docs/workspace/workspace/git_credentials.rst
@@ -33,9 +33,9 @@
         existing credentials, or the DELETE endpoint to delete existing credentials.
         
         :param git_provider: str
-          Git provider. This field is case-insensitive. The available Git providers are gitHub,
-          bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer,
-          gitLabEnterpriseEdition and awsCodeCommit.
+          Git provider. This field is case-insensitive. The available Git providers are `gitHub`,
+          `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`,
+          `gitLabEnterpriseEdition` and `awsCodeCommit`.
         :param git_username: str (optional)
           The username or email provided with your Git provider account, depending on which provider you are
           using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or username may
@@ -44,8 +44,7 @@
           Access Token authentication documentation to see what is supported.
         :param personal_access_token: str (optional)
           The personal access token used to authenticate to the corresponding Git provider. For certain
-          providers, support may exist for other types of scoped access tokens. [Learn more]. The personal
-          access token used to authenticate to the corresponding Git
+          providers, support may exist for other types of scoped access tokens. [Learn more].
           
           [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html
         
@@ -64,7 +63,7 @@
         
         
 
-    .. py:method:: get(credential_id: int) -> CredentialInfo
+    .. py:method:: get(credential_id: int) -> GetCredentialsResponse
 
 
         Usage:
@@ -89,7 +88,7 @@
         :param credential_id: int
           The ID for the corresponding credential to access.
         
-        :returns: :class:`CredentialInfo`
+        :returns: :class:`GetCredentialsResponse`
         
 
     .. py:method:: list() -> Iterator[CredentialInfo]
@@ -112,7 +111,7 @@
         :returns: Iterator over :class:`CredentialInfo`
         
 
-    .. py:method:: update(credential_id: int [, git_provider: Optional[str], git_username: Optional[str], personal_access_token: Optional[str]])
+    .. py:method:: update(credential_id: int, git_provider: str [, git_username: Optional[str], personal_access_token: Optional[str]])
 
 
         Usage:
@@ -141,10 +140,10 @@
         
         :param credential_id: int
           The ID for the corresponding credential to access.
-        :param git_provider: str (optional)
-          Git provider. This field is case-insensitive. The available Git providers are gitHub,
-          bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer,
-          gitLabEnterpriseEdition and awsCodeCommit.
+        :param git_provider: str
+          Git provider. This field is case-insensitive. The available Git providers are `gitHub`,
+          `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`,
+          `gitLabEnterpriseEdition` and `awsCodeCommit`.
         :param git_username: str (optional)
           The username or email provided with your Git provider account, depending on which provider you are
           using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or username may
@@ -153,8 +152,7 @@
           Access Token authentication documentation to see what is supported.
         :param personal_access_token: str (optional)
           The personal access token used to authenticate to the corresponding Git provider. For certain
-          providers, support may exist for other types of scoped access tokens. [Learn more]. The personal
-          access token used to authenticate to the corresponding Git
+          providers, support may exist for other types of scoped access tokens. [Learn more].
           
           [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html
         
diff --git a/docs/workspace/workspace/repos.rst b/docs/workspace/workspace/repos.rst
index a5c602a3a..01b1c875f 100644
--- a/docs/workspace/workspace/repos.rst
+++ b/docs/workspace/workspace/repos.rst
@@ -14,7 +14,7 @@
     Within Repos you can develop code in notebooks or other files and follow data science and engineering code
     development best practices using Git for version control, collaboration, and CI/CD.
 
-    .. py:method:: create(url: str, provider: str [, path: Optional[str], sparse_checkout: Optional[SparseCheckout]]) -> RepoInfo
+    .. py:method:: create(url: str, provider: str [, path: Optional[str], sparse_checkout: Optional[SparseCheckout]]) -> CreateRepoResponse
 
 
         Usage:
@@ -42,17 +42,17 @@
         :param url: str
           URL of the Git repository to be linked.
         :param provider: str
-          Git provider. This field is case-insensitive. The available Git providers are gitHub,
-          bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer,
-          gitLabEnterpriseEdition and awsCodeCommit.
+          Git provider. This field is case-insensitive. The available Git providers are `gitHub`,
+          `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`,
+          `gitLabEnterpriseEdition` and `awsCodeCommit`.
         :param path: str (optional)
           Desired path for the repo in the workspace. Almost any path in the workspace can be chosen. If repo
-          is created in /Repos, path must be in the format /Repos/{folder}/{repo-name}.
+          is created in `/Repos`, path must be in the format `/Repos/{folder}/{repo-name}`.
         :param sparse_checkout: :class:`SparseCheckout` (optional)
           If specified, the repo will be created with sparse checkout enabled. You cannot enable/disable
           sparse checkout after the repo is created.
         
-        :returns: :class:`RepoInfo`
+        :returns: :class:`CreateRepoResponse`
         
 
     .. py:method:: delete(repo_id: int)
@@ -62,12 +62,12 @@
         Deletes the specified repo.
         
         :param repo_id: int
-          The ID for the corresponding repo to access.
+          ID of the Git folder (repo) object in the workspace.
         
         
         
 
-    .. py:method:: get(repo_id: int) -> RepoInfo
+    .. py:method:: get(repo_id: int) -> GetRepoResponse
 
 
         Usage:
@@ -94,9 +94,9 @@
         Returns the repo with the given repo ID.
         
         :param repo_id: int
-          The ID for the corresponding repo to access.
+          ID of the Git folder (repo) object in the workspace.
         
-        :returns: :class:`RepoInfo`
+        :returns: :class:`GetRepoResponse`
         
 
     .. py:method:: get_permission_levels(repo_id: str) -> GetRepoPermissionLevelsResponse
@@ -139,15 +139,16 @@
 
         Get repos.
         
-        Returns repos that the calling user has Manage permissions on. Results are paginated with each page
-        containing twenty repos.
+        Returns repos that the calling user has Manage permissions on. Use `next_page_token` to iterate
+        through additional pages.
         
         :param next_page_token: str (optional)
           Token used to get the next page of results. If not specified, returns the first page of results as
           well as a next page token if there are more results.
         :param path_prefix: str (optional)
-          Filters repos that have paths starting with the given path prefix. If not provided repos from /Repos
-          will be served.
+          Filters repos that have paths starting with the given path prefix. If not provided or when provided
+          an effectively empty prefix (`/` or `/Workspace`) Git folders (repos) from `/Workspace/Repos` will
+          be served.
         
         :returns: Iterator over :class:`RepoInfo`
         
@@ -193,7 +194,7 @@
         branch.
         
         :param repo_id: int
-          The ID for the corresponding repo to access.
+          ID of the Git folder (repo) object in the workspace.
         :param branch: str (optional)
           Branch that the local version of the repo is checked out to.
         :param sparse_checkout: :class:`SparseCheckoutUpdate` (optional)
diff --git a/examples/account/billable_usage/download_usage_download.py b/examples/account/billable_usage/download_usage_download.py
index aba474963..9147f87b9 100755
--- a/examples/account/billable_usage/download_usage_download.py
+++ b/examples/account/billable_usage/download_usage_download.py
@@ -2,4 +2,4 @@
 
 a = AccountClient()
 
-resp = a.billable_usage.download(start_month="2023-01", end_month="2023-02")
+resp = a.billable_usage.download(start_month="2024-08", end_month="2024-09")
diff --git a/examples/account/budgets/create_budgets.py b/examples/account/budgets/create_budgets.py
index 12f20786a..030cc8a57 100755
--- a/examples/account/budgets/create_budgets.py
+++ b/examples/account/budgets/create_budgets.py
@@ -5,13 +5,26 @@
 
 a = AccountClient()
 
-created = a.budgets.create(budget=billing.Budget(
-    name=f'sdk-{time.time_ns()}',
-    filter="tag.tagName = 'all'",
-    period="1 month",
-    start_date="2022-01-01",
-    target_amount="100",
-    alerts=[billing.BudgetAlert(email_notifications=["admin@example.com"], min_percentage=50)]))
+created = a.budgets.create(budget=billing.CreateBudgetConfigurationBudget(
+    display_name=f'sdk-{time.time_ns()}',
+    filter=billing.BudgetConfigurationFilter(tags=[
+        billing.BudgetConfigurationFilterTagClause(key="tagName",
+                                                   value=billing.BudgetConfigurationFilterClause(
+                                                       operator=billing.BudgetConfigurationFilterOperator.IN,
+                                                       values=["all"]))
+    ]),
+    alert_configurations=[
+        billing.CreateBudgetConfigurationBudgetAlertConfigurations(
+            time_period=billing.AlertConfigurationTimePeriod.MONTH,
+            quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD,
+            trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED,
+            quantity_threshold="100",
+            action_configurations=[
+                billing.CreateBudgetConfigurationBudgetActionConfigurations(
+                    action_type=billing.ActionConfigurationType.EMAIL_NOTIFICATION,
+                    target="admin@example.com")
+            ])
+    ]))
 
 # cleanup
-a.budgets.delete(budget_id=created.budget.budget_id)
+a.budgets.delete(budget_id=created.budget.budget_configuration_id)
diff --git a/examples/account/budgets/get_budgets.py b/examples/account/budgets/get_budgets.py
index 8640fc974..9c2973110 100755
--- a/examples/account/budgets/get_budgets.py
+++ b/examples/account/budgets/get_budgets.py
@@ -5,15 +5,28 @@
 
 a = AccountClient()
 
-created = a.budgets.create(budget=billing.Budget(
-    name=f'sdk-{time.time_ns()}',
-    filter="tag.tagName = 'all'",
-    period="1 month",
-    start_date="2022-01-01",
-    target_amount="100",
-    alerts=[billing.BudgetAlert(email_notifications=["admin@example.com"], min_percentage=50)]))
+created = a.budgets.create(budget=billing.CreateBudgetConfigurationBudget(
+    display_name=f'sdk-{time.time_ns()}',
+    filter=billing.BudgetConfigurationFilter(tags=[
+        billing.BudgetConfigurationFilterTagClause(key="tagName",
+                                                   value=billing.BudgetConfigurationFilterClause(
+                                                       operator=billing.BudgetConfigurationFilterOperator.IN,
+                                                       values=["all"]))
+    ]),
+    alert_configurations=[
+        billing.CreateBudgetConfigurationBudgetAlertConfigurations(
+            time_period=billing.AlertConfigurationTimePeriod.MONTH,
+            quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD,
+            trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED,
+            quantity_threshold="100",
+            action_configurations=[
+                billing.CreateBudgetConfigurationBudgetActionConfigurations(
+                    action_type=billing.ActionConfigurationType.EMAIL_NOTIFICATION,
+                    target="admin@example.com")
+            ])
+    ]))
 
-by_id = a.budgets.get(budget_id=created.budget.budget_id)
+by_id = a.budgets.get(budget_id=created.budget.budget_configuration_id)
 
 # cleanup
-a.budgets.delete(budget_id=created.budget.budget_id)
+a.budgets.delete(budget_id=created.budget.budget_configuration_id)
diff --git a/examples/account/budgets/list_budgets.py b/examples/account/budgets/list_budgets.py
index 303690ab7..dd425dba4 100755
--- a/examples/account/budgets/list_budgets.py
+++ b/examples/account/budgets/list_budgets.py
@@ -1,5 +1,6 @@
 from databricks.sdk import AccountClient
+from databricks.sdk.service import billing
 
 a = AccountClient()
 
-all = a.budgets.list()
+all = a.budgets.list(billing.ListBudgetConfigurationsRequest())
diff --git a/examples/account/budgets/update_budgets.py b/examples/account/budgets/update_budgets.py
index 1a0193b1d..399770058 100755
--- a/examples/account/budgets/update_budgets.py
+++ b/examples/account/budgets/update_budgets.py
@@ -5,24 +5,47 @@
 
 a = AccountClient()
 
-created = a.budgets.create(budget=billing.Budget(
-    name=f'sdk-{time.time_ns()}',
-    filter="tag.tagName = 'all'",
-    period="1 month",
-    start_date="2022-01-01",
-    target_amount="100",
-    alerts=[billing.BudgetAlert(email_notifications=["admin@example.com"], min_percentage=50)]))
+created = a.budgets.create(budget=billing.CreateBudgetConfigurationBudget(
+    display_name=f'sdk-{time.time_ns()}',
+    filter=billing.BudgetConfigurationFilter(tags=[
+        billing.BudgetConfigurationFilterTagClause(key="tagName",
+                                                   value=billing.BudgetConfigurationFilterClause(
+                                                       operator=billing.BudgetConfigurationFilterOperator.IN,
+                                                       values=["all"]))
+    ]),
+    alert_configurations=[
+        billing.CreateBudgetConfigurationBudgetAlertConfigurations(
+            time_period=billing.AlertConfigurationTimePeriod.MONTH,
+            quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD,
+            trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED,
+            quantity_threshold="100",
+            action_configurations=[
+                billing.CreateBudgetConfigurationBudgetActionConfigurations(
+                    action_type=billing.ActionConfigurationType.EMAIL_NOTIFICATION,
+                    target="admin@example.com")
+            ])
+    ]))
 
-a.budgets.update(budget_id=created.budget.budget_id,
-                 budget=billing.Budget(name=f'sdk-{time.time_ns()}',
-                                       filter="tag.tagName = 'all'",
-                                       period="1 month",
-                                       start_date="2022-01-01",
-                                       target_amount="100",
-                                       alerts=[
-                                           billing.BudgetAlert(email_notifications=["admin@example.com"],
-                                                               min_percentage=70)
-                                       ]))
+_ = a.budgets.update(
+    budget_id=created.budget.budget_configuration_id,
+    budget=billing.UpdateBudgetConfigurationBudget(
+        budget_configuration_id=created.budget.budget_configuration_id,
+        display_name=f'sdk-{time.time_ns()}',
+        filter=billing.BudgetConfigurationFilter(tags=[
+            billing.BudgetConfigurationFilterTagClause(
+                key="tagName",
+                value=billing.BudgetConfigurationFilterClause(
+                    operator=billing.BudgetConfigurationFilterOperator.IN, values=["all"]))
+        ]),
+        alert_configurations=[
+            billing.AlertConfiguration(
+                alert_configuration_id=created.budget.alert_configurations[0].alert_configuration_id,
+                time_period=billing.AlertConfigurationTimePeriod.MONTH,
+                quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD,
+                trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED,
+                quantity_threshold="50",
+                action_configurations=created.budget.alert_configurations[0].action_configurations)
+        ]))
 
 # cleanup
-a.budgets.delete(budget_id=created.budget.budget_id)
+a.budgets.delete(budget_id=created.budget.budget_configuration_id)
diff --git a/examples/account/io/read_usage_download.py b/examples/account/io/read_usage_download.py
index d52b31b0b..544a1d3f2 100755
--- a/examples/account/io/read_usage_download.py
+++ b/examples/account/io/read_usage_download.py
@@ -2,6 +2,6 @@
 
 a = AccountClient()
 
-resp = a.billable_usage.download(start_month="2023-01", end_month="2023-02")
+resp = a.billable_usage.download(start_month="2024-08", end_month="2024-09")
 
 out = a.io.read(resp.contents)
diff --git a/examples/account/waiter/get_workspaces.py b/examples/account/waiter/get_workspaces.py
new file mode 100755
index 000000000..c682d25c0
--- /dev/null
+++ b/examples/account/waiter/get_workspaces.py
@@ -0,0 +1,5 @@
+from databricks.sdk import AccountClient
+
+a = AccountClient()
+
+created = a.waiter.get()
diff --git a/examples/account/workspaces/create_workspaces.py b/examples/account/workspaces/create_workspaces.py
index bf820677c..c2ff96ef2 100755
--- a/examples/account/workspaces/create_workspaces.py
+++ b/examples/account/workspaces/create_workspaces.py
@@ -15,12 +15,12 @@
     aws_credentials=provisioning.CreateCredentialAwsCredentials(sts_role=provisioning.CreateCredentialStsRole(
         role_arn=os.environ["TEST_CROSSACCOUNT_ARN"])))
 
-created = a.workspaces.create(workspace_name=f'sdk-{time.time_ns()}',
-                              aws_region=os.environ["AWS_REGION"],
-                              credentials_id=role.credentials_id,
-                              storage_configuration_id=storage.storage_configuration_id).result()
+waiter = a.workspaces.create(workspace_name=f'sdk-{time.time_ns()}',
+                             aws_region=os.environ["AWS_REGION"],
+                             credentials_id=role.credentials_id,
+                             storage_configuration_id=storage.storage_configuration_id)
 
 # cleanup
 a.storage.delete(storage_configuration_id=storage.storage_configuration_id)
 a.credentials.delete(credentials_id=role.credentials_id)
-a.workspaces.delete(workspace_id=created.workspace_id)
+a.workspaces.delete(workspace_id=waiter.workspace_id)
diff --git a/examples/account/workspaces/get_workspaces.py b/examples/account/workspaces/get_workspaces.py
index 809a1f374..cd05630c7 100755
--- a/examples/account/workspaces/get_workspaces.py
+++ b/examples/account/workspaces/get_workspaces.py
@@ -1,28 +1,7 @@
-import os
-import time
-
 from databricks.sdk import AccountClient
-from databricks.sdk.service import provisioning
 
 a = AccountClient()
 
-storage = a.storage.create(
-    storage_configuration_name=f'sdk-{time.time_ns()}',
-    root_bucket_info=provisioning.RootBucketInfo(bucket_name=os.environ["TEST_ROOT_BUCKET"]))
-
-role = a.credentials.create(
-    credentials_name=f'sdk-{time.time_ns()}',
-    aws_credentials=provisioning.CreateCredentialAwsCredentials(sts_role=provisioning.CreateCredentialStsRole(
-        role_arn=os.environ["TEST_CROSSACCOUNT_ARN"])))
-
-created = a.workspaces.create(workspace_name=f'sdk-{time.time_ns()}',
-                              aws_region=os.environ["AWS_REGION"],
-                              credentials_id=role.credentials_id,
-                              storage_configuration_id=storage.storage_configuration_id).result()
+created = a.waiter.get()
 
 by_id = a.workspaces.get(workspace_id=created.workspace_id)
-
-# cleanup
-a.storage.delete(storage_configuration_id=storage.storage_configuration_id)
-a.credentials.delete(credentials_id=role.credentials_id)
-a.workspaces.delete(workspace_id=created.workspace_id)
diff --git a/examples/account/workspaces/update_workspaces.py b/examples/account/workspaces/update_workspaces.py
index f766e1b5a..e93450722 100755
--- a/examples/account/workspaces/update_workspaces.py
+++ b/examples/account/workspaces/update_workspaces.py
@@ -6,29 +6,14 @@
 
 a = AccountClient()
 
-storage = a.storage.create(
-    storage_configuration_name=f'sdk-{time.time_ns()}',
-    root_bucket_info=provisioning.RootBucketInfo(bucket_name=os.environ["TEST_ROOT_BUCKET"]))
-
-role = a.credentials.create(
-    credentials_name=f'sdk-{time.time_ns()}',
-    aws_credentials=provisioning.CreateCredentialAwsCredentials(sts_role=provisioning.CreateCredentialStsRole(
-        role_arn=os.environ["TEST_CROSSACCOUNT_ARN"])))
-
 update_role = a.credentials.create(
     credentials_name=f'sdk-{time.time_ns()}',
     aws_credentials=provisioning.CreateCredentialAwsCredentials(sts_role=provisioning.CreateCredentialStsRole(
         role_arn=os.environ["TEST_CROSSACCOUNT_ARN"])))
 
-created = a.workspaces.create(workspace_name=f'sdk-{time.time_ns()}',
-                              aws_region=os.environ["AWS_REGION"],
-                              credentials_id=role.credentials_id,
-                              storage_configuration_id=storage.storage_configuration_id).result()
+created = a.waiter.get()
 
 _ = a.workspaces.update(workspace_id=created.workspace_id, credentials_id=update_role.credentials_id).result()
 
 # cleanup
-a.storage.delete(storage_configuration_id=storage.storage_configuration_id)
-a.credentials.delete(credentials_id=role.credentials_id)
 a.credentials.delete(credentials_id=update_role.credentials_id)
-a.workspaces.delete(workspace_id=created.workspace_id)
diff --git a/examples/external_browser_auth.py b/examples/external_browser_auth.py
new file mode 100644
index 000000000..061ff60c7
--- /dev/null
+++ b/examples/external_browser_auth.py
@@ -0,0 +1,72 @@
+from databricks.sdk import WorkspaceClient
+import argparse
+import logging
+
+logging.basicConfig(level=logging.DEBUG)
+
+
+def register_custom_app(confidential: bool) -> tuple[str, str]:
+    """Creates new Custom OAuth App in Databricks Account"""
+    logging.info("No OAuth custom app client/secret provided, creating new app")
+
+    from databricks.sdk import AccountClient
+
+    account_client = AccountClient()
+
+    custom_app = account_client.custom_app_integration.create(
+        name="external-browser-demo",
+        redirect_urls=[
+            f"http://localhost:8020",
+        ],
+        confidential=confidential,
+        scopes=["all-apis"],
+    )
+    logging.info(f"Created new custom app: "
+                 f"--client_id {custom_app.client_id} "
+                 f"{'--client_secret ' + custom_app.client_secret if confidential else ''}")
+
+    return custom_app.client_id, custom_app.client_secret
+
+
+def delete_custom_app(client_id: str):
+    """Deletes the Custom OAuth App from the Databricks Account"""
+    logging.info(f"Deleting custom app {client_id}")
+    from databricks.sdk import AccountClient
+    account_client = AccountClient()
+    account_client.custom_app_integration.delete(client_id)
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--host", help="Databricks host", required=True)
+    parser.add_argument("--client_id", help="Databricks client_id", default=None)
+    parser.add_argument("--azure_client_id", help="Databricks azure_client_id", default=None)
+    parser.add_argument("--client_secret", help="Databricks client_secret", default=None)
+    parser.add_argument("--azure_client_secret", help="Databricks azure_client_secret", default=None)
+    parser.add_argument("--register-custom-app", action="store_true", help="Register a new custom app")
+    parser.add_argument("--register-custom-app-confidential", action="store_true", help="Register the custom app as confidential (issues a client secret)")
+    namespace = parser.parse_args()
+    if namespace.register_custom_app and (namespace.client_id is not None or namespace.azure_client_id is not None):
+        raise ValueError("Cannot register custom app and provide --client_id/--azure_client_id at the same time")
+    if not namespace.register_custom_app and namespace.client_id is None and namespace.azure_client_id is None:
+        raise ValueError("Must provide --client_id/--azure_client_id or register a custom app")
+    if namespace.register_custom_app:
+        client_id, client_secret = register_custom_app(namespace.register_custom_app_confidential)
+    else:
+        client_id, client_secret = namespace.client_id, namespace.client_secret
+
+    w = WorkspaceClient(
+        host=namespace.host,
+        client_id=client_id,
+        client_secret=client_secret,
+        azure_client_id=namespace.azure_client_id,
+        azure_client_secret=namespace.azure_client_secret,
+        auth_type="external-browser",
+    )
+    me = w.current_user.me()
+    print(me)
+
+    if namespace.register_custom_app:
+        delete_custom_app(client_id)
+
+
diff --git a/examples/flask_app_with_oauth.py b/examples/flask_app_with_oauth.py
index 4128de5ca..7c18eadc7 100755
--- a/examples/flask_app_with_oauth.py
+++ b/examples/flask_app_with_oauth.py
@@ -31,20 +31,21 @@
 import logging
 import sys
 
-from databricks.sdk.oauth import OAuthClient
+from databricks.sdk.oauth import OAuthClient, get_workspace_endpoints
+from databricks.sdk.service.compute import ListClustersFilterBy, State
 
 APP_NAME = "flask-demo"
 all_clusters_template = """"""
 
 
-def create_flask_app(oauth_client: OAuthClient):
+def create_flask_app(workspace_host: str, client_id: str, client_secret: str):
     """The create_flask_app function creates a Flask app that is enabled with OAuth.
 
     It initializes the app and web session secret keys with a randomly generated token. It defines two routes for
@@ -64,7 +65,7 @@ def callback():
         the callback parameters, and redirects the user to the index page."""
         from databricks.sdk.oauth import Consent
 
-        consent = Consent.from_dict(oauth_client, session["consent"])
+        consent = Consent.from_dict(session["consent"], client_secret=client_secret)
         session["creds"] = consent.exchange_callback_parameters(request.args).as_dict()
         return redirect(url_for("index"))
 
@@ -72,21 +73,34 @@ def callback():
     def index():
         """The index page checks if the user has already authenticated and retrieves the user's credentials using
         the Databricks SDK WorkspaceClient. It then renders the template with the clusters' list."""
+        oidc_endpoints = get_workspace_endpoints(workspace_host)
+        port = request.environ.get("SERVER_PORT")
+        redirect_url=f"http://localhost:{port}/callback"
         if "creds" not in session:
+            oauth_client = OAuthClient(oidc_endpoints=oidc_endpoints,
+                                       client_id=client_id,
+                                       client_secret=client_secret,
+                                       redirect_url=redirect_url)
             consent = oauth_client.initiate_consent()
             session["consent"] = consent.as_dict()
-            return redirect(consent.auth_url)
+            return redirect(consent.authorization_url)
 
         from databricks.sdk import WorkspaceClient
         from databricks.sdk.oauth import SessionCredentials
 
-        credentials_provider = SessionCredentials.from_dict(oauth_client, session["creds"])
-        workspace_client = WorkspaceClient(host=oauth_client.host,
+        credentials_strategy = SessionCredentials.from_dict(session["creds"],
+                                                            token_endpoint=oidc_endpoints.token_endpoint,
+                                                            client_id=client_id,
+                                                            client_secret=client_secret,
+                                                            redirect_url=redirect_url)
+        workspace_client = WorkspaceClient(host=workspace_host,
                                            product=APP_NAME,
-                                           credentials_provider=credentials_provider,
+                                           credentials_strategy=credentials_strategy,
                                            )
-
-        return render_template_string(all_clusters_template, w=workspace_client)
+        clusters = workspace_client.clusters.list(
+            filter_by=ListClustersFilterBy(cluster_states=[State.RUNNING, State.PENDING])
+        )
+        return render_template_string(all_clusters_template, workspace_host=workspace_host, clusters=clusters)
 
     return app
 
@@ -100,7 +114,11 @@ def register_custom_app(args: argparse.Namespace) -> tuple[str, str]:
     account_client = AccountClient(profile=args.profile)
 
     custom_app = account_client.custom_app_integration.create(
-        name=APP_NAME, redirect_urls=[f"http://localhost:{args.port}/callback"], confidential=True,
+        name=APP_NAME,
+        redirect_urls=[
+            f"http://localhost:{args.port}/callback",
+        ],
+        confidential=True,
         scopes=["all-apis"],
     )
     logging.info(f"Created new custom app: "
@@ -110,22 +128,6 @@ def register_custom_app(args: argparse.Namespace) -> tuple[str, str]:
     return custom_app.client_id, custom_app.client_secret
 
 
-def init_oauth_config(args) -> OAuthClient:
-    """Creates Databricks SDK configuration for OAuth"""
-    oauth_client = OAuthClient(host=args.host,
-                               client_id=args.client_id,
-                               client_secret=args.client_secret,
-                               redirect_url=f"http://localhost:{args.port}/callback",
-                               scopes=["all-apis"],
-                               )
-    if not oauth_client.client_id:
-        client_id, client_secret = register_custom_app(args)
-        oauth_client.client_id = client_id
-        oauth_client.client_secret = client_secret
-
-    return oauth_client
-
-
 def parse_arguments() -> argparse.Namespace:
     """Parses arguments for this demo"""
     parser = argparse.ArgumentParser(prog=APP_NAME, description=__doc__.strip())
@@ -145,8 +147,10 @@ def parse_arguments() -> argparse.Namespace:
     logging.getLogger("databricks.sdk").setLevel(logging.DEBUG)
 
     args = parse_arguments()
-    oauth_cfg = init_oauth_config(args)
-    app = create_flask_app(oauth_cfg)
+    client_id, client_secret = args.client_id, args.client_secret
+    if not client_id:
+        client_id, client_secret = register_custom_app(args)
+    app = create_flask_app(args.host, client_id, client_secret)
 
     app.run(
         host="localhost",
diff --git a/examples/workspace/alerts/create_alerts.py b/examples/workspace/alerts/create_alerts.py
index 72367ca88..bae1ecf45 100755
--- a/examples/workspace/alerts/create_alerts.py
+++ b/examples/workspace/alerts/create_alerts.py
@@ -7,15 +7,21 @@
 
 srcs = w.data_sources.list()
 
-query = w.queries.create(name=f'sdk-{time.time_ns()}',
-                         data_source_id=srcs[0].id,
-                         description="test query from Go SDK",
-                         query="SELECT 1")
+query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}',
+                                                           warehouse_id=srcs[0].warehouse_id,
+                                                           description="test query from Go SDK",
+                                                           query_text="SELECT 1"))
 
-alert = w.alerts.create(options=sql.AlertOptions(column="1", op="==", value="1"),
-                        name=f'sdk-{time.time_ns()}',
-                        query_id=query.id)
+alert = w.alerts.create(
+    alert=sql.CreateAlertRequestAlert(condition=sql.AlertCondition(operand=sql.AlertConditionOperand(
+        column=sql.AlertOperandColumn(name="1")),
+                                                                   op=sql.AlertOperator.EQUAL,
+                                                                   threshold=sql.AlertConditionThreshold(
+                                                                       value=sql.AlertOperandValue(
+                                                                           double_value=1))),
+                                      display_name=f'sdk-{time.time_ns()}',
+                                      query_id=query.id))
 
 # cleanup
-w.queries.delete(query_id=query.id)
-w.alerts.delete(alert_id=alert.id)
+w.queries.delete(id=query.id)
+w.alerts.delete(id=alert.id)
diff --git a/examples/workspace/alerts/get_alerts.py b/examples/workspace/alerts/get_alerts.py
index 3c24e8566..a1a861b14 100755
--- a/examples/workspace/alerts/get_alerts.py
+++ b/examples/workspace/alerts/get_alerts.py
@@ -7,17 +7,23 @@
 
 srcs = w.data_sources.list()
 
-query = w.queries.create(name=f'sdk-{time.time_ns()}',
-                         data_source_id=srcs[0].id,
-                         description="test query from Go SDK",
-                         query="SELECT 1")
+query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}',
+                                                           warehouse_id=srcs[0].warehouse_id,
+                                                           description="test query from Go SDK",
+                                                           query_text="SELECT 1"))
 
-alert = w.alerts.create(options=sql.AlertOptions(column="1", op="==", value="1"),
-                        name=f'sdk-{time.time_ns()}',
-                        query_id=query.id)
+alert = w.alerts.create(
+    alert=sql.CreateAlertRequestAlert(condition=sql.AlertCondition(operand=sql.AlertConditionOperand(
+        column=sql.AlertOperandColumn(name="1")),
+                                                                   op=sql.AlertOperator.EQUAL,
+                                                                   threshold=sql.AlertConditionThreshold(
+                                                                       value=sql.AlertOperandValue(
+                                                                           double_value=1))),
+                                      display_name=f'sdk-{time.time_ns()}',
+                                      query_id=query.id))
 
-by_id = w.alerts.get(alert_id=alert.id)
+by_id = w.alerts.get(id=alert.id)
 
 # cleanup
-w.queries.delete(query_id=query.id)
-w.alerts.delete(alert_id=alert.id)
+w.queries.delete(id=query.id)
+w.alerts.delete(id=alert.id)
diff --git a/examples/workspace/alerts/list_alerts.py b/examples/workspace/alerts/list_alerts.py
index 2009772c5..35e4ce0a2 100755
--- a/examples/workspace/alerts/list_alerts.py
+++ b/examples/workspace/alerts/list_alerts.py
@@ -1,5 +1,6 @@
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import sql
 
 w = WorkspaceClient()
 
-all = w.alerts.list()
+all = w.alerts.list(sql.ListAlertsRequest())
diff --git a/examples/workspace/alerts/update_alerts.py b/examples/workspace/alerts/update_alerts.py
index 130f71913..5d1827f9b 100755
--- a/examples/workspace/alerts/update_alerts.py
+++ b/examples/workspace/alerts/update_alerts.py
@@ -7,20 +7,25 @@
 
 srcs = w.data_sources.list()
 
-query = w.queries.create(name=f'sdk-{time.time_ns()}',
-                         data_source_id=srcs[0].id,
-                         description="test query from Go SDK",
-                         query="SELECT 1")
+query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}',
+                                                           warehouse_id=srcs[0].warehouse_id,
+                                                           description="test query from Go SDK",
+                                                           query_text="SELECT 1"))
 
-alert = w.alerts.create(options=sql.AlertOptions(column="1", op="==", value="1"),
-                        name=f'sdk-{time.time_ns()}',
-                        query_id=query.id)
+alert = w.alerts.create(
+    alert=sql.CreateAlertRequestAlert(condition=sql.AlertCondition(operand=sql.AlertConditionOperand(
+        column=sql.AlertOperandColumn(name="1")),
+                                                                   op=sql.AlertOperator.EQUAL,
+                                                                   threshold=sql.AlertConditionThreshold(
+                                                                       value=sql.AlertOperandValue(
+                                                                           double_value=1))),
+                                      display_name=f'sdk-{time.time_ns()}',
+                                      query_id=query.id))
 
-w.alerts.update(options=sql.AlertOptions(column="1", op="==", value="1"),
-                alert_id=alert.id,
-                name=f'sdk-{time.time_ns()}',
-                query_id=query.id)
+_ = w.alerts.update(id=alert.id,
+                    alert=sql.UpdateAlertRequestAlert(display_name=f'sdk-{time.time_ns()}'),
+                    update_mask="display_name")
 
 # cleanup
-w.queries.delete(query_id=query.id)
-w.alerts.delete(alert_id=alert.id)
+w.queries.delete(id=query.id)
+w.alerts.delete(id=alert.id)
diff --git a/examples/workspace/queries/create_alerts.py b/examples/workspace/queries/create_alerts.py
index 37d71ac60..f0213aea9 100755
--- a/examples/workspace/queries/create_alerts.py
+++ b/examples/workspace/queries/create_alerts.py
@@ -1,15 +1,16 @@
 import time
 
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import sql
 
 w = WorkspaceClient()
 
 srcs = w.data_sources.list()
 
-query = w.queries.create(name=f'sdk-{time.time_ns()}',
-                         data_source_id=srcs[0].id,
-                         description="test query from Go SDK",
-                         query="SELECT 1")
+query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}',
+                                                           warehouse_id=srcs[0].warehouse_id,
+                                                           description="test query from Go SDK",
+                                                           query_text="SELECT 1"))
 
 # cleanup
-w.queries.delete(query_id=query.id)
+w.queries.delete(id=query.id)
diff --git a/examples/workspace/queries/create_queries.py b/examples/workspace/queries/create_queries.py
index c8d5ac93d..ce293d410 100755
--- a/examples/workspace/queries/create_queries.py
+++ b/examples/workspace/queries/create_queries.py
@@ -1,15 +1,16 @@
 import time
 
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import sql
 
 w = WorkspaceClient()
 
 srcs = w.data_sources.list()
 
-query = w.queries.create(name=f'sdk-{time.time_ns()}',
-                         data_source_id=srcs[0].id,
-                         description="test query from Go SDK",
-                         query="SHOW TABLES")
+query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}',
+                                                           warehouse_id=srcs[0].warehouse_id,
+                                                           description="test query from Go SDK",
+                                                           query_text="SHOW TABLES"))
 
 # cleanup
-w.queries.delete(query_id=query.id)
+w.queries.delete(id=query.id)
diff --git a/examples/workspace/queries/get_queries.py b/examples/workspace/queries/get_queries.py
index d29b75982..f1854d306 100755
--- a/examples/workspace/queries/get_queries.py
+++ b/examples/workspace/queries/get_queries.py
@@ -1,17 +1,18 @@
 import time
 
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import sql
 
 w = WorkspaceClient()
 
 srcs = w.data_sources.list()
 
-query = w.queries.create(name=f'sdk-{time.time_ns()}',
-                         data_source_id=srcs[0].id,
-                         description="test query from Go SDK",
-                         query="SHOW TABLES")
+query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}',
+                                                           warehouse_id=srcs[0].warehouse_id,
+                                                           description="test query from Go SDK",
+                                                           query_text="SHOW TABLES"))
 
-by_id = w.queries.get(query_id=query.id)
+by_id = w.queries.get(id=query.id)
 
 # cleanup
-w.queries.delete(query_id=query.id)
+w.queries.delete(id=query.id)
diff --git a/examples/workspace/queries/update_queries.py b/examples/workspace/queries/update_queries.py
index 85a9609ad..948d9a916 100755
--- a/examples/workspace/queries/update_queries.py
+++ b/examples/workspace/queries/update_queries.py
@@ -1,21 +1,22 @@
 import time
 
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import sql
 
 w = WorkspaceClient()
 
 srcs = w.data_sources.list()
 
-query = w.queries.create(name=f'sdk-{time.time_ns()}',
-                         data_source_id=srcs[0].id,
-                         description="test query from Go SDK",
-                         query="SHOW TABLES")
+query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}',
+                                                           warehouse_id=srcs[0].warehouse_id,
+                                                           description="test query from Go SDK",
+                                                           query_text="SHOW TABLES"))
 
-updated = w.queries.update(query_id=query.id,
-                           name=f'sdk-{time.time_ns()}',
-                           data_source_id=srcs[0].id,
-                           description="UPDATED: test query from Go SDK",
-                           query="SELECT 2+2")
+updated = w.queries.update(id=query.id,
+                           query=sql.UpdateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}',
+                                                             description="UPDATED: test query from Go SDK",
+                                                             query_text="SELECT 2+2"),
+                           update_mask="display_name,description,query_text")
 
 # cleanup
-w.queries.delete(query_id=query.id)
+w.queries.delete(id=query.id)
diff --git a/examples/workspace/shares/list_shares.py b/examples/workspace/shares/list_shares.py
index b8668e7f7..d432854c6 100755
--- a/examples/workspace/shares/list_shares.py
+++ b/examples/workspace/shares/list_shares.py
@@ -1,5 +1,6 @@
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import sharing
 
 w = WorkspaceClient()
 
-all = w.shares.list()
+all = w.shares.list(sharing.ListSharesRequest())
diff --git a/examples/workspace/warehouses/create_sql_warehouses.py b/examples/workspace/warehouses/create_sql_warehouses.py
index 15e8f474c..f01b9d5f9 100755
--- a/examples/workspace/warehouses/create_sql_warehouses.py
+++ b/examples/workspace/warehouses/create_sql_warehouses.py
@@ -1,13 +1,18 @@
 import time
 
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import sql
 
 w = WorkspaceClient()
 
-created = w.warehouses.create(name=f'sdk-{time.time_ns()}',
-                              cluster_size="2X-Small",
-                              max_num_clusters=1,
-                              auto_stop_mins=10).result()
+created = w.warehouses.create(
+    name=f'sdk-{time.time_ns()}',
+    cluster_size="2X-Small",
+    max_num_clusters=1,
+    auto_stop_mins=10,
+    tags=sql.EndpointTags(
+        custom_tags=[sql.EndpointTagPair(key="Owner", value="eng-dev-ecosystem-team_at_databricks.com")
+                     ])).result()
 
 # cleanup
 w.warehouses.delete(id=created.id)
diff --git a/examples/workspace/warehouses/edit_sql_warehouses.py b/examples/workspace/warehouses/edit_sql_warehouses.py
index 0e3c8e8f0..acf06035a 100755
--- a/examples/workspace/warehouses/edit_sql_warehouses.py
+++ b/examples/workspace/warehouses/edit_sql_warehouses.py
@@ -1,13 +1,18 @@
 import time
 
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import sql
 
 w = WorkspaceClient()
 
-created = w.warehouses.create(name=f'sdk-{time.time_ns()}',
-                              cluster_size="2X-Small",
-                              max_num_clusters=1,
-                              auto_stop_mins=10).result()
+created = w.warehouses.create(
+    name=f'sdk-{time.time_ns()}',
+    cluster_size="2X-Small",
+    max_num_clusters=1,
+    auto_stop_mins=10,
+    tags=sql.EndpointTags(
+        custom_tags=[sql.EndpointTagPair(key="Owner", value="eng-dev-ecosystem-team_at_databricks.com")
+                     ])).result()
 
 _ = w.warehouses.edit(id=created.id,
                       name=f'sdk-{time.time_ns()}',
diff --git a/examples/workspace/warehouses/get_sql_warehouses.py b/examples/workspace/warehouses/get_sql_warehouses.py
index 7b59844ca..9f8184ab7 100755
--- a/examples/workspace/warehouses/get_sql_warehouses.py
+++ b/examples/workspace/warehouses/get_sql_warehouses.py
@@ -1,13 +1,18 @@
 import time
 
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import sql
 
 w = WorkspaceClient()
 
-created = w.warehouses.create(name=f'sdk-{time.time_ns()}',
-                              cluster_size="2X-Small",
-                              max_num_clusters=1,
-                              auto_stop_mins=10).result()
+created = w.warehouses.create(
+    name=f'sdk-{time.time_ns()}',
+    cluster_size="2X-Small",
+    max_num_clusters=1,
+    auto_stop_mins=10,
+    tags=sql.EndpointTags(
+        custom_tags=[sql.EndpointTagPair(key="Owner", value="eng-dev-ecosystem-team_at_databricks.com")
+                     ])).result()
 
 wh = w.warehouses.get(id=created.id)
 
diff --git a/setup.py b/setup.py
index 8d7d2e3b3..021f4e430 100644
--- a/setup.py
+++ b/setup.py
@@ -8,34 +8,55 @@
 with version_file.open('r') as f:
     exec(f.read(), version_data)
 
-setup(name="sync-databricks-sdk",
-      version=version_data['__version__'],
-      packages=find_packages(exclude=["tests", "*tests.*", "*tests"]),
-      package_data = {"databricks.sdk": ["py.typed"]},
-      python_requires=">=3.7",
-      install_requires=["requests>=2.28.1,<3", "google-auth~=2.0"],
-      extras_require={"dev": ["pytest", "pytest-cov", "pytest-xdist", "pytest-mock",
-                              "yapf", "pycodestyle", "autoflake", "isort", "wheel",
-                              "ipython", "ipywidgets", "requests-mock", "pyfakefs",
-                              "databricks-connect", "pytest-rerunfailures"],
-                      "notebook": ["ipython>=8,<9", "ipywidgets>=8,<9"]},
-      author="Sync Computing",
-      author_email="info@synccomputing.com",
-      description="Sync Fork Databricks SDK for Python (Beta)",
-      long_description=io.open("README.md", encoding="utf-8").read(),
-      long_description_content_type='text/markdown',
-      url="https://databricks-sdk-py.readthedocs.io",
-      keywords="databricks sdk",
-      classifiers=[
-          "Development Status :: 4 - Beta",
-          "Intended Audience :: Developers",
-          "Intended Audience :: Science/Research",
-          "Intended Audience :: System Administrators",
-          "License :: OSI Approved :: Apache Software License",
-          "Programming Language :: Python :: 3.7",
-          "Programming Language :: Python :: 3.8",
-          "Programming Language :: Python :: 3.9",
-          "Programming Language :: Python :: 3.10",
-          "Programming Language :: Python :: 3.11",
-          "Programming Language :: Python :: 3.12",
-          "Operating System :: OS Independent"])
+setup(
+    name="sync-databricks-sdk",
+    version=version_data["__version__"],
+    packages=find_packages(exclude=["tests", "*tests.*", "*tests"]),
+    package_data={"databricks.sdk": ["py.typed"]},
+    python_requires=">=3.7",
+    install_requires=["requests>=2.28.1,<3", "google-auth~=2.0"],
+    extras_require={
+        "dev": [
+            "pytest",
+            "pytest-cov",
+            "pytest-xdist",
+            "pytest-mock",
+            "yapf",
+            "pycodestyle",
+            "autoflake",
+            "isort",
+            "wheel",
+            "ipython",
+            "ipywidgets",
+            "requests-mock",
+            "pyfakefs",
+            "databricks-connect",
+            "pytest-rerunfailures",
+            "openai",
+            'langchain-openai; python_version > "3.7"',
+            "httpx",
+        ],
+        "notebook": ["ipython>=8,<9", "ipywidgets>=8,<9"],
+    },
+    author="Sync Computing",
+    author_email="info@synccomputing.com",
+    description="Sync Fork Databricks SDK for Python (Beta)",
+    long_description=io.open("README.md", encoding="utf-8").read(),
+    long_description_content_type="text/markdown",
+    url="https://databricks-sdk-py.readthedocs.io",
+    keywords="databricks sdk",
+    classifiers=[
+        "Development Status :: 4 - Beta",
+        "Intended Audience :: Developers",
+        "Intended Audience :: Science/Research",
+        "Intended Audience :: System Administrators",
+        "License :: OSI Approved :: Apache Software License",
+        "Programming Language :: Python :: 3.7",
+        "Programming Language :: Python :: 3.8",
+        "Programming Language :: Python :: 3.9",
+        "Programming Language :: Python :: 3.10",
+        "Programming Language :: Python :: 3.11",
+        "Programming Language :: Python :: 3.12",
+        "Operating System :: OS Independent",
+    ],
+)
diff --git a/tests/conftest.py b/tests/conftest.py
index 9d1c26b38..0f415ecf1 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,5 +1,6 @@
 import functools
 import os
+import platform
 
 import pytest as pytest
 from pyfakefs.fake_filesystem_unittest import Patcher
@@ -38,7 +39,11 @@ def wrapper(*args, **kwargs):
             with pytest.raises(ValueError) as info:
                 func(*args, **kwargs)
             exception_str = str(info.value)
-            exception_str = exception_str.replace(__tests__ + '/', '')
+            if platform.system() == 'Windows':
+                exception_str = exception_str.replace(__tests__ + '\\', '')
+                exception_str = exception_str.replace('\\', '/')
+            else:
+                exception_str = exception_str.replace(__tests__ + '/', '')
             assert msg in exception_str
 
         return wrapper
@@ -57,3 +62,31 @@ def fake_fs():
         patcher.fs.add_real_directory(test_data_path)
 
         yield patcher.fs # This will return a fake file system
+
+
+def set_home(monkeypatch, path):
+    if platform.system() == 'Windows':
+        monkeypatch.setenv('USERPROFILE', __tests__ + path)
+    else:
+        monkeypatch.setenv('HOME', __tests__ + path)
+
+
+def set_az_path(monkeypatch):
+    if platform.system() == 'Windows':
+        monkeypatch.setenv('Path', __tests__ + "\\testdata\\windows\\")
+        monkeypatch.setenv('COMSPEC', 'C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe')
+    else:
+        monkeypatch.setenv('PATH', __tests__ + "/testdata:/bin")
+
+
+@pytest.fixture
+def mock_tenant(requests_mock):
+
+    def stub_tenant_request(host, tenant_id="test-tenant-id"):
+        mock = requests_mock.get(
+            f'https://{host}/aad/auth',
+            status_code=302,
+            headers={'Location': f'https://login.microsoftonline.com/{tenant_id}/oauth2/authorize'})
+        return mock
+
+    return stub_tenant_request
diff --git a/tests/fixture_server.py b/tests/fixture_server.py
new file mode 100644
index 000000000..e15f9cf2d
--- /dev/null
+++ b/tests/fixture_server.py
@@ -0,0 +1,31 @@
+import contextlib
+import functools
+import typing
+from http.server import BaseHTTPRequestHandler
+
+
+@contextlib.contextmanager
+def http_fixture_server(handler: typing.Callable[[BaseHTTPRequestHandler], None]):
+    from http.server import HTTPServer
+    from threading import Thread
+
+    class _handler(BaseHTTPRequestHandler):
+
+        def __init__(self, handler: typing.Callable[[BaseHTTPRequestHandler], None], *args):
+            self._handler = handler
+            super().__init__(*args)
+
+        def __getattr__(self, item):
+            if 'do_' != item[0:3]:
+                raise AttributeError(f'method {item} not found')
+            return functools.partial(self._handler, self)
+
+    handler_factory = functools.partial(_handler, handler)
+    srv = HTTPServer(('localhost', 0), handler_factory)
+    t = Thread(target=srv.serve_forever)
+    try:
+        t.daemon = True
+        t.start()
+        yield 'http://{0}:{1}'.format(*srv.server_address)
+    finally:
+        srv.shutdown()
diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py
index 221cc7d49..e9c5430dd 100644
--- a/tests/integration/conftest.py
+++ b/tests/integration/conftest.py
@@ -59,6 +59,18 @@ def a(env_or_skip) -> AccountClient:
     return account_client
 
 
+@pytest.fixture(scope='session')
+def ucacct(env_or_skip) -> AccountClient:
+    _load_debug_env_if_runs_from_ide('ucacct')
+    env_or_skip("CLOUD_ENV")
+    account_client = AccountClient()
+    if not account_client.config.is_account_client:
+        pytest.skip("not Databricks Account client")
+    if 'TEST_METASTORE_ID' not in os.environ:
+        pytest.skip("not in Unity Catalog Workspace test env")
+    return account_client
+
+
 @pytest.fixture(scope='session')
 def w(env_or_skip) -> WorkspaceClient:
     _load_debug_env_if_runs_from_ide('workspace')
@@ -104,6 +116,14 @@ def volume(ucws, schema):
     ucws.volumes.delete(volume.full_name)
 
 
+@pytest.fixture()
+def workspace_dir(w, random):
+    directory = f'/Users/{w.current_user.me().user_name}/dir-{random(12)}'
+    w.workspace.mkdirs(directory)
+    yield directory
+    w.workspace.delete(directory, recursive=True)
+
+
 def _load_debug_env_if_runs_from_ide(key) -> bool:
     if not _is_in_debug():
         return False
diff --git a/tests/integration/test_auth.py b/tests/integration/test_auth.py
index 4eca0d1d3..0bf7f951d 100644
--- a/tests/integration/test_auth.py
+++ b/tests/integration/test_auth.py
@@ -5,6 +5,7 @@
 import shutil
 import subprocess
 import sys
+import typing
 import urllib.parse
 from functools import partial
 from pathlib import Path
@@ -12,7 +13,7 @@
 import pytest
 
 from databricks.sdk.service.compute import (ClusterSpec, DataSecurityMode,
-                                            Library, ResultType)
+                                            Library, ResultType, SparkVersion)
 from databricks.sdk.service.jobs import NotebookTask, Task, ViewType
 from databricks.sdk.service.workspace import ImportFormat
 
@@ -84,19 +85,41 @@ def test_runtime_auth_from_interactive_on_uc(ucws, fresh_wheel_file, env_or_skip
         ucws.clusters.permanent_delete(interactive_cluster.cluster_id)
 
 
-def test_runtime_auth_from_jobs(w, fresh_wheel_file, env_or_skip, random):
-    instance_pool_id = env_or_skip('TEST_INSTANCE_POOL_ID')
-
+def _get_lts_versions(w) -> typing.List[SparkVersion]:
     v = w.clusters.spark_versions()
     lts_runtimes = [
         x for x in v.versions
         if 'LTS' in x.name and '-ml' not in x.key and '-photon' not in x.key and '-aarch64' not in x.key
     ]
+    return lts_runtimes
+
+
+def test_runtime_auth_from_jobs_volumes(ucws, fresh_wheel_file, env_or_skip, random, volume):
+    dbr_versions = [v for v in _get_lts_versions(ucws) if int(v.key.split('.')[0]) >= 15]
+
+    volume_wheel = f'{volume}/tmp/wheels/{random(10)}/{fresh_wheel_file.name}'
+    with fresh_wheel_file.open('rb') as f:
+        ucws.files.upload(volume_wheel, f)
+
+    lib = Library(whl=volume_wheel)
+    return _test_runtime_auth_from_jobs_inner(ucws, env_or_skip, random, dbr_versions, lib)
+
+
+def test_runtime_auth_from_jobs_dbfs(w, fresh_wheel_file, env_or_skip, random):
+    # Library installation from DBFS is not supported past DBR 14.3
+    dbr_versions = [v for v in _get_lts_versions(w) if int(v.key.split('.')[0]) < 15]
 
     dbfs_wheel = f'/tmp/wheels/{random(10)}/{fresh_wheel_file.name}'
     with fresh_wheel_file.open('rb') as f:
         w.dbfs.upload(dbfs_wheel, f)
 
+    lib = Library(whl=f'dbfs:{dbfs_wheel}')
+    return _test_runtime_auth_from_jobs_inner(w, env_or_skip, random, dbr_versions, lib)
+
+
+def _test_runtime_auth_from_jobs_inner(w, env_or_skip, random, dbr_versions, library):
+    instance_pool_id = env_or_skip('TEST_INSTANCE_POOL_ID')
+
     my_name = w.current_user.me().user_name
     notebook_path = f'/Users/{my_name}/notebook-native-auth'
     notebook_content = io.BytesIO(b'''
@@ -109,16 +132,20 @@ def test_runtime_auth_from_jobs(w, fresh_wheel_file, env_or_skip, random):
     w.workspace.upload(notebook_path, notebook_content, language=Language.PYTHON, overwrite=True)
 
     tasks = []
-    for v in lts_runtimes:
+    for v in dbr_versions:
         t = Task(task_key=f'test_{v.key.replace(".", "_")}',
                  notebook_task=NotebookTask(notebook_path=notebook_path),
-                 new_cluster=ClusterSpec(spark_version=v.key,
-                                         num_workers=1,
-                                         instance_pool_id=instance_pool_id),
-                 libraries=[Library(whl=f'dbfs:{dbfs_wheel}')])
+                 new_cluster=ClusterSpec(
+                     spark_version=v.key,
+                     num_workers=1,
+                     instance_pool_id=instance_pool_id,
+                     # GCP uses "custom" data security mode by default, which does not support UC.
+                     data_security_mode=DataSecurityMode.SINGLE_USER),
+                 libraries=[library])
         tasks.append(t)
 
-    run = w.jobs.submit(run_name=f'Runtime Native Auth {random(10)}', tasks=tasks).result()
+    waiter = w.jobs.submit(run_name=f'Runtime Native Auth {random(10)}', tasks=tasks)
+    run = waiter.result()
     for task_key, output in _task_outputs(w, run).items():
         assert my_name in output, f'{task_key} does not work with notebook native auth'
 
diff --git a/tests/integration/test_client.py b/tests/integration/test_client.py
index 2c4c15ba8..eab9c4713 100644
--- a/tests/integration/test_client.py
+++ b/tests/integration/test_client.py
@@ -1,10 +1,8 @@
-import pytest
-
-
-def test_get_workspace_client(a, env_or_skip):
+def test_get_workspace_client(ucacct, env_or_skip):
+    # Requires a Unity Catalog-enabled account client, so use the ucacct fixture instead of a.
     workspace_id = env_or_skip("TEST_WORKSPACE_ID")
-    ws = a.workspaces.get(workspace_id)
-    w = a.get_workspace_client(ws)
+    ws = ucacct.workspaces.get(workspace_id)
+    w = ucacct.get_workspace_client(ws)
     assert w.current_user.me().active
 
 
@@ -13,11 +11,9 @@ def test_get_workspace_id(ucws, env_or_skip):
     assert ucws.get_workspace_id() == ws_id
 
 
-def test_creating_ws_client_from_ac_client_does_not_override_config(a):
-    wss = list(a.workspaces.list())
-    if len(wss) == 0:
-        pytest.skip("no workspaces")
-    a.get_workspace_client(wss[0])
-
-    # assert doesn't throw
-    wss = list(a.workspaces.list())
+def test_creating_ws_client_from_ac_client_does_not_override_config(ucacct, env_or_skip):
+    ws_id = env_or_skip('TEST_WORKSPACE_ID')
+    ws = ucacct.workspaces.get(ws_id)
+    w = ucacct.get_workspace_client(ws)
+    me = w.current_user.me()
+    assert me.user_name is not None
diff --git a/tests/integration/test_dbutils.py b/tests/integration/test_dbutils.py
index a7e780eb3..e6e2a8668 100644
--- a/tests/integration/test_dbutils.py
+++ b/tests/integration/test_dbutils.py
@@ -51,6 +51,13 @@ def test_large_put(fs_and_base_path):
     assert output == ("test" * 20000)[:65536]
 
 
+def test_put_local_path(w, random, tmp_path):
+    to_write = random(1024 * 1024 * 2)
+    tmp_path = tmp_path / "tmp_file"
+    w.dbutils.fs.put(f'file:{tmp_path}', to_write, True)
+    assert w.dbutils.fs.head(f'file:{tmp_path}', 1024 * 1024 * 2) == to_write
+
+
 def test_cp_file(fs_and_base_path, random):
     fs, base_path = fs_and_base_path
     path = base_path + "/dbc_qa_file-" + random()
diff --git a/tests/integration/test_files.py b/tests/integration/test_files.py
index 35750ab75..7b9ede556 100644
--- a/tests/integration/test_files.py
+++ b/tests/integration/test_files.py
@@ -1,6 +1,7 @@
 import io
 import logging
 import pathlib
+import platform
 import time
 from typing import Callable, List, Tuple, Union
 
@@ -11,7 +12,10 @@
 
 
 def test_local_io(random):
-    dummy_file = f'/tmp/{random()}'
+    if platform.system() == 'Windows':
+        dummy_file = f'C:\\Windows\\Temp\\{random()}'
+    else:
+        dummy_file = f'/tmp/{random()}'
     to_write = random(1024 * 1024 * 2.5).encode()
     with open(dummy_file, 'wb') as f:
         written = f.write(to_write)
diff --git a/tests/integration/test_iam.py b/tests/integration/test_iam.py
index f5120f546..f8d7c3b1f 100644
--- a/tests/integration/test_iam.py
+++ b/tests/integration/test_iam.py
@@ -1,5 +1,6 @@
 import pytest
 
+from databricks.sdk import errors
 from databricks.sdk.core import DatabricksError
 
 
@@ -13,7 +14,7 @@ def test_scim_error_unmarshall(w, random):
     with pytest.raises(DatabricksError) as exc_info:
         groups = w.groups.list(filter=random(12))
         next(groups)
-    assert 'Given filter operator is not supported' in str(exc_info.value)
+    assert isinstance(exc_info.value, errors.BadRequest)
 
 
 def test_scim_get_user_as_dict(w):
diff --git a/tests/integration/test_sql.py b/tests/integration/test_sql.py
index 3e178dc3a..af368609b 100644
--- a/tests/integration/test_sql.py
+++ b/tests/integration/test_sql.py
@@ -11,5 +11,5 @@ def date_to_ms(date):
     filter = QueryFilter(query_start_time_range=TimeRange(start_time_ms=date_to_ms('2023-01-01'),
                                                           end_time_ms=date_to_ms('2023-01-02')))
     queries = w.query_history.list(filter_by=filter)
-    for q in queries:
+    for q in queries.res:
         print(q)
diff --git a/tests/integration/test_workspace.py b/tests/integration/test_workspace.py
index afe77c427..4adbee773 100644
--- a/tests/integration/test_workspace.py
+++ b/tests/integration/test_workspace.py
@@ -3,11 +3,21 @@
 from databricks.sdk.service.workspace import ImportFormat, Language
 
 
-def test_workspace_recursive_list(w, random):
+def test_workspace_recursive_list(w, workspace_dir, random):
+    # create a file in the directory
+    file = f'{workspace_dir}/file-{random(12)}.py'
+    w.workspace.upload(file, io.BytesIO(b'print(1)'))
+    # create a subdirectory
+    subdirectory = f'{workspace_dir}/subdir-{random(12)}'
+    w.workspace.mkdirs(subdirectory)
+    # create a file in the subdirectory
+    subfile = f'{subdirectory}/subfile-{random(12)}.py'
+    w.workspace.upload(subfile, io.BytesIO(b'print(2)'))
+    # list the directory recursively
     names = []
-    for i in w.workspace.list(f'/Users/{w.current_user.me().user_name}', recursive=True):
+    for i in w.workspace.list(workspace_dir, recursive=True):
         names.append(i.path)
-    assert len(names) > 0
+    assert len(names) == 2
 
 
 def test_workspace_upload_download_notebooks(w, random):
diff --git a/tests/test_auth.py b/tests/test_auth.py
index 02535c39e..cd8f3cfc1 100644
--- a/tests/test_auth.py
+++ b/tests/test_auth.py
@@ -2,7 +2,7 @@
 # In case of editing this file, make sure the change is propagated to all Databricks SDK codebases
 from databricks.sdk.core import Config
 
-from .conftest import __tests__, raises
+from .conftest import __tests__, raises, set_az_path, set_home
 
 default_auth_base_error_message = \
     "default auth: cannot configure default credentials, " \
@@ -121,19 +121,19 @@ def test_config_config_file(monkeypatch):
 
 @raises(f"{default_auth_base_error_message}. Config: host=https://x")
 def test_config_config_file_skip_default_profile_if_host_specified(monkeypatch):
-    monkeypatch.setenv('HOME', __tests__ + '/testdata')
+    set_home(monkeypatch, '/testdata')
     cfg = Config(host='x')
 
 
 @raises(default_auth_base_error_message)
 def test_config_config_file_with_empty_default_profile_select_default(monkeypatch):
-    monkeypatch.setenv('HOME', __tests__ + '/testdata/empty_default')
+    set_home(monkeypatch, '/testdata/empty_default')
     Config()
 
 
 def test_config_config_file_with_empty_default_profile_select_abc(monkeypatch):
     monkeypatch.setenv('DATABRICKS_CONFIG_PROFILE', 'abc')
-    monkeypatch.setenv('HOME', __tests__ + '/testdata/empty_default')
+    set_home(monkeypatch, '/testdata/empty_default')
     cfg = Config()
 
     assert cfg.auth_type == 'pat'
@@ -141,7 +141,7 @@ def test_config_config_file_with_empty_default_profile_select_abc(monkeypatch):
 
 
 def test_config_pat_from_databricks_cfg(monkeypatch):
-    monkeypatch.setenv('HOME', __tests__ + '/testdata')
+    set_home(monkeypatch, '/testdata')
     cfg = Config()
 
     assert cfg.auth_type == 'pat'
@@ -150,7 +150,7 @@ def test_config_pat_from_databricks_cfg(monkeypatch):
 
 def test_config_pat_from_databricks_cfg_dot_profile(monkeypatch):
     monkeypatch.setenv('DATABRICKS_CONFIG_PROFILE', 'pat.with.dot')
-    monkeypatch.setenv('HOME', __tests__ + '/testdata')
+    set_home(monkeypatch, '/testdata')
     cfg = Config()
 
     assert cfg.auth_type == 'pat'
@@ -161,7 +161,7 @@ def test_config_pat_from_databricks_cfg_dot_profile(monkeypatch):
     f"{default_auth_base_error_message}. Config: token=***, profile=nohost. Env: DATABRICKS_CONFIG_PROFILE")
 def test_config_pat_from_databricks_cfg_nohost_profile(monkeypatch):
     monkeypatch.setenv('DATABRICKS_CONFIG_PROFILE', 'nohost')
-    monkeypatch.setenv('HOME', __tests__ + '/testdata')
+    set_home(monkeypatch, '/testdata')
     Config()
 
 
@@ -171,7 +171,7 @@ def test_config_pat_from_databricks_cfg_nohost_profile(monkeypatch):
 def test_config_config_profile_and_token(monkeypatch):
     monkeypatch.setenv('DATABRICKS_CONFIG_PROFILE', 'nohost')
     monkeypatch.setenv('DATABRICKS_TOKEN', 'x')
-    monkeypatch.setenv('HOME', __tests__ + '/testdata')
+    set_home(monkeypatch, '/testdata')
     Config()
 
 
@@ -181,7 +181,7 @@ def test_config_config_profile_and_token(monkeypatch):
 def test_config_config_profile_and_password(monkeypatch):
     monkeypatch.setenv('DATABRICKS_CONFIG_PROFILE', 'nohost')
     monkeypatch.setenv('DATABRICKS_USERNAME', 'x')
-    monkeypatch.setenv('HOME', __tests__ + '/testdata')
+    set_home(monkeypatch, '/testdata')
     Config()
 
 
@@ -193,9 +193,10 @@ def test_config_azure_pat():
     assert cfg.is_azure
 
 
-def test_config_azure_cli_host(monkeypatch):
-    monkeypatch.setenv('HOME', __tests__ + '/testdata/azure')
-    monkeypatch.setenv('PATH', __tests__ + '/testdata:/bin')
+def test_config_azure_cli_host(monkeypatch, mock_tenant):
+    set_home(monkeypatch, '/testdata/azure')
+    set_az_path(monkeypatch)
+    mock_tenant('adb-123.4.azuredatabricks.net')
     cfg = Config(host='https://adb-123.4.azuredatabricks.net', azure_workspace_resource_id='/sub/rg/ws')
 
     assert cfg.auth_type == 'azure-cli'
@@ -208,14 +209,14 @@ def test_config_azure_cli_host(monkeypatch):
 )
 def test_config_azure_cli_host_fail(monkeypatch):
     monkeypatch.setenv('FAIL', 'yes')
-    monkeypatch.setenv('HOME', __tests__ + '/testdata/azure')
-    monkeypatch.setenv('PATH', __tests__ + '/testdata:/bin')
+    set_home(monkeypatch, '/testdata/azure')
+    set_az_path(monkeypatch)
     cfg = Config(azure_workspace_resource_id='/sub/rg/ws')
 
 
 @raises(f"{default_auth_base_error_message}. Config: azure_workspace_resource_id=/sub/rg/ws")
 def test_config_azure_cli_host_az_not_installed(monkeypatch):
-    monkeypatch.setenv('HOME', __tests__ + '/testdata/azure')
+    set_home(monkeypatch, '/testdata/azure')
     monkeypatch.setenv('PATH', __tests__ + '/whatever')
     cfg = Config(azure_workspace_resource_id='/sub/rg/ws')
 
@@ -224,14 +225,15 @@ def test_config_azure_cli_host_az_not_installed(monkeypatch):
     "validate: more than one authorization method configured: azure and pat. Config: token=***, azure_workspace_resource_id=/sub/rg/ws"
 )
 def test_config_azure_cli_host_pat_conflict_with_config_file_present_without_default_profile(monkeypatch):
-    monkeypatch.setenv('HOME', __tests__ + '/testdata/azure')
-    monkeypatch.setenv('PATH', __tests__ + '/testdata:/bin')
+    set_home(monkeypatch, '/testdata/azure')
+    set_az_path(monkeypatch)
     cfg = Config(token='x', azure_workspace_resource_id='/sub/rg/ws')
 
 
-def test_config_azure_cli_host_and_resource_id(monkeypatch):
-    monkeypatch.setenv('HOME', __tests__ + '/testdata')
-    monkeypatch.setenv('PATH', __tests__ + '/testdata:/bin')
+def test_config_azure_cli_host_and_resource_id(monkeypatch, mock_tenant):
+    set_home(monkeypatch, '/testdata')
+    set_az_path(monkeypatch)
+    mock_tenant('adb-123.4.azuredatabricks.net')
     cfg = Config(host='https://adb-123.4.azuredatabricks.net', azure_workspace_resource_id='/sub/rg/ws')
 
     assert cfg.auth_type == 'azure-cli'
@@ -239,10 +241,11 @@ def test_config_azure_cli_host_and_resource_id(monkeypatch):
     assert cfg.is_azure
 
 
-def test_config_azure_cli_host_and_resource_i_d_configuration_precedence(monkeypatch):
+def test_config_azure_cli_host_and_resource_i_d_configuration_precedence(monkeypatch, mock_tenant):
     monkeypatch.setenv('DATABRICKS_CONFIG_PROFILE', 'justhost')
-    monkeypatch.setenv('HOME', __tests__ + '/testdata/azure')
-    monkeypatch.setenv('PATH', __tests__ + '/testdata:/bin')
+    set_home(monkeypatch, '/testdata/azure')
+    set_az_path(monkeypatch)
+    mock_tenant('adb-123.4.azuredatabricks.net')
     cfg = Config(host='https://adb-123.4.azuredatabricks.net', azure_workspace_resource_id='/sub/rg/ws')
 
     assert cfg.auth_type == 'azure-cli'
@@ -255,8 +258,8 @@ def test_config_azure_cli_host_and_resource_i_d_configuration_precedence(monkeyp
 )
 def test_config_azure_and_password_conflict(monkeypatch):
     monkeypatch.setenv('DATABRICKS_USERNAME', 'x')
-    monkeypatch.setenv('HOME', __tests__ + '/testdata/azure')
-    monkeypatch.setenv('PATH', __tests__ + '/testdata:/bin')
+    set_home(monkeypatch, '/testdata/azure')
+    set_az_path(monkeypatch)
     cfg = Config(host='https://adb-123.4.azuredatabricks.net', azure_workspace_resource_id='/sub/rg/ws')
 
 
@@ -265,7 +268,7 @@ def test_config_azure_and_password_conflict(monkeypatch):
 )
 def test_config_corrupt_config(monkeypatch):
     monkeypatch.setenv('DATABRICKS_CONFIG_PROFILE', 'DEFAULT')
-    monkeypatch.setenv('HOME', __tests__ + '/testdata/corrupt')
+    set_home(monkeypatch, '/testdata/corrupt')
     Config()
 
 
diff --git a/tests/test_auth_manual_tests.py b/tests/test_auth_manual_tests.py
index 07250c532..8c58dd6bf 100644
--- a/tests/test_auth_manual_tests.py
+++ b/tests/test_auth_manual_tests.py
@@ -1,11 +1,14 @@
+import pytest
+
 from databricks.sdk.core import Config
 
-from .conftest import __tests__
+from .conftest import set_az_path, set_home
 
 
-def test_azure_cli_workspace_header_present(monkeypatch):
-    monkeypatch.setenv('HOME', __tests__ + '/testdata/azure')
-    monkeypatch.setenv('PATH', __tests__ + '/testdata:/bin')
+def test_azure_cli_workspace_header_present(monkeypatch, mock_tenant):
+    set_home(monkeypatch, '/testdata/azure')
+    set_az_path(monkeypatch)
+    mock_tenant('adb-123.4.azuredatabricks.net')
     resource_id = '/subscriptions/123/resourceGroups/abc/providers/Microsoft.Databricks/workspaces/abc123'
     cfg = Config(auth_type='azure-cli',
                  host='https://adb-123.4.azuredatabricks.net',
@@ -14,9 +17,10 @@ def test_azure_cli_workspace_header_present(monkeypatch):
     assert cfg.authenticate()['X-Databricks-Azure-Workspace-Resource-Id'] == resource_id
 
 
-def test_azure_cli_user_with_management_access(monkeypatch):
-    monkeypatch.setenv('HOME', __tests__ + '/testdata/azure')
-    monkeypatch.setenv('PATH', __tests__ + '/testdata:/bin')
+def test_azure_cli_user_with_management_access(monkeypatch, mock_tenant):
+    set_home(monkeypatch, '/testdata/azure')
+    set_az_path(monkeypatch)
+    mock_tenant('adb-123.4.azuredatabricks.net')
     resource_id = '/subscriptions/123/resourceGroups/abc/providers/Microsoft.Databricks/workspaces/abc123'
     cfg = Config(auth_type='azure-cli',
                  host='https://adb-123.4.azuredatabricks.net',
@@ -24,9 +28,10 @@ def test_azure_cli_user_with_management_access(monkeypatch):
     assert 'X-Databricks-Azure-SP-Management-Token' in cfg.authenticate()
 
 
-def test_azure_cli_user_no_management_access(monkeypatch):
-    monkeypatch.setenv('HOME', __tests__ + '/testdata/azure')
-    monkeypatch.setenv('PATH', __tests__ + '/testdata:/bin')
+def test_azure_cli_user_no_management_access(monkeypatch, mock_tenant):
+    set_home(monkeypatch, '/testdata/azure')
+    set_az_path(monkeypatch)
+    mock_tenant('adb-123.4.azuredatabricks.net')
     monkeypatch.setenv('FAIL_IF', 'https://management.core.windows.net/')
     resource_id = '/subscriptions/123/resourceGroups/abc/providers/Microsoft.Databricks/workspaces/abc123'
     cfg = Config(auth_type='azure-cli',
@@ -35,9 +40,10 @@ def test_azure_cli_user_no_management_access(monkeypatch):
     assert 'X-Databricks-Azure-SP-Management-Token' not in cfg.authenticate()
 
 
-def test_azure_cli_fallback(monkeypatch):
-    monkeypatch.setenv('HOME', __tests__ + '/testdata/azure')
-    monkeypatch.setenv('PATH', __tests__ + '/testdata:/bin')
+def test_azure_cli_fallback(monkeypatch, mock_tenant):
+    set_home(monkeypatch, '/testdata/azure')
+    set_az_path(monkeypatch)
+    mock_tenant('adb-123.4.azuredatabricks.net')
     monkeypatch.setenv('FAIL_IF', 'subscription')
     resource_id = '/subscriptions/123/resourceGroups/abc/providers/Microsoft.Databricks/workspaces/abc123'
     cfg = Config(auth_type='azure-cli',
@@ -46,12 +52,23 @@ def test_azure_cli_fallback(monkeypatch):
     assert 'X-Databricks-Azure-SP-Management-Token' in cfg.authenticate()
 
 
-def test_azure_cli_with_warning_on_stderr(monkeypatch):
-    monkeypatch.setenv('HOME', __tests__ + '/testdata/azure')
-    monkeypatch.setenv('PATH', __tests__ + '/testdata:/bin')
+def test_azure_cli_with_warning_on_stderr(monkeypatch, mock_tenant):
+    set_home(monkeypatch, '/testdata/azure')
+    set_az_path(monkeypatch)
+    mock_tenant('adb-123.4.azuredatabricks.net')
     monkeypatch.setenv('WARN', 'this is a warning')
     resource_id = '/subscriptions/123/resourceGroups/abc/providers/Microsoft.Databricks/workspaces/abc123'
     cfg = Config(auth_type='azure-cli',
                  host='https://adb-123.4.azuredatabricks.net',
                  azure_workspace_resource_id=resource_id)
     assert 'X-Databricks-Azure-SP-Management-Token' in cfg.authenticate()
+
+
+@pytest.mark.parametrize('username', ['systemAssignedIdentity', 'userAssignedIdentity'])
+def test_azure_cli_does_not_specify_tenant_id_with_msi(monkeypatch, username):
+    set_home(monkeypatch, '/testdata/azure')
+    set_az_path(monkeypatch)
+    monkeypatch.setenv('FAIL_IF_TENANT_ID_SET', 'true')
+    monkeypatch.setenv('AZ_USER_NAME', username)
+    monkeypatch.setenv('AZ_USER_TYPE', 'servicePrincipal')
+    cfg = Config(auth_type='azure-cli', host='https://adb-123.4.azuredatabricks.net', azure_tenant_id='abc')
diff --git a/tests/test_base_client.py b/tests/test_base_client.py
new file mode 100644
index 000000000..e9e7324a9
--- /dev/null
+++ b/tests/test_base_client.py
@@ -0,0 +1,278 @@
+from http.server import BaseHTTPRequestHandler
+from typing import Iterator, List
+
+import pytest
+import requests
+
+from databricks.sdk import errors, useragent
+from databricks.sdk._base_client import _BaseClient, _StreamingResponse
+from databricks.sdk.core import DatabricksError
+
+from .clock import FakeClock
+from .fixture_server import http_fixture_server
+
+
+class DummyResponse(requests.Response):
+    _content: Iterator[bytes]
+    _closed: bool = False
+
+    def __init__(self, content: List[bytes]) -> None:
+        super().__init__()
+        self._content = iter(content)
+
+    def iter_content(self, chunk_size: int = 1, decode_unicode=False) -> Iterator[bytes]:
+        return self._content
+
+    def close(self):
+        self._closed = True
+
+    def isClosed(self):
+        return self._closed
+
+
+def test_streaming_response_read(config):
+    content = b"some initial binary data: \x00\x01"
+    response = _StreamingResponse(DummyResponse([content]))
+    assert response.read() == content
+
+
+def test_streaming_response_read_partial(config):
+    content = b"some initial binary data: \x00\x01"
+    response = _StreamingResponse(DummyResponse([content]))
+    assert response.read(8) == b"some ini"
+
+
+def test_streaming_response_read_full(config):
+    content = b"some initial binary data: \x00\x01"
+    response = _StreamingResponse(DummyResponse([content, content]))
+    assert response.read() == content + content
+
+
+def test_streaming_response_read_closes(config):
+    content = b"some initial binary data: \x00\x01"
+    dummy_response = DummyResponse([content])
+    with _StreamingResponse(dummy_response) as response:
+        assert response.read() == content
+    assert dummy_response.isClosed()
+
+
+@pytest.mark.parametrize('status_code,headers,body,expected_error', [
+    (400, {}, {
+        "message":
+        "errorMessage",
+        "details": [{
+            "type": DatabricksError._error_info_type,
+            "reason": "error reason",
+            "domain": "error domain",
+            "metadata": {
+                "etag": "error etag"
+            },
+        }, {
+            "type": "wrong type",
+            "reason": "wrong reason",
+            "domain": "wrong domain",
+            "metadata": {
+                "etag": "wrong etag"
+            }
+        }],
+    },
+     errors.BadRequest('errorMessage',
+                       details=[{
+                           'type': DatabricksError._error_info_type,
+                           'reason': 'error reason',
+                           'domain': 'error domain',
+                           'metadata': {
+                               'etag': 'error etag'
+                           },
+                       }])),
+    (401, {}, {
+        'error_code': 'UNAUTHORIZED',
+        'message': 'errorMessage',
+    }, errors.Unauthenticated('errorMessage', error_code='UNAUTHORIZED')),
+    (403, {}, {
+        'error_code': 'FORBIDDEN',
+        'message': 'errorMessage',
+    }, errors.PermissionDenied('errorMessage', error_code='FORBIDDEN')),
+    (429, {}, {
+        'error_code': 'TOO_MANY_REQUESTS',
+        'message': 'errorMessage',
+    }, errors.TooManyRequests('errorMessage', error_code='TOO_MANY_REQUESTS', retry_after_secs=1)),
+    (429, {
+        'Retry-After': '100'
+    }, {
+        'error_code': 'TOO_MANY_REQUESTS',
+        'message': 'errorMessage',
+    }, errors.TooManyRequests('errorMessage', error_code='TOO_MANY_REQUESTS', retry_after_secs=100)),
+    (503, {}, {
+        'error_code': 'TEMPORARILY_UNAVAILABLE',
+        'message': 'errorMessage',
+    }, errors.TemporarilyUnavailable('errorMessage', error_code='TEMPORARILY_UNAVAILABLE',
+                                     retry_after_secs=1)),
+    (503, {
+        'Retry-After': '100'
+    }, {
+        'error_code': 'TEMPORARILY_UNAVAILABLE',
+        'message': 'errorMessage',
+    },
+     errors.TemporarilyUnavailable('errorMessage', error_code='TEMPORARILY_UNAVAILABLE',
+                                   retry_after_secs=100)),
+    (404, {}, {
+        'scimType': 'scim type',
+        'detail': 'detail',
+        'status': 'status',
+    }, errors.NotFound('scim type detail', error_code='SCIM_status')),
+])
+def test_error(requests_mock, status_code, headers, body, expected_error):
+    client = _BaseClient(clock=FakeClock())
+    requests_mock.get("/test", json=body, status_code=status_code, headers=headers)
+    with pytest.raises(DatabricksError) as raised:
+        client._perform("GET", "https://localhost/test", headers={"test": "test"})
+    actual = raised.value
+    assert isinstance(actual, type(expected_error))
+    assert str(actual) == str(expected_error)
+    assert actual.error_code == expected_error.error_code
+    assert actual.retry_after_secs == expected_error.retry_after_secs
+    expected_error_infos, actual_error_infos = expected_error.get_error_info(), actual.get_error_info()
+    assert len(expected_error_infos) == len(actual_error_infos)
+    for expected, actual in zip(expected_error_infos, actual_error_infos):
+        assert expected.type == actual.type
+        assert expected.reason == actual.reason
+        assert expected.domain == actual.domain
+        assert expected.metadata == actual.metadata
+
+
+def test_api_client_do_custom_headers(requests_mock):
+    client = _BaseClient()
+    requests_mock.get("/test",
+                      json={"well": "done"},
+                      request_headers={
+                          "test": "test",
+                          "User-Agent": useragent.to_string()
+                      })
+    res = client.do("GET", "https://localhost/test", headers={"test": "test"})
+    assert res == {"well": "done"}
+
+
+@pytest.mark.parametrize('status_code,include_retry_after',
+                         ((429, False), (429, True), (503, False), (503, True)))
+def test_http_retry_after(status_code, include_retry_after):
+    requests = []
+
+    def inner(h: BaseHTTPRequestHandler):
+        if len(requests) == 0:
+            h.send_response(status_code)
+            if include_retry_after:
+                h.send_header('Retry-After', '1')
+            h.send_header('Content-Type', 'application/json')
+            h.end_headers()
+        else:
+            h.send_response(200)
+            h.send_header('Content-Type', 'application/json')
+            h.end_headers()
+            h.wfile.write(b'{"foo": 1}')
+        requests.append(h.requestline)
+
+    with http_fixture_server(inner) as host:
+        api_client = _BaseClient(clock=FakeClock())
+        res = api_client.do('GET', f'{host}/foo')
+        assert 'foo' in res
+
+    assert len(requests) == 2
+
+
+def test_http_retry_after_wrong_format():
+    requests = []
+
+    def inner(h: BaseHTTPRequestHandler):
+        if len(requests) == 0:
+            h.send_response(429)
+            h.send_header('Retry-After', '1.58')
+            h.end_headers()
+        else:
+            h.send_response(200)
+            h.send_header('Content-Type', 'application/json')
+            h.end_headers()
+            h.wfile.write(b'{"foo": 1}')
+        requests.append(h.requestline)
+
+    with http_fixture_server(inner) as host:
+        api_client = _BaseClient(clock=FakeClock())
+        res = api_client.do('GET', f'{host}/foo')
+        assert 'foo' in res
+
+    assert len(requests) == 2
+
+
+def test_http_retried_exceed_limit():
+    requests = []
+
+    def inner(h: BaseHTTPRequestHandler):
+        h.send_response(429)
+        h.send_header('Retry-After', '1')
+        h.end_headers()
+        requests.append(h.requestline)
+
+    with http_fixture_server(inner) as host:
+        api_client = _BaseClient(retry_timeout_seconds=1, clock=FakeClock())
+        with pytest.raises(TimeoutError):
+            api_client.do('GET', f'{host}/foo')
+
+    assert len(requests) == 1
+
+
+def test_http_retried_on_match():
+    requests = []
+
+    def inner(h: BaseHTTPRequestHandler):
+        if len(requests) == 0:
+            h.send_response(400)
+            h.end_headers()
+            h.wfile.write(b'{"error_code": "abc", "message": "... ClusterNotReadyException ..."}')
+        else:
+            h.send_response(200)
+            h.end_headers()
+            h.wfile.write(b'{"foo": 1}')
+        requests.append(h.requestline)
+
+    with http_fixture_server(inner) as host:
+        api_client = _BaseClient(clock=FakeClock())
+        res = api_client.do('GET', f'{host}/foo')
+        assert 'foo' in res
+
+    assert len(requests) == 2
+
+
+def test_http_not_retried_on_normal_errors():
+    requests = []
+
+    def inner(h: BaseHTTPRequestHandler):
+        if len(requests) == 0:
+            h.send_response(400)
+            h.end_headers()
+            h.wfile.write(b'{"error_code": "abc", "message": "something not found"}')
+        requests.append(h.requestline)
+
+    with http_fixture_server(inner) as host:
+        api_client = _BaseClient(clock=FakeClock())
+        with pytest.raises(DatabricksError):
+            api_client.do('GET', f'{host}/foo')
+
+    assert len(requests) == 1
+
+
+def test_http_retried_on_connection_error():
+    requests = []
+
+    def inner(h: BaseHTTPRequestHandler):
+        if len(requests) > 0:
+            h.send_response(200)
+            h.end_headers()
+            h.wfile.write(b'{"foo": 1}')
+        requests.append(h.requestline)
+
+    with http_fixture_server(inner) as host:
+        api_client = _BaseClient(clock=FakeClock())
+        res = api_client.do('GET', f'{host}/foo')
+        assert 'foo' in res
+
+    assert len(requests) == 2
diff --git a/tests/test_config.py b/tests/test_config.py
index 4b6c05638..2eac6d2f8 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -1,23 +1,37 @@
+import os
+import pathlib
 import platform
+import random
+import string
+from datetime import datetime
 
+import pytest
+
+from databricks.sdk import useragent
 from databricks.sdk.config import Config, with_product, with_user_agent_extra
+from databricks.sdk.credentials_provider import Token
 from databricks.sdk.version import __version__
 
-from .conftest import noop_credentials
-
+from .conftest import noop_credentials, set_az_path
 
-def test_config_copy_preserves_product_and_product_version():
-    c = Config(credentials_strategy=noop_credentials, product='foo', product_version='1.2.3')
-    c2 = c.copy()
-    assert c2._product == 'foo'
-    assert c2._product_version == '1.2.3'
+__tests__ = os.path.dirname(__file__)
 
 
 def test_config_supports_legacy_credentials_provider():
     c = Config(credentials_provider=noop_credentials, product='foo', product_version='1.2.3')
     c2 = c.copy()
-    assert c2._product == 'foo'
-    assert c2._product_version == '1.2.3'
+    assert c2._product_info == ('foo', '1.2.3')
+
+
+@pytest.mark.parametrize('host,expected', [("https://abc.def.ghi", "https://abc.def.ghi"),
+                                           ("https://abc.def.ghi/", "https://abc.def.ghi"),
+                                           ("abc.def.ghi", "https://abc.def.ghi"),
+                                           ("abc.def.ghi/", "https://abc.def.ghi"),
+                                           ("https://abc.def.ghi:443", "https://abc.def.ghi"),
+                                           ("abc.def.ghi:443", "https://abc.def.ghi")])
+def test_config_host_url_format_check(mocker, host, expected):
+    mocker.patch('databricks.sdk.config.Config.init_auth')
+    assert Config(host=host).host == expected
 
 
 def test_extra_and_upstream_user_agent(monkeypatch):
@@ -41,7 +55,7 @@ def system(self):
 
     assert config.user_agent == (
         f"test/0.0.0 databricks-sdk-py/{__version__} python/3.0.0 os/testos auth/basic"
-        f" test-extra-1/1 test-extra-2/2 upstream/upstream-product upstream-version/0.0.1"
+        " test-extra-1/1 test-extra-2/2 upstream/upstream-product upstream-version/0.0.1"
         " runtime/13.1-anything-else")
 
     with_product('some-product', '0.32.1')
@@ -63,6 +77,79 @@ def test_config_copy_deep_copies_user_agent_other_info(config):
     assert "test/test2" in config_copy.user_agent
     assert "test/test2" not in config.user_agent
 
+    original_extra = useragent.extra()
     with_user_agent_extra("blueprint", "0.4.6")
     assert "blueprint/0.4.6" in config.user_agent
     assert "blueprint/0.4.6" in config_copy.user_agent
+    useragent._reset_extra(original_extra)
+
+
+def test_config_deep_copy(monkeypatch, mocker, tmp_path):
+    mocker.patch('databricks.sdk.credentials_provider.CliTokenSource.refresh',
+                 return_value=Token(access_token='token',
+                                    token_type='Bearer',
+                                    expiry=datetime(2023, 5, 22, 0, 0, 0)))
+
+    write_large_dummy_executable(tmp_path)
+    monkeypatch.setenv('PATH', tmp_path.as_posix())
+
+    config = Config(host="https://abc123.azuredatabricks.net", auth_type="databricks-cli")
+    config_copy = config.deep_copy()
+    assert config_copy.host == config.host
+
+
+def write_large_dummy_executable(path: pathlib.Path):
+    cli = path.joinpath('databricks')
+
+    # Generate a long random string to inflate the file size.
+    random_string = ''.join(random.choice(string.ascii_letters) for i in range(1024 * 1024))
+    cli.write_text("""#!/bin/sh
+cat <<EOF
+{random_string}
+EOF
+""".format(random_string=random_string))
+    cli.chmod(0o755)
+    assert cli.stat().st_size >= (1024 * 1024)
+    return cli
+
+
+def test_load_azure_tenant_id_404(requests_mock, monkeypatch):
+    set_az_path(monkeypatch)
+    mock = requests_mock.get('https://abc123.azuredatabricks.net/aad/auth', status_code=404)
+    cfg = Config(host="https://abc123.azuredatabricks.net")
+    assert cfg.azure_tenant_id is None
+    assert mock.called_once
+
+
+def test_load_azure_tenant_id_no_location_header(requests_mock, monkeypatch):
+    set_az_path(monkeypatch)
+    mock = requests_mock.get('https://abc123.azuredatabricks.net/aad/auth', status_code=302)
+    cfg = Config(host="https://abc123.azuredatabricks.net")
+    assert cfg.azure_tenant_id is None
+    assert mock.called_once
+
+
+def test_load_azure_tenant_id_unparsable_location_header(requests_mock, monkeypatch):
+    set_az_path(monkeypatch)
+    mock = requests_mock.get('https://abc123.azuredatabricks.net/aad/auth',
+                             status_code=302,
+                             headers={'Location': 'https://unexpected-location'})
+    cfg = Config(host="https://abc123.azuredatabricks.net")
+    assert cfg.azure_tenant_id is None
+    assert mock.called_once
+
+
+def test_load_azure_tenant_id_happy_path(requests_mock, monkeypatch):
+    set_az_path(monkeypatch)
+    mock = requests_mock.get(
+        'https://abc123.azuredatabricks.net/aad/auth',
+        status_code=302,
+        headers={'Location': 'https://login.microsoftonline.com/tenant-id/oauth2/authorize'})
+    cfg = Config(host="https://abc123.azuredatabricks.net")
+    assert cfg.azure_tenant_id == 'tenant-id'
+    assert mock.called_once
diff --git a/tests/test_core.py b/tests/test_core.py
index 2403d654b..16a4c2ad6 100644
--- a/tests/test_core.py
+++ b/tests/test_core.py
@@ -1,20 +1,15 @@
-import contextlib
-import functools
 import os
 import pathlib
+import platform
 import random
 import string
-import typing
 from datetime import datetime
 from http.server import BaseHTTPRequestHandler
-from typing import Iterator, List
 
 import pytest
-import requests
 
-from databricks.sdk import WorkspaceClient
-from databricks.sdk.core import (ApiClient, Config, DatabricksError,
-                                 StreamingResponse)
+from databricks.sdk import WorkspaceClient, errors
+from databricks.sdk.core import ApiClient, Config, DatabricksError
 from databricks.sdk.credentials_provider import (CliTokenSource,
                                                  CredentialsProvider,
                                                  CredentialsStrategy,
@@ -22,11 +17,13 @@
                                                  databricks_cli)
 from databricks.sdk.environments import (ENVIRONMENTS, AzureEnvironment, Cloud,
                                          DatabricksEnvironment)
+from databricks.sdk.oauth import Token
 from databricks.sdk.service.catalog import PermissionsChange
 from databricks.sdk.service.iam import AccessControlRequest
+from databricks.sdk.version import __version__
 
-from .clock import FakeClock
 from .conftest import noop_credentials
+from .fixture_server import http_fixture_server
 
 
 def test_parse_dsn():
@@ -65,39 +62,18 @@ def test_databricks_cli_token_parse_expiry(date_string, expected):
 
 
 def write_small_dummy_executable(path: pathlib.Path):
-    cli = path.joinpath('databricks')
-    cli.write_text('#!/bin/sh\necho "hello world"\n')
-    cli.chmod(0o755)
+    if platform.system() == "Windows":
+        cli = path.joinpath('databricks.exe')
+        cli.touch()
+        cli.write_text('@echo off\necho "hello world"\n')
+    else:
+        cli = path.joinpath('databricks')
+        cli.write_text('#!/bin/sh\necho "hello world"\n')
+        cli.chmod(0o755)
     assert cli.stat().st_size < 1024
     return cli
 
 
-def test_streaming_response_read(config):
-    content = b"some initial binary data: \x00\x01"
-    response = StreamingResponse(DummyResponse([content]))
-    assert response.read() == content
-
-
-def test_streaming_response_read_partial(config):
-    content = b"some initial binary data: \x00\x01"
-    response = StreamingResponse(DummyResponse([content]))
-    assert response.read(8) == b"some ini"
-
-
-def test_streaming_response_read_full(config):
-    content = b"some initial binary data: \x00\x01"
-    response = StreamingResponse(DummyResponse([content, content]))
-    assert response.read() == content + content
-
-
-def test_streaming_response_read_closes(config):
-    content = b"some initial binary data: \x00\x01"
-    dummy_response = DummyResponse([content])
-    with StreamingResponse(dummy_response) as response:
-        assert response.read() == content
-    assert dummy_response.isClosed()
-
-
 def write_large_dummy_executable(path: pathlib.Path):
     cli = path.joinpath('databricks')
 
@@ -131,9 +107,15 @@ def test_databricks_cli_token_source_installed_legacy_with_symlink(config, monke
     dir1.mkdir()
     dir2.mkdir()
 
-    (dir1 / "databricks").symlink_to(write_small_dummy_executable(dir2))
+    if platform.system() == 'Windows':
+        (dir1 / "databricks.exe").symlink_to(write_small_dummy_executable(dir2))
+    else:
+        (dir1 / "databricks").symlink_to(write_small_dummy_executable(dir2))
+
+    path = pathlib.Path(dir1)
+    path = str(path)
+    monkeypatch.setenv('PATH', path)
 
-    monkeypatch.setenv('PATH', dir1.as_posix())
     with pytest.raises(FileNotFoundError, match="version <0.100.0 detected"):
         DatabricksCliTokenSource(config)
 
@@ -173,10 +155,43 @@ def test_databricks_cli_credential_provider_installed_legacy(config, monkeypatch
     assert databricks_cli(config) == None
 
 
-def test_databricks_cli_credential_provider_installed_new(config, monkeypatch, tmp_path):
+def test_databricks_cli_credential_provider_installed_new(config, monkeypatch, tmp_path, mocker):
+    get_mock = mocker.patch('databricks.sdk.credentials_provider.CliTokenSource.refresh',
+                            return_value=Token(access_token='token',
+                                               token_type='Bearer',
+                                               expiry=datetime(2023, 5, 22, 0, 0, 0)))
     write_large_dummy_executable(tmp_path)
-    monkeypatch.setenv('PATH', str(os.pathsep).join([tmp_path.as_posix(), os.environ['PATH']]))
+    path = str(os.pathsep).join([tmp_path.as_posix(), os.environ['PATH']])
+    path = pathlib.Path(path)
+    path = str(path)
+    monkeypatch.setenv('PATH', path)
+
     assert databricks_cli(config) is not None
+    assert get_mock.call_count == 1
+
+
+def test_extra_and_upstream_user_agent(monkeypatch):
+
+    class MockUname:
+
+        @property
+        def system(self):
+            return 'TestOS'
+
+    monkeypatch.setattr(platform, 'python_version', lambda: '3.0.0')
+    monkeypatch.setattr(platform, 'uname', MockUname)
+    monkeypatch.setenv('DATABRICKS_SDK_UPSTREAM', "upstream-product")
+    monkeypatch.setenv('DATABRICKS_SDK_UPSTREAM_VERSION', "0.0.1")
+    monkeypatch.setenv('DATABRICKS_RUNTIME_VERSION', "13.1 anything/else")
+
+    config = Config(host='http://localhost', username="something", password="something", product='test',
+                    product_version='0.0.0') \
+        .with_user_agent_extra('test-extra-1', '1') \
+        .with_user_agent_extra('test-extra-2', '2')
+
+    assert config.user_agent == (
+        f"test/0.0.0 databricks-sdk-py/{__version__} python/3.0.0 os/testos auth/basic test-extra-1/1 test-extra-2/2"
+        " upstream/upstream-product upstream-version/0.0.1 runtime/13.1-anything-else")
 
 
 def test_config_copy_shallow_copies_credential_provider():
@@ -243,36 +258,6 @@ def test_config_parsing_non_string_env_vars(monkeypatch):
     assert c.debug_truncate_bytes == 100
 
 
-class DummyResponse(requests.Response):
-    _content: Iterator[bytes]
-    _closed: bool = False
-
-    def __init__(self, content: List[bytes]) -> None:
-        super().__init__()
-        self._content = iter(content)
-
-    def iter_content(self, chunk_size: int = 1, decode_unicode=False) -> Iterator[bytes]:
-        return self._content
-
-    def close(self):
-        self._closed = True
-
-    def isClosed(self):
-        return self._closed
-
-
-def test_api_client_do_custom_headers(config, requests_mock):
-    client = ApiClient(config)
-    requests_mock.get("/test",
-                      json={"well": "done"},
-                      request_headers={
-                          "test": "test",
-                          "User-Agent": config.user_agent
-                      })
-    res = client.do("GET", "/test", headers={"test": "test"})
-    assert res == {"well": "done"}
-
-
 def test_access_control_list(config, requests_mock):
     requests_mock.post("http://localhost/api/2.1/jobs/create",
                        request_headers={"User-Agent": config.user_agent})
@@ -298,13 +283,13 @@ def test_shares(config, requests_mock):
 
 
 def test_deletes(config, requests_mock):
-    requests_mock.delete("http://localhost/api/2.0/preview/sql/alerts/alertid",
+    requests_mock.delete("http://localhost/api/2.0/sql/alerts/alertId",
                          request_headers={"User-Agent": config.user_agent},
                          text="null",
                          )
 
     w = WorkspaceClient(config=config)
-    res = w.alerts.delete(alert_id="alertId")
+    res = w.alerts.delete(id="alertId")
 
     assert requests_mock.call_count == 1
     assert requests_mock.called
@@ -312,197 +297,37 @@ def test_deletes(config, requests_mock):
     assert res is None
 
 
-def test_error(config, requests_mock):
-    errorJson = {
-        "message":
-        "errorMessage",
-        "details": [{
-            "type": DatabricksError._error_info_type,
-            "reason": "error reason",
-            "domain": "error domain",
-            "metadata": {
-                "etag": "error etag"
-            },
-        }, {
-            "type": "wrong type",
-            "reason": "wrong reason",
-            "domain": "wrong domain",
-            "metadata": {
-                "etag": "wrong etag"
-            }
-        }],
-    }
-
+@pytest.mark.parametrize(
+    'status_code,headers,body,expected_error',
+    [(401, {}, {
+        'error_code': 'UNAUTHORIZED',
+        'message': 'errorMessage',
+    },
+      errors.Unauthenticated('errorMessage. Config: host=http://localhost, auth_type=noop',
+                             error_code='UNAUTHORIZED')),
+     (403, {}, {
+         'error_code': 'FORBIDDEN',
+         'message': 'errorMessage',
+     },
+      errors.PermissionDenied('errorMessage. Config: host=http://localhost, auth_type=noop',
+                              error_code='FORBIDDEN')), ])
+def test_error(config, requests_mock, status_code, headers, body, expected_error):
     client = ApiClient(config)
-    requests_mock.get("/test", json=errorJson, status_code=400, )
+    requests_mock.get("/test", json=body, status_code=status_code, headers=headers)
     with pytest.raises(DatabricksError) as raised:
         client.do("GET", "/test", headers={"test": "test"})
-
-    error_infos = raised.value.get_error_info()
-    assert len(error_infos) == 1
-    error_info = error_infos[0]
-    assert error_info.reason == "error reason"
-    assert error_info.domain == "error domain"
-    assert error_info.metadata["etag"] == "error etag"
-    assert error_info.type == DatabricksError._error_info_type
-
-
-def test_error_with_scimType():
-    args = {"detail": "detail", "scimType": "scim type"}
-    error = DatabricksError(**args)
-    assert str(error) == f"scim type detail"
-
-
-@contextlib.contextmanager
-def http_fixture_server(handler: typing.Callable[[BaseHTTPRequestHandler], None]):
-    from http.server import HTTPServer
-    from threading import Thread
-
-    class _handler(BaseHTTPRequestHandler):
-
-        def __init__(self, handler: typing.Callable[[BaseHTTPRequestHandler], None], *args):
-            self._handler = handler
-            super().__init__(*args)
-
-        def __getattr__(self, item):
-            if 'do_' != item[0:3]:
-                raise AttributeError(f'method {item} not found')
-            return functools.partial(self._handler, self)
-
-    handler_factory = functools.partial(_handler, handler)
-    srv = HTTPServer(('localhost', 0), handler_factory)
-    t = Thread(target=srv.serve_forever)
-    try:
-        t.daemon = True
-        t.start()
-        yield 'http://{0}:{1}'.format(*srv.server_address)
-    finally:
-        srv.shutdown()
-
-
-@pytest.mark.parametrize('status_code,include_retry_after',
-                         ((429, False), (429, True), (503, False), (503, True)))
-def test_http_retry_after(status_code, include_retry_after):
-    requests = []
-
-    def inner(h: BaseHTTPRequestHandler):
-        if len(requests) == 0:
-            h.send_response(status_code)
-            if include_retry_after:
-                h.send_header('Retry-After', '1')
-            h.send_header('Content-Type', 'application/json')
-            h.end_headers()
-        else:
-            h.send_response(200)
-            h.send_header('Content-Type', 'application/json')
-            h.end_headers()
-            h.wfile.write(b'{"foo": 1}')
-        requests.append(h.requestline)
-
-    with http_fixture_server(inner) as host:
-        api_client = ApiClient(Config(host=host, token='_', clock=FakeClock()))
-        res = api_client.do('GET', '/foo')
-        assert 'foo' in res
-
-    assert len(requests) == 2
-
-
-def test_http_retry_after_wrong_format():
-    requests = []
-
-    def inner(h: BaseHTTPRequestHandler):
-        if len(requests) == 0:
-            h.send_response(429)
-            h.send_header('Retry-After', '1.58')
-            h.end_headers()
-        else:
-            h.send_response(200)
-            h.send_header('Content-Type', 'application/json')
-            h.end_headers()
-            h.wfile.write(b'{"foo": 1}')
-        requests.append(h.requestline)
-
-    with http_fixture_server(inner) as host:
-        api_client = ApiClient(Config(host=host, token='_', clock=FakeClock()))
-        res = api_client.do('GET', '/foo')
-        assert 'foo' in res
-
-    assert len(requests) == 2
-
-
-def test_http_retried_exceed_limit():
-    requests = []
-
-    def inner(h: BaseHTTPRequestHandler):
-        h.send_response(429)
-        h.send_header('Retry-After', '1')
-        h.end_headers()
-        requests.append(h.requestline)
-
-    with http_fixture_server(inner) as host:
-        api_client = ApiClient(Config(host=host, token='_', retry_timeout_seconds=1, clock=FakeClock()))
-        with pytest.raises(TimeoutError):
-            api_client.do('GET', '/foo')
-
-    assert len(requests) == 1
-
-
-def test_http_retried_on_match():
-    requests = []
-
-    def inner(h: BaseHTTPRequestHandler):
-        if len(requests) == 0:
-            h.send_response(400)
-            h.end_headers()
-            h.wfile.write(b'{"error_code": "abc", "message": "... ClusterNotReadyException ..."}')
-        else:
-            h.send_response(200)
-            h.end_headers()
-            h.wfile.write(b'{"foo": 1}')
-        requests.append(h.requestline)
-
-    with http_fixture_server(inner) as host:
-        api_client = ApiClient(Config(host=host, token='_', clock=FakeClock()))
-        res = api_client.do('GET', '/foo')
-        assert 'foo' in res
-
-    assert len(requests) == 2
-
-
-def test_http_not_retried_on_normal_errors():
-    requests = []
-
-    def inner(h: BaseHTTPRequestHandler):
-        if len(requests) == 0:
-            h.send_response(400)
-            h.end_headers()
-            h.wfile.write(b'{"error_code": "abc", "message": "something not found"}')
-        requests.append(h.requestline)
-
-    with http_fixture_server(inner) as host:
-        api_client = ApiClient(Config(host=host, token='_', clock=FakeClock()))
-        with pytest.raises(DatabricksError):
-            api_client.do('GET', '/foo')
-
-    assert len(requests) == 1
-
-
-def test_http_retried_on_connection_error():
-    requests = []
-
-    def inner(h: BaseHTTPRequestHandler):
-        if len(requests) > 0:
-            h.send_response(200)
-            h.end_headers()
-            h.wfile.write(b'{"foo": 1}')
-        requests.append(h.requestline)
-
-    with http_fixture_server(inner) as host:
-        api_client = ApiClient(Config(host=host, token='_', clock=FakeClock()))
-        res = api_client.do('GET', '/foo')
-        assert 'foo' in res
-
-    assert len(requests) == 2
+    actual = raised.value
+    assert isinstance(actual, type(expected_error))
+    assert str(actual) == str(expected_error)
+    assert actual.error_code == expected_error.error_code
+    assert actual.retry_after_secs == expected_error.retry_after_secs
+    expected_error_infos, actual_error_infos = expected_error.get_error_info(), actual.get_error_info()
+    assert len(expected_error_infos) == len(actual_error_infos)
+    for expected, actual in zip(expected_error_infos, actual_error_infos):
+        assert expected.type == actual.type
+        assert expected.reason == actual.reason
+        assert expected.domain == actual.domain
+        assert expected.metadata == actual.metadata
 
 
 def test_github_oidc_flow_works_with_azure(monkeypatch):
diff --git a/tests/test_data_plane.py b/tests/test_data_plane.py
new file mode 100644
index 000000000..a74658964
--- /dev/null
+++ b/tests/test_data_plane.py
@@ -0,0 +1,59 @@
+from datetime import datetime, timedelta
+
+from databricks.sdk.data_plane import DataPlaneService
+from databricks.sdk.oauth import Token
+from databricks.sdk.service.oauth2 import DataPlaneInfo
+
+info = DataPlaneInfo(authorization_details="authDetails", endpoint_url="url")
+
+token = Token(access_token="token", token_type="type", expiry=datetime.now() + timedelta(hours=1))
+
+
+class MockRefresher:
+
+    def __init__(self, expected: str):
+        self._expected = expected
+
+    def __call__(self, auth_details: str) -> Token:
+        assert self._expected == auth_details
+        return token
+
+
+def throw_exception():
+    raise Exception("Expected value to be cached")
+
+
+def test_not_cached():
+    data_plane = DataPlaneService()
+    res = data_plane.get_data_plane_details("method", ["params"], lambda: info,
+                                            lambda a: MockRefresher(info.authorization_details).__call__(a))
+    assert res.endpoint_url == info.endpoint_url
+    assert res.token == token
+
+
+def test_token_expired():
+    expired = Token(access_token="expired", token_type="type", expiry=datetime.now() + timedelta(hours=-1))
+    data_plane = DataPlaneService()
+    data_plane._tokens["method/params"] = expired
+    res = data_plane.get_data_plane_details("method", ["params"], lambda: info,
+                                            lambda a: MockRefresher(info.authorization_details).__call__(a))
+    assert res.endpoint_url == info.endpoint_url
+    assert res.token == token
+
+
+def test_info_cached():
+    data_plane = DataPlaneService()
+    data_plane._data_plane_info["method/params"] = info
+    res = data_plane.get_data_plane_details("method", ["params"], throw_exception,
+                                            lambda a: MockRefresher(info.authorization_details).__call__(a))
+    assert res.endpoint_url == info.endpoint_url
+    assert res.token == token
+
+
+def test_token_cached():
+    data_plane = DataPlaneService()
+    data_plane._data_plane_info["method/params"] = info
+    data_plane._tokens["method/params"] = token
+    res = data_plane.get_data_plane_details("method", ["params"], throw_exception, throw_exception)
+    assert res.endpoint_url == info.endpoint_url
+    assert res.token == token
diff --git a/tests/test_dbfs_mixins.py b/tests/test_dbfs_mixins.py
index 427c445fd..6bbaca7a2 100644
--- a/tests/test_dbfs_mixins.py
+++ b/tests/test_dbfs_mixins.py
@@ -70,3 +70,33 @@ def test_fs_path_invalid(config):
     with pytest.raises(ValueError) as e:
         dbfs_ext._path('s3://path/to/file')
     assert 'unsupported scheme "s3"' in str(e.value)
+
+
+def test_dbfs_local_path_mkdir(config, tmp_path):
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient(config=config)
+    w.dbfs._path(f'file:{tmp_path}/test_dir').mkdir()
+    assert w.dbfs.exists(f'file:{tmp_path}/test_dir')
+
+
+def test_dbfs_exists(config, mocker):
+    from databricks.sdk import WorkspaceClient
+
+    get_status = mocker.patch('databricks.sdk.service.files.DbfsAPI.get_status', side_effect=NotFound())
+
+    client = WorkspaceClient(config=config)
+    client.dbfs.exists('/abc/def/ghi')
+
+    get_status.assert_called_with('/abc/def/ghi')
+
+
+def test_volume_exists(config, mocker):
+    from databricks.sdk import WorkspaceClient
+
+    get_metadata = mocker.patch('databricks.sdk.service.files.FilesAPI.get_metadata')
+
+    client = WorkspaceClient(config=config)
+    client.dbfs.exists('/Volumes/abc/def/ghi')
+
+    get_metadata.assert_called_with('/Volumes/abc/def/ghi')
diff --git a/tests/test_errors.py b/tests/test_errors.py
index 87111c4b4..881f016f3 100644
--- a/tests/test_errors.py
+++ b/tests/test_errors.py
@@ -1,97 +1,138 @@
+import http.client
+import json
+from typing import List, Optional, Tuple
+
 import pytest
 import requests
 
 from databricks.sdk import errors
 
 
-def fake_response(status_code: int) -> requests.Response:
+def fake_response(method: str,
+                  status_code: int,
+                  response_body: str,
+                  path: Optional[str] = None) -> requests.Response:
+    return fake_raw_response(method, status_code, response_body.encode('utf-8'), path)
+
+
+def fake_raw_response(method: str,
+                      status_code: int,
+                      response_body: bytes,
+                      path: Optional[str] = None) -> requests.Response:
     resp = requests.Response()
     resp.status_code = status_code
-    resp.request = requests.Request('GET', 'https://databricks.com/api/2.0/service').prepare()
+    resp.reason = http.client.responses.get(status_code, '')
+    if path is None:
+        path = '/api/2.0/service'
+    resp.request = requests.Request(method, f"https://databricks.com{path}").prepare()
+    resp._content = response_body
     return resp
 
 
-def test_error_code_has_precedence_over_http_status():
-    err = errors.error_mapper(fake_response(400), {
-        'error_code': 'INVALID_PARAMETER_VALUE',
-        'message': 'nope'
-    })
-    assert errors.InvalidParameterValue == type(err)
-
-
-def test_http_status_code_maps_fine():
-    err = errors.error_mapper(fake_response(400), {'error_code': 'MALFORMED_REQUEST', 'message': 'nope'})
-    assert errors.BadRequest == type(err)
-
-
-def test_other_errors_also_map_fine():
-    err = errors.error_mapper(fake_response(417), {'error_code': 'WHOOPS', 'message': 'nope'})
-    assert errors.DatabricksError == type(err)
+def fake_valid_response(method: str,
+                        status_code: int,
+                        error_code: str,
+                        message: str,
+                        path: Optional[str] = None) -> requests.Response:
+    body = {'message': message}
+    if error_code:
+        body['error_code'] = error_code
+    return fake_response(method, status_code, json.dumps(body), path)
 
 
-def test_missing_error_code():
-    err = errors.error_mapper(fake_response(522), {'message': 'nope'})
-    assert errors.DatabricksError == type(err)
-
-
-def test_private_link_error():
+def make_private_link_response() -> requests.Response:
     resp = requests.Response()
     resp.url = 'https://databricks.com/login.html?error=private-link-validation-error'
     resp.request = requests.Request('GET', 'https://databricks.com/api/2.0/service').prepare()
-    err = errors.error_mapper(resp, {})
-    assert errors.PrivateLinkValidationError == type(err)
-
-
-@pytest.mark.parametrize('status_code, error_code, klass',
-                         [(400, ..., errors.BadRequest), (400, 'INVALID_PARAMETER_VALUE', errors.BadRequest),
-                          (400, 'INVALID_PARAMETER_VALUE', errors.InvalidParameterValue),
-                          (400, 'REQUEST_LIMIT_EXCEEDED', errors.TooManyRequests), (400, ..., IOError),
-                          (401, ..., errors.Unauthenticated), (401, ..., IOError),
-                          (403, ..., errors.PermissionDenied),
-                          (403, ..., IOError), (404, ..., errors.NotFound), (404, ..., IOError),
-                          (409, ..., errors.ResourceConflict), (409, 'ABORTED', errors.Aborted),
-                          (409, 'ABORTED', errors.ResourceConflict),
-                          (409, 'ALREADY_EXISTS', errors.AlreadyExists),
-                          (409, 'ALREADY_EXISTS', errors.ResourceConflict), (409, ..., IOError),
-                          (429, ..., errors.TooManyRequests),
-                          (429, 'REQUEST_LIMIT_EXCEEDED', errors.TooManyRequests),
-                          (429, 'REQUEST_LIMIT_EXCEEDED', errors.RequestLimitExceeded),
-                          (429, 'RESOURCE_EXHAUSTED', errors.TooManyRequests),
-                          (429, 'RESOURCE_EXHAUSTED', errors.ResourceExhausted), (429, ..., IOError),
-                          (499, ..., errors.Cancelled), (499, ..., IOError), (500, ..., errors.InternalError),
-                          (500, 'UNKNOWN', errors.InternalError), (500, 'UNKNOWN', errors.Unknown),
-                          (500, 'DATA_LOSS', errors.InternalError), (500, 'DATA_LOSS', errors.DataLoss),
-                          (500, ..., IOError), (501, ..., errors.NotImplemented), (501, ..., IOError),
-                          (503, ..., errors.TemporarilyUnavailable), (503, ..., IOError),
-                          (504, ..., errors.DeadlineExceeded), (504, ..., IOError),
-                          (444, ..., errors.DatabricksError), (444, ..., IOError), ])
-def test_subclasses(status_code, error_code, klass):
-    try:
-        raise errors.error_mapper(fake_response(status_code), {'error_code': error_code, 'message': 'nope'})
-    except klass:
-        return
+    resp._content = b'{}'
+    resp.status_code = 200
+    return resp
 
 
-@pytest.mark.parametrize('verb, path, status_code, error_code, message, expected_error',
-                         [[
-                             'GET', '/api/2.0/clusters/get', 400, 'INVALID_PARAMETER_VALUE',
-                             'Cluster abcde does not exist', errors.ResourceDoesNotExist
-                         ],
-                          [
-                              'GET', '/api/2.0/jobs/get', 400, 'INVALID_PARAMETER_VALUE',
-                              'Job abcde does not exist', errors.ResourceDoesNotExist
-                          ],
-                          [
-                              'GET', '/api/2.1/jobs/get', 400, 'INVALID_PARAMETER_VALUE',
-                              'Job abcde does not exist', errors.ResourceDoesNotExist
-                          ],
-                          [
-                              'GET', '/api/2.1/jobs/get', 400, 'INVALID_PARAMETER_VALUE',
-                              'Invalid spark version', errors.InvalidParameterValue
-                          ], ])
-def test_error_overrides(verb, path, status_code, error_code, message, expected_error):
-    resp = requests.Response()
-    resp.status_code = status_code
-    resp.request = requests.Request(verb, f'https://databricks.com{path}').prepare()
-    with pytest.raises(expected_error):
-        raise errors.error_mapper(resp, {'error_code': error_code, 'message': message})
+# This should be `(int, str, type)` but doesn't work in Python 3.7-3.8.
+base_subclass_test_cases: List[Tuple[int, str,
+                                     type]] = [(400, '', errors.BadRequest),
+                                               (400, 'INVALID_PARAMETER_VALUE', errors.BadRequest),
+                                               (400, 'INVALID_PARAMETER_VALUE', errors.InvalidParameterValue),
+                                               (400, 'REQUEST_LIMIT_EXCEEDED', errors.TooManyRequests),
+                                               (400, '', IOError), (401, '', errors.Unauthenticated),
+                                               (401, '', IOError), (403, '', errors.PermissionDenied),
+                                               (403, '', IOError), (404, '', errors.NotFound),
+                                               (404, '', IOError), (409, '', errors.ResourceConflict),
+                                               (409, 'ABORTED', errors.Aborted),
+                                               (409, 'ABORTED', errors.ResourceConflict),
+                                               (409, 'ALREADY_EXISTS', errors.AlreadyExists),
+                                               (409, 'ALREADY_EXISTS', errors.ResourceConflict),
+                                               (409, '', IOError), (429, '', errors.TooManyRequests),
+                                               (429, 'REQUEST_LIMIT_EXCEEDED', errors.TooManyRequests),
+                                               (429, 'REQUEST_LIMIT_EXCEEDED', errors.RequestLimitExceeded),
+                                               (429, 'RESOURCE_EXHAUSTED', errors.TooManyRequests),
+                                               (429, 'RESOURCE_EXHAUSTED', errors.ResourceExhausted),
+                                               (429, '', IOError), (499, '', errors.Cancelled),
+                                               (499, '', IOError), (500, '', errors.InternalError),
+                                               (500, 'UNKNOWN', errors.InternalError),
+                                               (500, 'UNKNOWN', errors.Unknown),
+                                               (500, 'DATA_LOSS', errors.InternalError),
+                                               (500, 'DATA_LOSS', errors.DataLoss), (500, '', IOError),
+                                               (501, '', errors.NotImplemented), (501, '', IOError),
+                                               (503, '', errors.TemporarilyUnavailable), (503, '', IOError),
+                                               (504, '', errors.DeadlineExceeded), (504, '', IOError),
+                                               (444, '', errors.DatabricksError), (444, '', IOError), ]
+
+subclass_test_cases = [(fake_valid_response('GET', x[0], x[1], 'nope'), x[2], 'nope')
+                       for x in base_subclass_test_cases]
+
+
+@pytest.mark.parametrize(
+    'response, expected_error, expected_message', subclass_test_cases +
+    [(fake_response('GET', 400, ''), errors.BadRequest, 'Bad Request'),
+     (fake_valid_response('GET', 417, 'WHOOPS', 'nope'), errors.DatabricksError, 'nope'),
+     (fake_valid_response('GET', 522, '', 'nope'), errors.DatabricksError, 'nope'),
+     (make_private_link_response(), errors.PrivateLinkValidationError,
+      ('The requested workspace has AWS PrivateLink enabled and is not accessible from the current network. '
+       'Ensure that AWS PrivateLink is properly configured and that your device has access to the AWS VPC '
+       'endpoint. For more information, see '
+       'https://docs.databricks.com/en/security/network/classic/privatelink.html.'),
+      ),
+     (fake_valid_response(
+         'GET', 400, 'INVALID_PARAMETER_VALUE', 'Cluster abcde does not exist',
+         '/api/2.0/clusters/get'), errors.ResourceDoesNotExist, 'Cluster abcde does not exist'),
+     (fake_valid_response('GET', 400, 'INVALID_PARAMETER_VALUE', 'Job abcde does not exist',
+                          '/api/2.0/jobs/get'), errors.ResourceDoesNotExist, 'Job abcde does not exist'),
+     (fake_valid_response('GET', 400, 'INVALID_PARAMETER_VALUE', 'Job abcde does not exist',
+                          '/api/2.1/jobs/get'), errors.ResourceDoesNotExist, 'Job abcde does not exist'),
+     (fake_valid_response('GET', 400, 'INVALID_PARAMETER_VALUE', 'Invalid spark version',
+                          '/api/2.1/jobs/get'), errors.InvalidParameterValue, 'Invalid spark version'),
+     (fake_response(
+         'GET', 400,
+         'MALFORMED_REQUEST: vpc_endpoints malformed parameters: VPC Endpoint ... with use_case ... cannot be attached in ... list'
+     ), errors.BadRequest,
+      'vpc_endpoints malformed parameters: VPC Endpoint ... with use_case ... cannot be attached in ... list'
+      ),
+     (fake_response('GET', 400, '
Worker environment not ready
'), errors.BadRequest, + 'Worker environment not ready'), + (fake_response('GET', 400, 'this is not a real response'), errors.BadRequest, + ('unable to parse response. This is likely a bug in the Databricks SDK for Python or the underlying API. ' + 'Please report this issue with the following debugging information to the SDK issue tracker at ' + 'https://github.com/databricks/databricks-sdk-go/issues. Request log:```GET /api/2.0/service\n' + '< 400 Bad Request\n' + '< this is not a real response```')), + (fake_response( + 'GET', 404, + json.dumps({ + 'detail': 'Group with id 1234 is not found', + 'status': '404', + 'schemas': ['urn:ietf:params:scim:api:messages:2.0:Error'] + })), errors.NotFound, 'None Group with id 1234 is not found'), + (fake_response('GET', 404, json.dumps("This is JSON but not a dictionary")), errors.NotFound, + 'unable to parse response. This is likely a bug in the Databricks SDK for Python or the underlying API. Please report this issue with the following debugging information to the SDK issue tracker at https://github.com/databricks/databricks-sdk-go/issues. Request log:```GET /api/2.0/service\n< 404 Not Found\n< "This is JSON but not a dictionary"```' + ), + (fake_raw_response('GET', 404, b'\x80'), errors.NotFound, + 'unable to parse response. This is likely a bug in the Databricks SDK for Python or the underlying API. Please report this issue with the following debugging information to the SDK issue tracker at https://github.com/databricks/databricks-sdk-go/issues. 
Request log:```GET /api/2.0/service\n< 404 Not Found\n< �```' + )]) +def test_get_api_error(response, expected_error, expected_message): + parser = errors._Parser() + with pytest.raises(errors.DatabricksError) as e: + raise parser.get_api_error(response) + assert isinstance(e.value, expected_error) + assert str(e.value) == expected_message diff --git a/tests/test_jobs.py b/tests/test_jobs.py new file mode 100644 index 000000000..50143f193 --- /dev/null +++ b/tests/test_jobs.py @@ -0,0 +1,100 @@ +from databricks.sdk import WorkspaceClient + + +# Test cases below are checking that we pinned API 2.1 for certain endpoints, DO NOT REMOVE OR CHANGE THEM. https://databricks.atlassian.net/browse/JOBS-19298 +def test_jobs_create(config, requests_mock): + requests_mock.post("http://localhost/api/2.1/jobs/create", + request_headers={ + 'Accept': 'application/json', + 'Content-Type': 'application/json', + }, + text="null", + ) + + w = WorkspaceClient(config=config) + w.jobs.create() + + assert requests_mock.call_count == 1 + assert requests_mock.called + + +def test_jobs_update(config, requests_mock): + requests_mock.post("http://localhost/api/2.1/jobs/update", + request_headers={ + 'Accept': 'application/json', + 'Content-Type': 'application/json', + }, + text="null", + ) + + w = WorkspaceClient(config=config) + w.jobs.update(job_id="job_id") + + assert requests_mock.call_count == 1 + assert requests_mock.called + + +def test_jobs_list(config, requests_mock): + requests_mock.get("http://localhost/api/2.1/jobs/list", + request_headers={ + 'Accept': 'application/json', + }, + text="null", + ) + + w = WorkspaceClient(config=config) + for _ in w.jobs.list(): + pass + + assert requests_mock.call_count == 1 + assert requests_mock.called + + +def test_jobs_get(config, requests_mock): + requests_mock.get("http://localhost/api/2.1/jobs/get", + request_headers={ + 'Accept': 'application/json', + }, + text="null", + ) + + w = WorkspaceClient(config=config) + w.jobs.get(job_id="job_id") 
+ + assert requests_mock.call_count == 1 + assert requests_mock.called + + +def test_jobs_reset(config, requests_mock): + requests_mock.post("http://localhost/api/2.1/jobs/reset", + request_headers={ + 'Accept': 'application/json', + 'Content-Type': 'application/json', + }, + text="null", + ) + + w = WorkspaceClient(config=config) + w.jobs.reset(job_id="job_id", new_settings=None) + + assert requests_mock.call_count == 1 + assert requests_mock.called + + +def test_jobs_runs_list(config, requests_mock): + requests_mock.get("http://localhost/api/2.1/jobs/runs/list", + request_headers={ + 'Accept': 'application/json', + }, + text="null", + ) + + w = WorkspaceClient(config=config) + for _ in w.jobs.list_runs(job_id="job_id"): + pass + + assert requests_mock.call_count == 1 + assert requests_mock.called + + +# End of test cases for API 2.1 pinning diff --git a/tests/test_model_serving_auth.py b/tests/test_model_serving_auth.py new file mode 100644 index 000000000..092a3bf16 --- /dev/null +++ b/tests/test_model_serving_auth.py @@ -0,0 +1,106 @@ +import time + +import pytest + +from databricks.sdk.core import Config + +from .conftest import raises + +default_auth_base_error_message = \ + "default auth: cannot configure default credentials, " \ + "please check https://docs.databricks.com/en/dev-tools/auth.html#databricks-client-unified-authentication " \ + "to configure credentials for your preferred authentication method" + + +@pytest.mark.parametrize("env_values, del_env_values, oauth_file_name", + [([ + ('IS_IN_DB_MODEL_SERVING_ENV', 'true'), ('DB_MODEL_SERVING_HOST_URL', 'x') + ], ['DATABRICKS_MODEL_SERVING_HOST_URL'], "tests/testdata/model-serving-test-token"), + ([('IS_IN_DATABRICKS_MODEL_SERVING_ENV', 'true'), + ('DB_MODEL_SERVING_HOST_URL', 'x')], ['DATABRICKS_MODEL_SERVING_HOST_URL'], + "tests/testdata/model-serving-test-token"), + ([('IS_IN_DB_MODEL_SERVING_ENV', 'true'), ('DATABRICKS_MODEL_SERVING_HOST_URL', 'x') + ], ['DB_MODEL_SERVING_HOST_URL'], 
"tests/testdata/model-serving-test-token"), + ([('IS_IN_DATABRICKS_MODEL_SERVING_ENV', 'true'), + ('DATABRICKS_MODEL_SERVING_HOST_URL', 'x') + ], ['DB_MODEL_SERVING_HOST_URL'], "tests/testdata/model-serving-test-token"), ]) +def test_model_serving_auth(env_values, del_env_values, oauth_file_name, monkeypatch, mocker): + ## In mlflow we check for these two environment variables to return the correct config + for (env_name, env_value) in env_values: + monkeypatch.setenv(env_name, env_value) + + for (env_name) in del_env_values: + monkeypatch.delenv(env_name, raising=False) + + # patch mlflow to read the file from the test directory + monkeypatch.setattr( + "databricks.sdk.credentials_provider.ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH", + oauth_file_name) + mocker.patch('databricks.sdk.config.Config._known_file_config_loader') + + cfg = Config() + + assert cfg.auth_type == 'model-serving' + headers = cfg.authenticate() + assert (cfg.host == 'x') + # Token defined in the test file + assert headers.get("Authorization") == 'Bearer databricks_sdk_unit_test_token' + + +@pytest.mark.parametrize("env_values, oauth_file_name", [ + ([], "invalid_file_name"), # Not in Model Serving and Invalid File Name + ([('IS_IN_DB_MODEL_SERVING_ENV', 'true')], "invalid_file_name"), # In Model Serving and Invalid File Name + ([('IS_IN_DATABRICKS_MODEL_SERVING_ENV', 'true') + ], "invalid_file_name"), # In Model Serving and Invalid File Name + ([], "tests/testdata/model-serving-test-token") # Not in Model Serving and Valid File Name +]) +@raises(default_auth_base_error_message) +def test_model_serving_auth_errors(env_values, oauth_file_name, monkeypatch): + for (env_name, env_value) in env_values: + monkeypatch.setenv(env_name, env_value) + monkeypatch.setattr( + "databricks.sdk.credentials_provider.ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH", + oauth_file_name) + + Config() + + +def test_model_serving_auth_refresh(monkeypatch, mocker): + ## In 
mlflow we check for these two environment variables to return the correct config + monkeypatch.setenv('IS_IN_DB_MODEL_SERVING_ENV', 'true') + monkeypatch.setenv('DB_MODEL_SERVING_HOST_URL', 'x') + + # patch mlflow to read the file from the test directory + monkeypatch.setattr( + "databricks.sdk.credentials_provider.ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH", + "tests/testdata/model-serving-test-token") + mocker.patch('databricks.sdk.config.Config._known_file_config_loader') + + cfg = Config() + assert cfg.auth_type == 'model-serving' + + current_time = time.time() + headers = cfg.authenticate() + assert (cfg.host == 'x') + assert headers.get( + "Authorization") == 'Bearer databricks_sdk_unit_test_token' # Token defined in the test file + + # Simulate refreshing the token by patching to to a new file + monkeypatch.setattr( + "databricks.sdk.credentials_provider.ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH", + "tests/testdata/model-serving-test-token-v2") + + monkeypatch.setattr('databricks.sdk.credentials_provider.time.time', lambda: current_time + 10) + + headers = cfg.authenticate() + assert (cfg.host == 'x') + # Read from cache even though new path is set because expiry is still not hit + assert headers.get("Authorization") == 'Bearer databricks_sdk_unit_test_token' + + # Expiry is 300 seconds so this should force an expiry and re read from the new file path + monkeypatch.setattr('databricks.sdk.credentials_provider.time.time', lambda: current_time + 600) + + headers = cfg.authenticate() + assert (cfg.host == 'x') + # Read V2 now + assert headers.get("Authorization") == 'Bearer databricks_sdk_unit_test_token_v2' diff --git a/tests/test_oauth.py b/tests/test_oauth.py index ce2d514ff..a637a5508 100644 --- a/tests/test_oauth.py +++ b/tests/test_oauth.py @@ -1,29 +1,126 @@ -from databricks.sdk.core import Config -from databricks.sdk.oauth import OAuthClient, OidcEndpoints, TokenCache - - -def 
test_token_cache_unique_filename_by_host(mocker): - mocker.patch.object(Config, "oidc_endpoints", - OidcEndpoints("http://localhost:1234", "http://localhost:1234")) - common_args = dict(client_id="abc", redirect_url="http://localhost:8020") - c1 = OAuthClient(host="http://localhost:", **common_args) - c2 = OAuthClient(host="https://bar.cloud.databricks.com", **common_args) - assert TokenCache(c1).filename != TokenCache(c2).filename - - -def test_token_cache_unique_filename_by_client_id(mocker): - mocker.patch.object(Config, "oidc_endpoints", - OidcEndpoints("http://localhost:1234", "http://localhost:1234")) - common_args = dict(host="http://localhost:", redirect_url="http://localhost:8020") - c1 = OAuthClient(client_id="abc", **common_args) - c2 = OAuthClient(client_id="def", **common_args) - assert TokenCache(c1).filename != TokenCache(c2).filename - - -def test_token_cache_unique_filename_by_scopes(mocker): - mocker.patch.object(Config, "oidc_endpoints", - OidcEndpoints("http://localhost:1234", "http://localhost:1234")) - common_args = dict(host="http://localhost:", client_id="abc", redirect_url="http://localhost:8020") - c1 = OAuthClient(scopes=["foo"], **common_args) - c2 = OAuthClient(scopes=["bar"], **common_args) - assert TokenCache(c1).filename != TokenCache(c2).filename +from databricks.sdk._base_client import _BaseClient +from databricks.sdk.oauth import (OidcEndpoints, TokenCache, + get_account_endpoints, + get_workspace_endpoints) + +from .clock import FakeClock + + +def test_token_cache_unique_filename_by_host(): + common_args = dict(client_id="abc", + redirect_url="http://localhost:8020", + oidc_endpoints=OidcEndpoints("http://localhost:1234", "http://localhost:1234")) + assert TokenCache(host="http://localhost:", + **common_args).filename != TokenCache("https://bar.cloud.databricks.com", + **common_args).filename + + +def test_token_cache_unique_filename_by_client_id(): + common_args = dict(host="http://localhost:", + 
redirect_url="http://localhost:8020", + oidc_endpoints=OidcEndpoints("http://localhost:1234", "http://localhost:1234")) + assert TokenCache(client_id="abc", **common_args).filename != TokenCache(client_id="def", + **common_args).filename + + +def test_token_cache_unique_filename_by_scopes(): + common_args = dict(host="http://localhost:", + client_id="abc", + redirect_url="http://localhost:8020", + oidc_endpoints=OidcEndpoints("http://localhost:1234", "http://localhost:1234")) + assert TokenCache(scopes=["foo"], **common_args).filename != TokenCache(scopes=["bar"], + **common_args).filename + + +def test_account_oidc_endpoints(requests_mock): + requests_mock.get( + "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/.well-known/oauth-authorization-server", + json={ + "authorization_endpoint": + "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/oauth/authorize", + "token_endpoint": "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/oauth/token" + }) + client = _BaseClient(clock=FakeClock()) + endpoints = get_account_endpoints("accounts.cloud.databricks.com", "abc-123", client=client) + assert endpoints == OidcEndpoints( + "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/oauth/authorize", + "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/oauth/token") + + +def test_account_oidc_endpoints_retry_on_429(requests_mock): + # It doesn't seem possible to use requests_mock to return different responses for the same request, e.g. when + # simulating a transient failure. Instead, the nth_request matcher increments a test-wide counter and only matches + # the nth request. 
+ request_count = 0 + + def nth_request(n): + + def observe_request(_request): + nonlocal request_count + is_match = request_count == n + if is_match: + request_count += 1 + return is_match + + return observe_request + + requests_mock.get( + "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/.well-known/oauth-authorization-server", + additional_matcher=nth_request(0), + status_code=429) + requests_mock.get( + "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/.well-known/oauth-authorization-server", + additional_matcher=nth_request(1), + json={ + "authorization_endpoint": + "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/oauth/authorize", + "token_endpoint": "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/oauth/token" + }) + client = _BaseClient(clock=FakeClock()) + endpoints = get_account_endpoints("accounts.cloud.databricks.com", "abc-123", client=client) + assert endpoints == OidcEndpoints( + "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/oauth/authorize", + "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/oauth/token") + + +def test_workspace_oidc_endpoints(requests_mock): + requests_mock.get("https://my-workspace.cloud.databricks.com/oidc/.well-known/oauth-authorization-server", + json={ + "authorization_endpoint": + "https://my-workspace.cloud.databricks.com/oidc/oauth/authorize", + "token_endpoint": "https://my-workspace.cloud.databricks.com/oidc/oauth/token" + }) + client = _BaseClient(clock=FakeClock()) + endpoints = get_workspace_endpoints("my-workspace.cloud.databricks.com", client=client) + assert endpoints == OidcEndpoints("https://my-workspace.cloud.databricks.com/oidc/oauth/authorize", + "https://my-workspace.cloud.databricks.com/oidc/oauth/token") + + +def test_workspace_oidc_endpoints_retry_on_429(requests_mock): + request_count = 0 + + def nth_request(n): + + def observe_request(_request): + nonlocal request_count + is_match = request_count == n + if is_match: + request_count 
+= 1 + return is_match + + return observe_request + + requests_mock.get("https://my-workspace.cloud.databricks.com/oidc/.well-known/oauth-authorization-server", + additional_matcher=nth_request(0), + status_code=429) + requests_mock.get("https://my-workspace.cloud.databricks.com/oidc/.well-known/oauth-authorization-server", + additional_matcher=nth_request(1), + json={ + "authorization_endpoint": + "https://my-workspace.cloud.databricks.com/oidc/oauth/authorize", + "token_endpoint": "https://my-workspace.cloud.databricks.com/oidc/oauth/token" + }) + client = _BaseClient(clock=FakeClock()) + endpoints = get_workspace_endpoints("my-workspace.cloud.databricks.com", client=client) + assert endpoints == OidcEndpoints("https://my-workspace.cloud.databricks.com/oidc/oauth/authorize", + "https://my-workspace.cloud.databricks.com/oidc/oauth/token") diff --git a/tests/test_open_ai_mixin.py b/tests/test_open_ai_mixin.py new file mode 100644 index 000000000..1858c66cb --- /dev/null +++ b/tests/test_open_ai_mixin.py @@ -0,0 +1,30 @@ +import sys + +import pytest + +from databricks.sdk.core import Config + + +def test_open_ai_client(monkeypatch): + from databricks.sdk import WorkspaceClient + + monkeypatch.setenv('DATABRICKS_HOST', 'test_host') + monkeypatch.setenv('DATABRICKS_TOKEN', 'test_token') + w = WorkspaceClient(config=Config()) + client = w.serving_endpoints.get_open_ai_client() + + assert client.base_url == "https://test_host/serving-endpoints/" + assert client.api_key == "no-token" + + +@pytest.mark.skipif(sys.version_info < (3, 8), reason="Requires Python > 3.7") +def test_langchain_open_ai_client(monkeypatch): + from databricks.sdk import WorkspaceClient + + monkeypatch.setenv('DATABRICKS_HOST', 'test_host') + monkeypatch.setenv('DATABRICKS_TOKEN', 'test_token') + w = WorkspaceClient(config=Config()) + client = w.serving_endpoints.get_langchain_chat_open_ai_client("databricks-meta-llama-3-1-70b-instruct") + + assert client.openai_api_base == 
"https://test_host/serving-endpoints" + assert client.model_name == "databricks-meta-llama-3-1-70b-instruct" diff --git a/tests/test_user_agent.py b/tests/test_user_agent.py new file mode 100644 index 000000000..5083d9908 --- /dev/null +++ b/tests/test_user_agent.py @@ -0,0 +1,42 @@ +import pytest + +from databricks.sdk.version import __version__ + + +@pytest.fixture(scope="function") +def user_agent(): + from databricks.sdk import useragent + orig_product_name = useragent._product_name + orig_product_version = useragent._product_version + orig_extra = useragent._extra + + yield useragent + + useragent._product_name = orig_product_name + useragent._product_version = orig_product_version + useragent._extra = orig_extra + + +@pytest.mark.xdist_group(name="user_agent") +def test_user_agent(user_agent): + user_agent._reset_product() + default = user_agent.to_string() + + assert 'unknown/0.0.0' in default + assert 'databricks-sdk-py/' + __version__ in default + assert 'os/' in default + assert 'python/' in default + + +@pytest.mark.xdist_group(name="user_agent") +def test_user_agent_with_product(user_agent): + user_agent.with_product('test', '1.0.0') + assert 'test/1.0.0' in user_agent.to_string() + + +@pytest.mark.xdist_group(name="user_agent") +def test_user_agent_with_partner(user_agent): + user_agent.with_partner('test') + user_agent.with_partner('differenttest') + assert 'partner/test' in user_agent.to_string() + assert 'partner/differenttest' in user_agent.to_string() diff --git a/tests/testdata/az b/tests/testdata/az index 5bf43a663..7437babce 100755 --- a/tests/testdata/az +++ b/tests/testdata/az @@ -1,7 +1,20 @@ #!/bin/bash -if [ -n "$WARN" ]; then - >&2 /bin/echo "WARNING: ${WARN}" +# If the arguments are "account show", return the account details. 
+if [ "$1" == "account" ] && [ "$2" == "show" ]; then + /bin/echo "{ + \"environmentName\": \"AzureCloud\", + \"id\": \"aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee\", + \"isDefault\": true, + \"name\": \"Pay-As-You-Go\", + \"state\": \"Enabled\", + \"tenantId\": \"aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee\", + \"user\": { + \"name\": \"${AZ_USER_NAME:-testuser@databricks.com}\", + \"type\": \"${AZ_USER_TYPE:-user}\" + } +}" + exit 0 fi if [ "yes" == "$FAIL" ]; then @@ -26,6 +39,21 @@ for arg in "$@"; do fi done +# Add character to file at $COUNT if it is defined. +if [ -n "$COUNT" ]; then + echo -n x >> "$COUNT" +fi + +# If FAIL_IF_TENANT_ID_SET is set & --tenant-id is passed, fail. +if [ -n "$FAIL_IF_TENANT_ID_SET" ]; then + for arg in "$@"; do + if [[ "$arg" == "--tenant" ]]; then + echo 1>&2 "ERROR: Tenant shouldn't be specified for managed identity account" + exit 1 + fi + done +fi + # Macos EXP="$(/bin/date -v+${EXPIRE:=10S} +'%F %T' 2>/dev/null)" if [ -z "${EXP}" ]; then diff --git a/tests/testdata/model-serving-test-token b/tests/testdata/model-serving-test-token new file mode 100644 index 000000000..3415ff226 --- /dev/null +++ b/tests/testdata/model-serving-test-token @@ -0,0 +1,7 @@ +{ + "OAUTH_TOKEN": [ + { + "oauthTokenValue": "databricks_sdk_unit_test_token" + } + ] +} \ No newline at end of file diff --git a/tests/testdata/model-serving-test-token-v2 b/tests/testdata/model-serving-test-token-v2 new file mode 100644 index 000000000..2567a7e50 --- /dev/null +++ b/tests/testdata/model-serving-test-token-v2 @@ -0,0 +1,7 @@ +{ + "OAUTH_TOKEN": [ + { + "oauthTokenValue": "databricks_sdk_unit_test_token_v2" + } + ] +} \ No newline at end of file diff --git a/tests/testdata/windows/az.ps1 b/tests/testdata/windows/az.ps1 new file mode 100644 index 000000000..97ecbca7c --- /dev/null +++ b/tests/testdata/windows/az.ps1 @@ -0,0 +1,84 @@ +#!/usr/bin/env pwsh + +# If the arguments are "account show", return the account details. 
+if ($args[0] -eq "account" -and $args[1] -eq "show") { + $output = @{ + environmentName = "AzureCloud" + id = "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee" + isDefault = $true + name = "Pay-As-You-Go" + state = "Enabled" + tenantId = "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee" + user = @{ + name = if ($env:AZ_USER_NAME) { $env:AZ_USER_NAME } else { "testuser@databricks.com" } + type = if ($env:AZ_USER_TYPE) { $env:AZ_USER_TYPE } else { "user" } + } + } + $output | ConvertTo-Json + exit 0 +} + +if ($env:WARN) { + Write-Error "WARNING: $env:WARN" +} + +if ($env:FAIL -eq "yes") { + Write-Error "This is just a failing script." + exit 1 +} + +if ($env:FAIL -eq "logout") { + Write-Error "No subscription found. Run 'az account set' to select a subscription." + exit 1 +} + +if ($env:FAIL -eq "corrupt") { + Write-Output "{accessToken: ..corrupt" + exit +} + +param ( + [string[]]$Args +) + +foreach ($arg in $Args) { + if ($arg -eq $env:FAIL_IF) { + Write-Output "Failed" + exit 1 + } +} + +# If FAIL_IF_TENANT_ID_SET is set & --tenant-id is passed, fail. +if ($env:FAIL_IF_TENANT_ID_SET) { + foreach ($arg in $args) { + if ($arg -eq "--tenant-id" -or $arg -like "--tenant*") { + Write-Error "ERROR: Tenant shouldn't be specified for managed identity account" + exit 1 + } + } +} + +try { + $EXP = (Get-Date).AddSeconds($env:EXPIRE -as [int]) +} catch { + $expireString = $env:EXPIRE + $expireString = $expireString -replace "S", "seconds" + $expireString = $expireString -replace "M", "minutes" + $EXP = (Get-Date).AddSeconds($expireString -as [int]) +} + +if (-not $env:TF_AAD_TOKEN) { + $TF_AAD_TOKEN = "..." +} else { + $TF_AAD_TOKEN = $env:TF_AAD_TOKEN +} + +$expiresOn = $EXP.ToString("yyyy-MM-dd HH:mm:ss") + +Write-Output "{ + `"accessToken`": `"$TF_AAD_TOKEN`", + `"expiresOn`": `"$expiresOn`", + `"subscription`": `"aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee`", + `"tenant`": `"aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee`", + `"tokenType`": `"Bearer`" +}"