diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 153783450..62d390339 100755 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -11ae6f9d98f0d0838a5e53c27032f178fecc4ee0 \ No newline at end of file +9e9cd2a1a802f6df10f3a5ffe6aa97b588d5884a \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index 6fc8f76f1..4710c40d0 100755 --- a/.gitattributes +++ b/.gitattributes @@ -319,6 +319,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreatePolic databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRegisteredModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRequestExternalLineage.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSchema.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSecretRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateStorageCredential.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateTableConstraint.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateTableRequest.java linguist-generated=true @@ -357,6 +358,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteQuali databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteRegisteredModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteRequestExternalLineage.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSchemaRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSecretRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteStorageCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableConstraintRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableRequest.java linguist-generated=true @@ -459,6 +461,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQuotaRes databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRefreshRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRegisteredModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSchemaRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSecretRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetStorageCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetWorkspaceBindingRequest.java linguist-generated=true @@ -501,6 +504,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRegiste databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRegisteredModelsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSecretsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSecretsResponse.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSummariesRequest.java linguist-generated=true @@ -596,6 +601,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo. databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Secret.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecretsUcAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecretsUcImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecretsUcService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Securable.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKindManifest.java linguist-generated=true @@ -662,6 +671,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePolic databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRegisteredModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRequestExternalLineage.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateSchema.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateSecretRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStorageCredential.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateVolumeRequestContent.java linguist-generated=true @@ -2668,11 +2678,13 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablem databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountSetting.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicy.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyAppsRuntimeDestination.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyAuthentication.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyAuthenticationIdentity.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyAuthenticationIdentityPrincipalType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyAuthenticationIdentityType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyIpRanges.java 
linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyLakebaseRuntimeDestination.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyPublicAccess.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyPublicAccessRestrictionMode.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyPublicIngressRule.java linguist-generated=true @@ -2947,6 +2959,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePers databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePrivateEndpointRule.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateRestrictWorkspaceAdminsSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateSqlResultsDownloadRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateTokenRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateTokenResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateWorkspaceNetworkOptionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceConfAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceConfImpl.java linguist-generated=true @@ -3341,6 +3355,29 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesServi databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Widget.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WidgetOptions.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WidgetPosition.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/App.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Connection.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/CreateSupervisorAgentRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/CreateToolRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/DeleteSupervisorAgentRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/DeleteToolRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/GenieSpace.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/GetSupervisorAgentRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/GetToolRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/KnowledgeAssistant.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/ListSupervisorAgentsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/ListSupervisorAgentsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/ListToolsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/ListToolsResponse.java 
linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/SupervisorAgent.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/SupervisorAgentsAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/SupervisorAgentsImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/SupervisorAgentsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Tool.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/UcFunction.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/UpdateSupervisorAgentRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/UpdateToolRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Volume.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/CreateTagAssignmentRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/CreateTagPolicyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/DeleteTagAssignmentRequest.java linguist-generated=true diff --git a/.github/workflows/next-changelog.yml b/.github/workflows/next-changelog.yml index 847aadb8e..475157d1f 100755 --- a/.github/workflows/next-changelog.yml +++ b/.github/workflows/next-changelog.yml @@ -11,8 +11,8 @@ jobs: # Allow Dependabot PRs to pass without a changelog entry if: github.actor != 'dependabot[bot]' runs-on: - group: databricks-deco-testing-runner-group - labels: ubuntu-latest-deco + group: databricks-protected-runner-group + labels: linux-ubuntu-latest 
steps: - name: Checkout code diff --git a/.github/workflows/tagging.yml b/.github/workflows/tagging.yml index c3c6d2d00..e22f794e0 100755 --- a/.github/workflows/tagging.yml +++ b/.github/workflows/tagging.yml @@ -32,8 +32,8 @@ jobs: github.repository == 'databricks/databricks-sdk-java' environment: "release-is" runs-on: - group: databricks-deco-testing-runner-group - labels: ubuntu-latest-deco + group: databricks-protected-runner-group + labels: linux-ubuntu-latest steps: - name: Generate GitHub App Token id: generate-token @@ -62,3 +62,11 @@ jobs: GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }} GITHUB_REPOSITORY: ${{ github.repository }} run: uv run --locked tagging.py + + - name: Upload created tags artifact + if: always() + uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0 + with: + name: created-tags + path: created_tags.json + if-no-files-found: ignore diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index b25d748cf..bc7d251d6 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -20,3 +20,27 @@ * Migrated internal SDK classes to the logging abstraction. The SDK now supports SLF4J, `java.util.logging`, or a custom backend via `LoggerFactory.setDefault()`. ### API Changes +* Add `com.databricks.sdk.service.supervisoragents` package. +* Add `workspaceClient.secretsUc()` service. +* Add `workspaceClient.supervisorAgents()` service. +* Add `update()` method for `workspaceClient.tokens()` service. +* Add `etag` field for `com.databricks.sdk.service.dashboards.GenieSpace`. +* Add `etag` field for `com.databricks.sdk.service.dashboards.GenieUpdateSpaceRequest`. +* Add `branchId` field for `com.databricks.sdk.service.postgres.BranchStatus`. +* Add `catalogId` field for `com.databricks.sdk.service.postgres.CatalogCatalogStatus`. +* Add `databaseId` field for `com.databricks.sdk.service.postgres.DatabaseDatabaseStatus`. +* Add `endpointId` field for `com.databricks.sdk.service.postgres.EndpointStatus`. 
+* Add `projectId` field for `com.databricks.sdk.service.postgres.ProjectStatus`. +* Add `roleId` field for `com.databricks.sdk.service.postgres.RoleRoleStatus`. +* Add `project` field for `com.databricks.sdk.service.postgres.SyncedTableSyncedTableStatus`. +* Add `manual` field for `com.databricks.sdk.service.provisioning.CreateGcpKeyInfo`. +* Add `manual` field for `com.databricks.sdk.service.provisioning.GcpKeyInfo`. +* Add `appsRuntime` and `lakebaseRuntime` fields for `com.databricks.sdk.service.settings.CustomerFacingIngressNetworkPolicyRequestDestination`. +* Add `blockedInternetDestinations` field for `com.databricks.sdk.service.settings.EgressNetworkPolicyNetworkAccessPolicy`. +* Add `columnsToSync` field for `com.databricks.sdk.service.vectorsearch.DeltaSyncVectorIndexSpecResponse`. +* Add `BREAKING_CHANGE` enum value for `com.databricks.sdk.service.jobs.TerminationCodeCode`. +* [Breaking] Change `updateCatalogConfig()` method for `workspaceClient.dataClassification()` service. Method path has changed. +* [Breaking] Change `updateDefaultWorkspaceBaseEnvironment()` method for `workspaceClient.environments()` service. Method path has changed. +* [Breaking] Change `updateKnowledgeAssistant()` method for `workspaceClient.knowledgeAssistants()` service. Method path has changed. +* [Breaking] Change `updateBranch()`, `updateDatabase()`, `updateEndpoint()`, `updateProject()` and `updateRole()` methods for `workspaceClient.postgres()` service. Method path has changed. +* [Breaking] Change `updateDefaultWarehouseOverride()` method for `workspaceClient.warehouses()` service. Method path has changed. 
\ No newline at end of file diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java index 2a9349cf0..8e3b34e3a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java @@ -52,6 +52,8 @@ import com.databricks.sdk.service.catalog.RfaService; import com.databricks.sdk.service.catalog.SchemasAPI; import com.databricks.sdk.service.catalog.SchemasService; +import com.databricks.sdk.service.catalog.SecretsUcAPI; +import com.databricks.sdk.service.catalog.SecretsUcService; import com.databricks.sdk.service.catalog.StorageCredentialsAPI; import com.databricks.sdk.service.catalog.StorageCredentialsService; import com.databricks.sdk.service.catalog.SystemSchemasAPI; @@ -246,6 +248,8 @@ import com.databricks.sdk.service.sql.StatementExecutionService; import com.databricks.sdk.service.sql.WarehousesAPI; import com.databricks.sdk.service.sql.WarehousesService; +import com.databricks.sdk.service.supervisoragents.SupervisorAgentsAPI; +import com.databricks.sdk.service.supervisoragents.SupervisorAgentsService; import com.databricks.sdk.service.tags.TagPoliciesAPI; import com.databricks.sdk.service.tags.TagPoliciesService; import com.databricks.sdk.service.tags.WorkspaceEntityTagAssignmentsAPI; @@ -367,6 +371,7 @@ public class WorkspaceClient { private RfaAPI rfaAPI; private SchemasAPI schemasAPI; private SecretsExt secretsAPI; + private SecretsUcAPI secretsUcAPI; private ServicePrincipalSecretsProxyAPI servicePrincipalSecretsProxyAPI; private ServicePrincipalsV2API servicePrincipalsV2API; private ServingEndpointsAPI servingEndpointsAPI; @@ -375,6 +380,7 @@ public class WorkspaceClient { private SharesAPI sharesAPI; private StatementExecutionAPI statementExecutionAPI; private StorageCredentialsAPI storageCredentialsAPI; + private SupervisorAgentsAPI supervisorAgentsAPI; 
private SystemSchemasAPI systemSchemasAPI; private TableConstraintsAPI tableConstraintsAPI; private TablesAPI tablesAPI; @@ -503,6 +509,7 @@ public WorkspaceClient(DatabricksConfig config) { rfaAPI = new RfaAPI(apiClient); schemasAPI = new SchemasAPI(apiClient); secretsAPI = new SecretsExt(apiClient); + secretsUcAPI = new SecretsUcAPI(apiClient); servicePrincipalSecretsProxyAPI = new ServicePrincipalSecretsProxyAPI(apiClient); servicePrincipalsV2API = new ServicePrincipalsV2API(apiClient); servingEndpointsAPI = new ServingEndpointsAPI(apiClient); @@ -512,6 +519,7 @@ public WorkspaceClient(DatabricksConfig config) { sharesAPI = new SharesAPI(apiClient); statementExecutionAPI = new StatementExecutionAPI(apiClient); storageCredentialsAPI = new StorageCredentialsAPI(apiClient); + supervisorAgentsAPI = new SupervisorAgentsAPI(apiClient); systemSchemasAPI = new SystemSchemasAPI(apiClient); tableConstraintsAPI = new TableConstraintsAPI(apiClient); tablesAPI = new TablesAPI(apiClient); @@ -1005,10 +1013,6 @@ public FeatureStoreAPI featureStore() { * HTTP methods GET, HEAD, PUT, and DELETE to manage files and directories specified using their * URI path. The path is always absolute. * - *
Some Files API client features are currently experimental. To enable them, set - * `enable_experimental_files_api_client = True` in your configuration profile or use the - * environment variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`. - * *
Use of Files API may incur Databricks data transfer charges. * *
[Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html @@ -1298,8 +1302,8 @@ public PermissionMigrationAPI permissionMigration() { * which users can manage or use apps. * **[Cluster permissions](:service:clusters)** — Manage * which users can manage, restart, or attach to clusters. * **[Cluster policy * permissions](:service:clusterpolicies)** — Manage which users can use cluster policies. * - * **[Delta Live Tables pipeline permissions](:service:pipelines)** — Manage which users can view, - * manage, run, cancel, or own a Delta Live Tables pipeline. * **[Job + * **[Spark Declarative Pipelines permissions](:service:pipelines)** — Manage which users can + * view, manage, run, cancel, or own a Spark Declarative Pipeline. * **[Job * permissions](:service:jobs)** — Manage which users can view, manage, trigger, cancel, or own a * job. * **[MLflow experiment permissions](:service:experiments)** — Manage which users can read, * edit, or manage MLflow experiments. * **[MLflow registered model @@ -1712,6 +1716,18 @@ public SecretsExt secrets() { return secretsAPI; } + /** + * A secret is a Unity Catalog securable object that stores sensitive credential data (such as + * passwords, tokens, and keys) within a three-level namespace + * (**catalog_name.schema_name.secret_name**). + * + *
Secrets can be managed using standard Unity Catalog permissions and are scoped to a schema + * within a catalog. + */ + public SecretsUcAPI secretsUc() { + return secretsUcAPI; + } + /** * These APIs enable administrators to manage service principal secrets at the workspace level. To * use these APIs, the service principal must be first added to the current workspace. @@ -1897,6 +1913,11 @@ public StorageCredentialsAPI storageCredentials() { return storageCredentialsAPI; } + /** Manage Supervisor Agents and related resources. */ + public SupervisorAgentsAPI supervisorAgents() { + return supervisorAgentsAPI; + } + /** * A system schema is a schema that lives within the system catalog. A system schema may contain * information about customer usage of Unity Catalog such as audit-logs, billing-logs, lineage @@ -1963,10 +1984,10 @@ public TagPoliciesAPI tagPolicies() { * a metastore admin needs to enable the external_access_enabled flag (off by default) at the * metastore level. A user needs to be granted the EXTERNAL USE LOCATION permission by external * location owner. For requests on existing external tables, user also needs to be granted the - * EXTERNAL USE SCHEMA permission at the schema level by catalog admin. + * EXTERNAL USE SCHEMA permission at the schema level by catalog owner. * *
Note that EXTERNAL USE SCHEMA is a schema level permission that can only be granted by - * catalog admin explicitly and is not included in schema ownership or ALL PRIVILEGES on the + * catalog owner explicitly and is not included in schema ownership or ALL PRIVILEGES on the * schema for security reasons. Similarly, EXTERNAL USE LOCATION is an external location level * permission that can only be granted by external location owner explicitly and is not included * in external location ownership or ALL PRIVILEGES on the external location for security reasons. @@ -1990,8 +2011,8 @@ public TemporaryPathCredentialsAPI temporaryPathCredentials() { * reducing the risk of unauthorized access or misuse. To use the temporary table credentials API, * a metastore admin needs to enable the external_access_enabled flag (off by default) at the * metastore level, and user needs to be granted the EXTERNAL USE SCHEMA permission at the schema - * level by catalog admin. Note that EXTERNAL USE SCHEMA is a schema level permission that can - * only be granted by catalog admin explicitly and is not included in schema ownership or ALL + * level by catalog owner. Note that EXTERNAL USE SCHEMA is a schema level permission that can + * only be granted by catalog owner explicitly and is not included in schema ownership or ALL * PRIVILEGES on the schema for security reasons. */ public TemporaryTableCredentialsAPI temporaryTableCredentials() { @@ -3293,6 +3314,17 @@ public WorkspaceClient withSecretsAPI(SecretsExt secrets) { return this; } + /** Replace the default SecretsUcService with a custom implementation. */ + public WorkspaceClient withSecretsUcImpl(SecretsUcService secretsUc) { + return this.withSecretsUcAPI(new SecretsUcAPI(secretsUc)); + } + + /** Replace the default SecretsUcAPI with a custom implementation. 
*/ + public WorkspaceClient withSecretsUcAPI(SecretsUcAPI secretsUc) { + this.secretsUcAPI = secretsUc; + return this; + } + /** Replace the default ServicePrincipalSecretsProxyService with a custom implementation. */ public WorkspaceClient withServicePrincipalSecretsProxyImpl( ServicePrincipalSecretsProxyService servicePrincipalSecretsProxy) { @@ -3388,6 +3420,17 @@ public WorkspaceClient withStorageCredentialsAPI(StorageCredentialsAPI storageCr return this; } + /** Replace the default SupervisorAgentsService with a custom implementation. */ + public WorkspaceClient withSupervisorAgentsImpl(SupervisorAgentsService supervisorAgents) { + return this.withSupervisorAgentsAPI(new SupervisorAgentsAPI(supervisorAgents)); + } + + /** Replace the default SupervisorAgentsAPI with a custom implementation. */ + public WorkspaceClient withSupervisorAgentsAPI(SupervisorAgentsAPI supervisorAgents) { + this.supervisorAgentsAPI = supervisorAgents; + return this; + } + /** Replace the default SystemSchemasService with a custom implementation. */ public WorkspaceClient withSystemSchemasImpl(SystemSchemasService systemSchemas) { return this.withSystemSchemasAPI(new SystemSchemasAPI(systemSchemas)); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java index 4b5c6bb3e..8784fa394 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java @@ -8,7 +8,6 @@ import java.util.Map; import java.util.Objects; -/** Next ID: 25 */ @Generated public class ConnectionInfo { /** User-provided free-form text description. 
*/ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java index a289a2e5a..8136de14c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java @@ -4,7 +4,7 @@ import com.databricks.sdk.support.Generated; -/** Next Id: 75 */ +/** Next Id: 77 */ @Generated public enum ConnectionType { BIGQUERY, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSecretRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSecretRequest.java new file mode 100755 index 000000000..f62a5fb62 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSecretRequest.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateSecretRequest { + /** + * The secret object to create. The **name**, **catalog_name**, **schema_name**, and **value** + * fields are required. 
+ */ + @JsonProperty("secret") + private Secret secret; + + public CreateSecretRequest setSecret(Secret secret) { + this.secret = secret; + return this; + } + + public Secret getSecret() { + return secret; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateSecretRequest that = (CreateSecretRequest) o; + return Objects.equals(secret, that.secret); + } + + @Override + public int hashCode() { + return Objects.hash(secret); + } + + @Override + public String toString() { + return new ToStringer(CreateSecretRequest.class).add("secret", secret).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSecretRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSecretRequest.java new file mode 100755 index 000000000..6ad224c23 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSecretRequest.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class DeleteSecretRequest { + /** + * The three-level (fully qualified) name of the secret (for example, + * **catalog_name.schema_name.secret_name**). 
+ */ + @JsonIgnore private String fullName; + + public DeleteSecretRequest setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteSecretRequest that = (DeleteSecretRequest) o; + return Objects.equals(fullName, that.fullName); + } + + @Override + public int hashCode() { + return Objects.hash(fullName); + } + + @Override + public String toString() { + return new ToStringer(DeleteSecretRequest.class).add("fullName", fullName).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Dependency.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Dependency.java index 11a937f7d..00f9f76e5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Dependency.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Dependency.java @@ -9,7 +9,7 @@ /** * A dependency of a SQL object. One of the following fields must be defined: __table__, - * __function__, __connection__, or __credential__. + * __function__, __connection__, __credential__, __volume__, or __secret__. */ @Generated public class Dependency { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSecretRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSecretRequest.java new file mode 100755 index 000000000..2e520e4c5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSecretRequest.java @@ -0,0 +1,66 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GetSecretRequest { + /** + * The three-level (fully qualified) name of the secret (for example, + * **catalog_name.schema_name.secret_name**). + */ + @JsonIgnore private String fullName; + + /** + * Whether to include secrets in the response for which you only have the **BROWSE** privilege, + * which limits access to metadata. + */ + @JsonIgnore + @QueryParam("include_browse") + private Boolean includeBrowse; + + public GetSecretRequest setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + public GetSecretRequest setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetSecretRequest that = (GetSecretRequest) o; + return Objects.equals(fullName, that.fullName) + && Objects.equals(includeBrowse, that.includeBrowse); + } + + @Override + public int hashCode() { + return Objects.hash(fullName, includeBrowse); + } + + @Override + public String toString() { + return new ToStringer(GetSecretRequest.class) + .add("fullName", fullName) + .add("includeBrowse", includeBrowse) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSecretsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSecretsRequest.java new file mode 100755 index 000000000..723c6f626 --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSecretsRequest.java @@ -0,0 +1,128 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class ListSecretsRequest { + /** + * The name of the catalog under which to list secrets. Both **catalog_name** and **schema_name** + * must be specified together. + */ + @JsonIgnore + @QueryParam("catalog_name") + private String catalogName; + + /** + * Whether to include secrets in the response for which you only have the **BROWSE** privilege, + * which limits access to metadata. + */ + @JsonIgnore + @QueryParam("include_browse") + private Boolean includeBrowse; + + /** + * Maximum number of secrets to return. + * + *
- If not specified, at most 10000 secrets are returned. - If set to a value greater than 0,
+ * the page length is the minimum of this value and 10000. - If set to 0, the page length is set
+ * to 10000. - If set to a value less than 0, an invalid parameter error is returned.
+ */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /**
+ * Opaque pagination token to go to the next page based on previous query. The maximum page length
+ * is determined by a server configured value.
+ */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ /**
+ * The name of the schema under which to list secrets. Both **catalog_name** and **schema_name**
+ * must be specified together.
+ */
+ @JsonIgnore
+ @QueryParam("schema_name")
+ private String schemaName;
+
+ public ListSecretsRequest setCatalogName(String catalogName) {
+ this.catalogName = catalogName;
+ return this;
+ }
+
+ public String getCatalogName() {
+ return catalogName;
+ }
+
+ public ListSecretsRequest setIncludeBrowse(Boolean includeBrowse) {
+ this.includeBrowse = includeBrowse;
+ return this;
+ }
+
+ public Boolean getIncludeBrowse() {
+ return includeBrowse;
+ }
+
+ public ListSecretsRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListSecretsRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ public ListSecretsRequest setSchemaName(String schemaName) {
+ this.schemaName = schemaName;
+ return this;
+ }
+
+ public String getSchemaName() {
+ return schemaName;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListSecretsRequest that = (ListSecretsRequest) o;
+ return Objects.equals(catalogName, that.catalogName)
+ && Objects.equals(includeBrowse, that.includeBrowse)
+ && Objects.equals(pageSize, that.pageSize)
+ && Objects.equals(pageToken, that.pageToken)
+ && Objects.equals(schemaName, that.schemaName);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(catalogName, includeBrowse, pageSize, pageToken, schemaName);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListSecretsRequest.class)
+ .add("catalogName", catalogName)
+ .add("includeBrowse", includeBrowse)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .add("schemaName", schemaName)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSecretsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSecretsResponse.java
new file mode 100755
index 000000000..96b81a1e6
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSecretsResponse.java
@@ -0,0 +1,64 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Response message for ListSecrets. */
+@Generated
+public class ListSecretsResponse {
+ /**
+ * Opaque token to retrieve the next page of results. Absent if there are no more pages.
+ * **page_token** should be set to this value for the next request.
+ */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ /** An array of secret objects. */
+ @JsonProperty("secrets")
+  private Collection<Secret> secrets;
+
+  public ListSecretsResponse setNextPageToken(String nextPageToken) {
+    this.nextPageToken = nextPageToken;
+    return this;
+  }
+
+  public String getNextPageToken() {
+    return nextPageToken;
+  }
+
+  public ListSecretsResponse setSecrets(Collection<Secret> secrets) {
+    this.secrets = secrets;
+    return this;
+  }
+
+  public Collection<Secret> getSecrets() {
+    return secrets;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+    ListSecretsResponse that = (ListSecretsResponse) o;
+    return Objects.equals(nextPageToken, that.nextPageToken)
+        && Objects.equals(secrets, that.secrets);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(nextPageToken, secrets);
+  }
+
+  @Override
+  public String toString() {
+    return new ToStringer(ListSecretsResponse.class)
+        .add("nextPageToken", nextPageToken)
+        .add("secrets", secrets)
+        .toString();
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecretsUcAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecretsUcAPI.java
new file mode 100755
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecretsUcAPI.java
@@ -0,0 +1,117 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * A secret is a Unity Catalog securable object that stores sensitive credential data (such as
+ * passwords, tokens, and keys) within a three-level namespace
+ * (**catalog_name.schema_name.secret_name**).
+ *
+ * <p>Secrets can be managed using standard Unity Catalog permissions and are scoped to a schema
+ * within a catalog.
+ */
+@Generated
+public class SecretsUcAPI {
+ private static final Logger LOG = LoggerFactory.getLogger(SecretsUcAPI.class);
+
+ private final SecretsUcService impl;
+
+ /** Regular-use constructor */
+ public SecretsUcAPI(ApiClient apiClient) {
+ impl = new SecretsUcImpl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public SecretsUcAPI(SecretsUcService mock) {
+ impl = mock;
+ }
+
+  /**
+   * Creates a new secret in Unity Catalog.
+   *
+   * <p>You must be the owner of the parent schema or have the **CREATE_SECRET** and **USE SCHEMA**
+   * privileges on the parent schema and **USE CATALOG** on the parent catalog.
+   *
+   * <p>The secret is stored in the specified catalog and schema, and the **value** field contains
+   * the sensitive data to be securely stored.
+   */
+ public Secret createSecret(CreateSecretRequest request) {
+ return impl.createSecret(request);
+ }
+
+ public void deleteSecret(String fullName) {
+ deleteSecret(new DeleteSecretRequest().setFullName(fullName));
+ }
+
+  /**
+   * Deletes a secret by its three-level (fully qualified) name.
+   *
+   * <p>You must be the owner of the secret or a metastore admin.
+   */
+ public void deleteSecret(DeleteSecretRequest request) {
+ impl.deleteSecret(request);
+ }
+
+ public Secret getSecret(String fullName) {
+ return getSecret(new GetSecretRequest().setFullName(fullName));
+ }
+
+  /**
+   * Gets a secret by its three-level (fully qualified) name.
+   *
+   * <p>You must be a metastore admin, the owner of the secret, or have the **MANAGE** privilege on
+   * the secret.
+   *
+   * <p>The secret value isn't returned by default. To retrieve it, you must also have the
+   * **READ_SECRET** privilege and set **include_value** to true in the request.
+   */
+ public Secret getSecret(GetSecretRequest request) {
+ return impl.getSecret(request);
+ }
+
+  /**
+   * Lists secrets in Unity Catalog.
+   *
+   * <p>You must be a metastore admin, the owner of the secret, or have the **MANAGE** privilege on
+   * the secret.
+   *
+   * <p>Both **catalog_name** and **schema_name** must be specified together to filter secrets
+   * within a specific schema. Results are paginated; use the **page_token** field from the response
+   * to retrieve subsequent pages.
+   */
+  public Iterable<Secret> listSecrets(ListSecretsRequest request) {
+    return new Paginator<>(
+        request,
+        impl::listSecrets,
+        ListSecretsResponse::getSecrets,
+        response -> {
+          String token = response.getNextPageToken();
+          if (token == null || token.isEmpty()) {
+            return null;
+          }
+          return request.setPageToken(token);
+        });
+  }
+
+  /**
+   * Updates an existing secret in Unity Catalog.
+   *
+   * <p>You must be the owner of the secret or a metastore admin. If you are a metastore admin, only
+ * the **owner** field can be changed.
+ *
+   * <p>Use the **update_mask** field to specify which fields to update. Supported updatable fields
+ * include **value**, **comment**, **owner**, and **expire_time**.
+ */
+ public Secret updateSecret(UpdateSecretRequest request) {
+ return impl.updateSecret(request);
+ }
+
+ public SecretsUcService impl() {
+ return impl;
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecretsUcImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecretsUcImpl.java
new file mode 100755
index 000000000..5d8de8806
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecretsUcImpl.java
@@ -0,0 +1,105 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
+import com.databricks.sdk.support.Generated;
+import java.io.IOException;
+
+/** Package-local implementation of SecretsUc */
+@Generated
+class SecretsUcImpl implements SecretsUcService {
+ private final ApiClient apiClient;
+
+ public SecretsUcImpl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public Secret createSecret(CreateSecretRequest request) {
+ String path = "/api/2.1/unity-catalog/secrets";
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request.getSecret()));
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ if (apiClient.workspaceId() != null) {
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ return apiClient.execute(req, Secret.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public void deleteSecret(DeleteSecretRequest request) {
+ String path = String.format("/api/2.1/unity-catalog/secrets/%s", request.getFullName());
+ try {
+ Request req = new Request("DELETE", path);
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ if (apiClient.workspaceId() != null) {
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ apiClient.execute(req, Void.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public Secret getSecret(GetSecretRequest request) {
+ String path = String.format("/api/2.1/unity-catalog/secrets/%s", request.getFullName());
+ try {
+ Request req = new Request("GET", path);
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ if (apiClient.workspaceId() != null) {
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ return apiClient.execute(req, Secret.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public ListSecretsResponse listSecrets(ListSecretsRequest request) {
+ String path = "/api/2.1/unity-catalog/secrets";
+ try {
+ Request req = new Request("GET", path);
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ if (apiClient.workspaceId() != null) {
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ return apiClient.execute(req, ListSecretsResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public Secret updateSecret(UpdateSecretRequest request) {
+ String path = String.format("/api/2.1/unity-catalog/secrets/%s", request.getFullName());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request.getSecret()));
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ if (apiClient.workspaceId() != null) {
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ return apiClient.execute(req, Secret.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecretsUcService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecretsUcService.java
new file mode 100755
index 000000000..8e753c35b
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecretsUcService.java
@@ -0,0 +1,71 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * A secret is a Unity Catalog securable object that stores sensitive credential data (such as
+ * passwords, tokens, and keys) within a three-level namespace
+ * (**catalog_name.schema_name.secret_name**).
+ *
+ * <p>Secrets can be managed using standard Unity Catalog permissions and are scoped to a schema
+ * within a catalog.
+ *
+ * <p>This is the high-level interface, that contains generated methods.
+ *
+ * <p>Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface SecretsUcService {
+  /**
+   * Creates a new secret in Unity Catalog.
+   *
+   * <p>You must be the owner of the parent schema or have the **CREATE_SECRET** and **USE SCHEMA**
+   * privileges on the parent schema and **USE CATALOG** on the parent catalog.
+   *
+   * <p>The secret is stored in the specified catalog and schema, and the **value** field contains
+   * the sensitive data to be securely stored.
+   */
+ Secret createSecret(CreateSecretRequest createSecretRequest);
+
+  /**
+   * Deletes a secret by its three-level (fully qualified) name.
+   *
+   * <p>You must be the owner of the secret or a metastore admin.
+   */
+ void deleteSecret(DeleteSecretRequest deleteSecretRequest);
+
+  /**
+   * Gets a secret by its three-level (fully qualified) name.
+   *
+   * <p>You must be a metastore admin, the owner of the secret, or have the **MANAGE** privilege on
+   * the secret.
+   *
+   * <p>The secret value isn't returned by default. To retrieve it, you must also have the
+   * **READ_SECRET** privilege and set **include_value** to true in the request.
+   */
+ Secret getSecret(GetSecretRequest getSecretRequest);
+
+  /**
+   * Lists secrets in Unity Catalog.
+   *
+   * <p>You must be a metastore admin, the owner of the secret, or have the **MANAGE** privilege on
+   * the secret.
+   *
+   * <p>Both **catalog_name** and **schema_name** must be specified together to filter secrets
+   * within a specific schema. Results are paginated; use the **page_token** field from the response
+   * to retrieve subsequent pages.
+   */
+ ListSecretsResponse listSecrets(ListSecretsRequest listSecretsRequest);
+
+  /**
+   * Updates an existing secret in Unity Catalog.
+   *
+   * <p>You must be the owner of the secret or a metastore admin. If you are a metastore admin, only
+   * the **owner** field can be changed.
+   *
+   * <p>Use the **update_mask** field to specify which fields to update. Supported updatable fields
+   * include **value**, **comment**, **owner**, and **expire_time**.
+   */
+ Secret updateSecret(UpdateSecretRequest updateSecretRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java
index 9db62bb12..bebea1485 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java
@@ -4,7 +4,7 @@
import com.databricks.sdk.support.Generated;
-/** Latest kind: CONNECTION_VEEVA_VAULT_OAUTH_M2M = 311; Next id: 312 */
+/** Latest kind: ENDPOINT_LLM_PROVIDER = 317; Next id: 318 */
@Generated
public enum SecurableKind {
TABLE_DB_STORAGE,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryPathCredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryPathCredentialsAPI.java
index a8d040a53..933a88c22 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryPathCredentialsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryPathCredentialsAPI.java
@@ -19,10 +19,10 @@
* admin needs to enable the external_access_enabled flag (off by default) at the metastore level. A
* user needs to be granted the EXTERNAL USE LOCATION permission by external location owner. For
* requests on existing external tables, user also needs to be granted the EXTERNAL USE SCHEMA
- * permission at the schema level by catalog admin.
+ * permission at the schema level by catalog owner.
*
 * <p>Note that EXTERNAL USE SCHEMA is a schema level permission that can only be granted by catalog
- * admin explicitly and is not included in schema ownership or ALL PRIVILEGES on the schema for
+ * owner explicitly and is not included in schema ownership or ALL PRIVILEGES on the schema for
* security reasons. Similarly, EXTERNAL USE LOCATION is an external location level permission that
* can only be granted by external location owner explicitly and is not included in external
* location ownership or ALL PRIVILEGES on the external location for security reasons.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryPathCredentialsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryPathCredentialsService.java
index 9a43feb56..4d1b81ece 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryPathCredentialsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryPathCredentialsService.java
@@ -16,10 +16,10 @@
* admin needs to enable the external_access_enabled flag (off by default) at the metastore level. A
* user needs to be granted the EXTERNAL USE LOCATION permission by external location owner. For
* requests on existing external tables, user also needs to be granted the EXTERNAL USE SCHEMA
- * permission at the schema level by catalog admin.
+ * permission at the schema level by catalog owner.
*
 * <p>Note that EXTERNAL USE SCHEMA is a schema level permission that can only be granted by catalog
- * admin explicitly and is not included in schema ownership or ALL PRIVILEGES on the schema for
+ * owner explicitly and is not included in schema ownership or ALL PRIVILEGES on the schema for
* security reasons. Similarly, EXTERNAL USE LOCATION is an external location level permission that
* can only be granted by external location owner explicitly and is not included in external
* location ownership or ALL PRIVILEGES on the external location for security reasons.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsAPI.java
index 2d7da4059..5a53f265a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsAPI.java
@@ -18,8 +18,8 @@
* the risk of unauthorized access or misuse. To use the temporary table credentials API, a
* metastore admin needs to enable the external_access_enabled flag (off by default) at the
* metastore level, and user needs to be granted the EXTERNAL USE SCHEMA permission at the schema
- * level by catalog admin. Note that EXTERNAL USE SCHEMA is a schema level permission that can only
- * be granted by catalog admin explicitly and is not included in schema ownership or ALL PRIVILEGES
+ * level by catalog owner. Note that EXTERNAL USE SCHEMA is a schema level permission that can only
+ * be granted by catalog owner explicitly and is not included in schema ownership or ALL PRIVILEGES
* on the schema for security reasons.
*/
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsService.java
index 10a02b1cd..f5057721c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsService.java
@@ -15,8 +15,8 @@
* the risk of unauthorized access or misuse. To use the temporary table credentials API, a
* metastore admin needs to enable the external_access_enabled flag (off by default) at the
* metastore level, and user needs to be granted the EXTERNAL USE SCHEMA permission at the schema
- * level by catalog admin. Note that EXTERNAL USE SCHEMA is a schema level permission that can only
- * be granted by catalog admin explicitly and is not included in schema ownership or ALL PRIVILEGES
+ * level by catalog owner. Note that EXTERNAL USE SCHEMA is a schema level permission that can only
+ * be granted by catalog owner explicitly and is not included in schema ownership or ALL PRIVILEGES
* on the schema for security reasons.
*
* This is the high-level interface, that contains generated methods.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateSecretRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateSecretRequest.java
new file mode 100755
index 000000000..0e7298448
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateSecretRequest.java
@@ -0,0 +1,86 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.protobuf.FieldMask;
+import java.util.Objects;
+
+@Generated
+public class UpdateSecretRequest {
+ /**
+ * The three-level (fully qualified) name of the secret (for example,
+ * **catalog_name.schema_name.secret_name**).
+ */
+ @JsonIgnore private String fullName;
+
+ /**
+ * The secret object containing the fields to update. Only fields specified in **update_mask**
+ * will be updated.
+ */
+ @JsonProperty("secret")
+ private Secret secret;
+
+ /**
+ * The field mask specifying which fields of the secret to update. Supported fields: **value**,
+ * **comment**, **owner**, **expire_time**.
+ */
+ @JsonIgnore
+ @QueryParam("update_mask")
+ private FieldMask updateMask;
+
+ public UpdateSecretRequest setFullName(String fullName) {
+ this.fullName = fullName;
+ return this;
+ }
+
+ public String getFullName() {
+ return fullName;
+ }
+
+ public UpdateSecretRequest setSecret(Secret secret) {
+ this.secret = secret;
+ return this;
+ }
+
+ public Secret getSecret() {
+ return secret;
+ }
+
+ public UpdateSecretRequest setUpdateMask(FieldMask updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public FieldMask getUpdateMask() {
+ return updateMask;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateSecretRequest that = (UpdateSecretRequest) o;
+ return Objects.equals(fullName, that.fullName)
+ && Objects.equals(secret, that.secret)
+ && Objects.equals(updateMask, that.updateMask);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(fullName, secret, updateMask);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateSecretRequest.class)
+ .add("fullName", fullName)
+ .add("secret", secret)
+ .add("updateMask", updateMask)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java
index dc6582a1f..ed2aff3d6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java
@@ -10,7 +10,7 @@
/**
* The environment entity used to preserve serverless environment side panel, jobs' environment for
- * non-notebook task, and DLT's environment for classic and serverless pipelines. In this minimal
+ * non-notebook task, and SDP's environment for classic and serverless pipelines. In this minimal
* environment spec, only pip and java dependencies are supported.
*/
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersAPI.java
index 57952042f..7f3fa2556 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersAPI.java
@@ -45,7 +45,7 @@ public PolicyComplianceForClustersAPI(PolicyComplianceForClustersService mock) {
* If a cluster is updated while in a `TERMINATED` state, it will remain `TERMINATED`. The next
* time the cluster is started, the new attributes will take effect.
*
- * Clusters created by the Databricks Jobs, DLT, or Models services cannot be enforced by this
+ * Clusters created by the Databricks Jobs, SDP, or Models services cannot be enforced by this
* API. Instead, use the "Enforce job policy compliance" API to enforce policy compliance on jobs.
*/
public EnforceClusterComplianceResponse enforceCompliance(
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersService.java
index c7a70552e..cd613370b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersService.java
@@ -31,7 +31,7 @@ public interface PolicyComplianceForClustersService {
* If a cluster is updated while in a `TERMINATED` state, it will remain `TERMINATED`. The next
* time the cluster is started, the new attributes will take effect.
*
- * Clusters created by the Databricks Jobs, DLT, or Models services cannot be enforced by this
+ * Clusters created by the Databricks Jobs, SDP, or Models services cannot be enforced by this
* API. Instead, use the "Enforce job policy compliance" API to enforce policy compliance on jobs.
*/
EnforceClusterComplianceResponse enforceCompliance(
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSpace.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSpace.java
index 30992dbc6..6c0a4fce3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSpace.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSpace.java
@@ -13,6 +13,13 @@ public class GenieSpace {
@JsonProperty("description")
private String description;
+ /**
+ * ETag for this space. Pass this value back in the update request to prevent overwriting
+ * concurrent changes.
+ */
+ @JsonProperty("etag")
+ private String etag;
+
/** Parent folder path of the Genie Space */
@JsonProperty("parent_path")
private String parentPath;
@@ -47,6 +54,15 @@ public String getDescription() {
return description;
}
+ public GenieSpace setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
public GenieSpace setParentPath(String parentPath) {
this.parentPath = parentPath;
return this;
@@ -98,6 +114,7 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
GenieSpace that = (GenieSpace) o;
return Objects.equals(description, that.description)
+ && Objects.equals(etag, that.etag)
&& Objects.equals(parentPath, that.parentPath)
&& Objects.equals(serializedSpace, that.serializedSpace)
&& Objects.equals(spaceId, that.spaceId)
@@ -107,13 +124,15 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
- return Objects.hash(description, parentPath, serializedSpace, spaceId, title, warehouseId);
+ return Objects.hash(
+ description, etag, parentPath, serializedSpace, spaceId, title, warehouseId);
}
@Override
public String toString() {
return new ToStringer(GenieSpace.class)
.add("description", description)
+ .add("etag", etag)
.add("parentPath", parentPath)
.add("serializedSpace", serializedSpace)
.add("spaceId", spaceId)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieUpdateSpaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieUpdateSpaceRequest.java
index 938ac63ba..e81c46a49 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieUpdateSpaceRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieUpdateSpaceRequest.java
@@ -14,6 +14,13 @@ public class GenieUpdateSpaceRequest {
@JsonProperty("description")
private String description;
+ /**
+ * ETag returned by a previous GET or UPDATE. When set, the update will fail if the space has been
+ * modified since. Omit to apply the update unconditionally.
+ */
+ @JsonProperty("etag")
+ private String etag;
+
/**
* The contents of the Genie Space in serialized string form (full replacement). Use the [Get
* Genie Space](:method:genie/getspace) API to retrieve an example response, which includes the
@@ -43,6 +50,15 @@ public String getDescription() {
return description;
}
+ public GenieUpdateSpaceRequest setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
public GenieUpdateSpaceRequest setSerializedSpace(String serializedSpace) {
this.serializedSpace = serializedSpace;
return this;
@@ -85,6 +101,7 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
GenieUpdateSpaceRequest that = (GenieUpdateSpaceRequest) o;
return Objects.equals(description, that.description)
+ && Objects.equals(etag, that.etag)
&& Objects.equals(serializedSpace, that.serializedSpace)
&& Objects.equals(spaceId, that.spaceId)
&& Objects.equals(title, that.title)
@@ -93,13 +110,14 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
- return Objects.hash(description, serializedSpace, spaceId, title, warehouseId);
+ return Objects.hash(description, etag, serializedSpace, spaceId, title, warehouseId);
}
@Override
public String toString() {
return new ToStringer(GenieUpdateSpaceRequest.class)
.add("description", description)
+ .add("etag", etag)
.add("serializedSpace", serializedSpace)
.add("spaceId", spaceId)
.add("title", title)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Thought.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Thought.java
index 25ff5955c..cf0dc64b3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Thought.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Thought.java
@@ -14,7 +14,14 @@ public class Thought {
@JsonProperty("content")
private String content;
- /** The category of this thought. */
+ /**
+ * The category of this thought. The possible values are: * `THOUGHT_TYPE_DESCRIPTION`: A
+ * high-level description of how the question was interpreted. * `THOUGHT_TYPE_UNDERSTANDING`: How
+ * ambiguous parts of the question were resolved. * `THOUGHT_TYPE_DATA_SOURCING`: Which tables or
+ * datasets were identified as relevant. * `THOUGHT_TYPE_INSTRUCTIONS`: Which author-defined
+ * instructions were referenced. * `THOUGHT_TYPE_STEPS`: The logical steps taken to compute the
+ * answer.
+ */
@JsonProperty("thought_type")
private ThoughtType thoughtType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java
index 118eacfd9..579389ac9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java
@@ -21,10 +21,6 @@
* methods GET, HEAD, PUT, and DELETE to manage files and directories specified using their URI
* path. The path is always absolute.
*
- * Some Files API client features are currently experimental. To enable them, set
- * `enable_experimental_files_api_client = True` in your configuration profile or use the
- * environment variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`.
- *
* Use of Files API may incur Databricks data transfer charges.
*
* [Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java
index e2d7724f6..86fb44603 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java
@@ -17,10 +17,6 @@
* methods GET, HEAD, PUT, and DELETE to manage files and directories specified using their URI
* path. The path is always absolute.
*
- * Some Files API client features are currently experimental. To enable them, set
- * `enable_experimental_files_api_client = True` in your configuration profile or use the
- * environment variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`.
- *
* Use of Files API may incur Databricks data transfer charges.
*
* [Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java
index 719dbe347..1dae5c2a0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java
@@ -12,8 +12,8 @@
* can manage or use apps. * **[Cluster permissions](:service:clusters)** — Manage which users can
* manage, restart, or attach to clusters. * **[Cluster policy
* permissions](:service:clusterpolicies)** — Manage which users can use cluster policies. *
- * **[Delta Live Tables pipeline permissions](:service:pipelines)** — Manage which users can view,
- * manage, run, cancel, or own a Delta Live Tables pipeline. * **[Job permissions](:service:jobs)**
+ * **[Spark Declarative Pipelines permissions](:service:pipelines)** — Manage which users can view,
+ * manage, run, cancel, or own a Spark Declarative Pipeline. * **[Job permissions](:service:jobs)**
* — Manage which users can view, manage, trigger, cancel, or own a job. * **[MLflow experiment
* permissions](:service:experiments)** — Manage which users can read, edit, or manage MLflow
* experiments. * **[MLflow registered model permissions](:service:modelregistry)** — Manage which
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java
index 0b011366f..4b3a00859 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java
@@ -9,8 +9,8 @@
* can manage or use apps. * **[Cluster permissions](:service:clusters)** — Manage which users can
* manage, restart, or attach to clusters. * **[Cluster policy
* permissions](:service:clusterpolicies)** — Manage which users can use cluster policies. *
- * **[Delta Live Tables pipeline permissions](:service:pipelines)** — Manage which users can view,
- * manage, run, cancel, or own a Delta Live Tables pipeline. * **[Job permissions](:service:jobs)**
+ * **[Spark Declarative Pipelines permissions](:service:pipelines)** — Manage which users can view,
+ * manage, run, cancel, or own a Spark Declarative Pipeline. * **[Job permissions](:service:jobs)**
* — Manage which users can view, manage, trigger, cancel, or own a job. * **[MLflow experiment
* permissions](:service:experiments)** — Manage which users can read, edit, or manage MLflow
* experiments. * **[MLflow registered model permissions](:service:modelregistry)** — Manage which
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationCodeCode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationCodeCode.java
index 23c06ff1f..1edadfb6b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationCodeCode.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationCodeCode.java
@@ -49,6 +49,8 @@
*/
@Generated
public enum TerminationCodeCode {
+ BREAKING_CHANGE, // Run failed because of an intentional breaking change in Spark, but it will be
+ // retried with a mitigation config.
BUDGET_POLICY_LIMIT_EXCEEDED,
CANCELED, // The run was canceled during execution by the Use this field when building UI components that display branches to users (e.g., a drop-down
+ * selector). Prefer showing `branch_id` instead of the full resource name from `Branch.name`,
+ * which follows the `projects/{project_id}/branches/{branch_id}` format and is not user-friendly.
+ */
+ @JsonProperty("branch_id")
+ private String branchId;
+
/** The branch's state, indicating if it is initializing, ready for use, or archived. */
@JsonProperty("current_state")
private BranchStatusState currentState;
@@ -53,6 +64,15 @@ public class BranchStatus {
@JsonProperty("state_change_time")
private Timestamp stateChangeTime;
+ public BranchStatus setBranchId(String branchId) {
+ this.branchId = branchId;
+ return this;
+ }
+
+ public String getBranchId() {
+ return branchId;
+ }
+
public BranchStatus setCurrentState(BranchStatusState currentState) {
this.currentState = currentState;
return this;
@@ -148,7 +168,8 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
BranchStatus that = (BranchStatus) o;
- return Objects.equals(currentState, that.currentState)
+ return Objects.equals(branchId, that.branchId)
+ && Objects.equals(currentState, that.currentState)
&& Objects.equals(defaultValue, that.defaultValue)
&& Objects.equals(expireTime, that.expireTime)
&& Objects.equals(isProtected, that.isProtected)
@@ -163,6 +184,7 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
return Objects.hash(
+ branchId,
currentState,
defaultValue,
expireTime,
@@ -178,6 +200,7 @@ public int hashCode() {
@Override
public String toString() {
return new ToStringer(BranchStatus.class)
+ .add("branchId", branchId)
.add("currentState", currentState)
.add("defaultValue", defaultValue)
.add("expireTime", expireTime)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CatalogCatalogStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CatalogCatalogStatus.java
index 7e39a11b2..a8f0eb3aa 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CatalogCatalogStatus.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CatalogCatalogStatus.java
@@ -18,6 +18,17 @@ public class CatalogCatalogStatus {
@JsonProperty("branch")
private String branch;
+ /**
+ * The short identifier of the catalog, suitable for showing to the users. For a catalog with name
+ * `catalogs/my-catalog`, the catalog_id is `my-catalog`.
+ *
+ * Use this field when building UI components that display catalogs to users (e.g., a drop-down
+ * selector). Prefer showing `catalog_id` instead of the full resource name from `Catalog.name`,
+ * which follows the `catalogs/{catalog_id}` format and is not user-friendly.
+ */
+ @JsonProperty("catalog_id")
+ private String catalogId;
+
/** The name of the Postgres database associated with the catalog. */
@JsonProperty("postgres_database")
private String postgresDatabase;
@@ -39,6 +50,15 @@ public String getBranch() {
return branch;
}
+ public CatalogCatalogStatus setCatalogId(String catalogId) {
+ this.catalogId = catalogId;
+ return this;
+ }
+
+ public String getCatalogId() {
+ return catalogId;
+ }
+
public CatalogCatalogStatus setPostgresDatabase(String postgresDatabase) {
this.postgresDatabase = postgresDatabase;
return this;
@@ -63,19 +83,21 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
CatalogCatalogStatus that = (CatalogCatalogStatus) o;
return Objects.equals(branch, that.branch)
+ && Objects.equals(catalogId, that.catalogId)
&& Objects.equals(postgresDatabase, that.postgresDatabase)
&& Objects.equals(project, that.project);
}
@Override
public int hashCode() {
- return Objects.hash(branch, postgresDatabase, project);
+ return Objects.hash(branch, catalogId, postgresDatabase, project);
}
@Override
public String toString() {
return new ToStringer(CatalogCatalogStatus.class)
.add("branch", branch)
+ .add("catalogId", catalogId)
.add("postgresDatabase", postgresDatabase)
.add("project", project)
.toString();
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DatabaseDatabaseStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DatabaseDatabaseStatus.java
index 80a25ed7b..53aec7344 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DatabaseDatabaseStatus.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DatabaseDatabaseStatus.java
@@ -9,6 +9,19 @@
@Generated
public class DatabaseDatabaseStatus {
+ /**
+ * The short identifier of the database, suitable for showing to the users. For a database with
+ * name `projects/my-project/branches/my-branch/databases/my-db`, the database_id is `my-db`.
+ *
+ * Use this field when building UI components that display databases to users (e.g., a
+ * drop-down selector). Prefer showing `database_id` instead of the full resource name from
+ * `Database.name`, which follows the
+ * `projects/{project_id}/branches/{branch_id}/databases/{database_id}` format and is not
+ * user-friendly.
+ */
+ @JsonProperty("database_id")
+ private String databaseId;
+
/** The name of the Postgres database. */
@JsonProperty("postgres_database")
private String postgresDatabase;
@@ -20,6 +33,15 @@ public class DatabaseDatabaseStatus {
@JsonProperty("role")
private String role;
+ public DatabaseDatabaseStatus setDatabaseId(String databaseId) {
+ this.databaseId = databaseId;
+ return this;
+ }
+
+ public String getDatabaseId() {
+ return databaseId;
+ }
+
public DatabaseDatabaseStatus setPostgresDatabase(String postgresDatabase) {
this.postgresDatabase = postgresDatabase;
return this;
@@ -43,18 +65,20 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
DatabaseDatabaseStatus that = (DatabaseDatabaseStatus) o;
- return Objects.equals(postgresDatabase, that.postgresDatabase)
+ return Objects.equals(databaseId, that.databaseId)
+ && Objects.equals(postgresDatabase, that.postgresDatabase)
&& Objects.equals(role, that.role);
}
@Override
public int hashCode() {
- return Objects.hash(postgresDatabase, role);
+ return Objects.hash(databaseId, postgresDatabase, role);
}
@Override
public String toString() {
return new ToStringer(DatabaseDatabaseStatus.class)
+ .add("databaseId", databaseId)
.add("postgresDatabase", postgresDatabase)
.add("role", role)
.toString();
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointStatus.java
index b4c3d11b1..34743c1e2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointStatus.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointStatus.java
@@ -30,6 +30,20 @@ public class EndpointStatus {
@JsonProperty("disabled")
private Boolean disabled;
+ /**
+ * The short identifier of the endpoint, suitable for showing to the users. For an endpoint with
+ * name `projects/my-project/branches/my-branch/endpoints/my-endpoint`, the endpoint_id is
+ * `my-endpoint`.
+ *
+ * Use this field when building UI components that display endpoints to users (e.g., a
+ * drop-down selector). Prefer showing `endpoint_id` instead of the full resource name from
+ * `Endpoint.name`, which follows the
+ * `projects/{project_id}/branches/{branch_id}/endpoints/{endpoint_id}` format and is not
+ * user-friendly.
+ */
+ @JsonProperty("endpoint_id")
+ private String endpointId;
+
/** The endpoint type. A branch can only have one READ_WRITE endpoint. */
@JsonProperty("endpoint_type")
private EndpointType endpointType;
@@ -90,6 +104,15 @@ public Boolean getDisabled() {
return disabled;
}
+ public EndpointStatus setEndpointId(String endpointId) {
+ this.endpointId = endpointId;
+ return this;
+ }
+
+ public String getEndpointId() {
+ return endpointId;
+ }
+
public EndpointStatus setEndpointType(EndpointType endpointType) {
this.endpointType = endpointType;
return this;
@@ -153,6 +176,7 @@ public boolean equals(Object o) {
&& Objects.equals(autoscalingLimitMinCu, that.autoscalingLimitMinCu)
&& Objects.equals(currentState, that.currentState)
&& Objects.equals(disabled, that.disabled)
+ && Objects.equals(endpointId, that.endpointId)
&& Objects.equals(endpointType, that.endpointType)
&& Objects.equals(group, that.group)
&& Objects.equals(hosts, that.hosts)
@@ -168,6 +192,7 @@ public int hashCode() {
autoscalingLimitMinCu,
currentState,
disabled,
+ endpointId,
endpointType,
group,
hosts,
@@ -183,6 +208,7 @@ public String toString() {
.add("autoscalingLimitMinCu", autoscalingLimitMinCu)
.add("currentState", currentState)
.add("disabled", disabled)
+ .add("endpointId", endpointId)
.add("endpointType", endpointType)
.add("group", group)
.add("hosts", hosts)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectStatus.java
index c01a7b285..477fbc49d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectStatus.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectStatus.java
@@ -51,6 +51,17 @@ public class ProjectStatus {
@JsonProperty("pg_version")
private Long pgVersion;
+ /**
+ * The short identifier of the project, suitable for showing to the users. For a project with name
+ * `projects/my-project`, the project_id is `my-project`.
+ *
+ * Use this field when building UI components that display projects to users (e.g., a drop-down
+ * selector). Prefer showing `project_id` instead of the full resource name from `Project.name`,
+ * which follows the `projects/{project_id}` format and is not user-friendly.
+ */
+ @JsonProperty("project_id")
+ private String projectId;
+
/** The current space occupied by the project in storage. */
@JsonProperty("synthetic_storage_size_bytes")
private Long syntheticStorageSizeBytes;
@@ -146,6 +157,15 @@ public Long getPgVersion() {
return pgVersion;
}
+ public ProjectStatus setProjectId(String projectId) {
+ this.projectId = projectId;
+ return this;
+ }
+
+ public String getProjectId() {
+ return projectId;
+ }
+
public ProjectStatus setSyntheticStorageSizeBytes(Long syntheticStorageSizeBytes) {
this.syntheticStorageSizeBytes = syntheticStorageSizeBytes;
return this;
@@ -170,6 +190,7 @@ public boolean equals(Object o) {
&& Objects.equals(historyRetentionDuration, that.historyRetentionDuration)
&& Objects.equals(owner, that.owner)
&& Objects.equals(pgVersion, that.pgVersion)
+ && Objects.equals(projectId, that.projectId)
&& Objects.equals(syntheticStorageSizeBytes, that.syntheticStorageSizeBytes);
}
@@ -186,6 +207,7 @@ public int hashCode() {
historyRetentionDuration,
owner,
pgVersion,
+ projectId,
syntheticStorageSizeBytes);
}
@@ -202,6 +224,7 @@ public String toString() {
.add("historyRetentionDuration", historyRetentionDuration)
.add("owner", owner)
.add("pgVersion", pgVersion)
+ .add("projectId", projectId)
.add("syntheticStorageSizeBytes", syntheticStorageSizeBytes)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/RoleRoleStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/RoleRoleStatus.java
index abb3d9ea0..2a1b91884 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/RoleRoleStatus.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/RoleRoleStatus.java
@@ -30,6 +30,18 @@ public class RoleRoleStatus {
@JsonProperty("postgres_role")
private String postgresRole;
+ /**
+ * The short identifier of the role, suitable for showing to the users. For a role with name
+ * `projects/my-project/branches/my-branch/roles/my-role`, the role_id is `my-role`.
+ *
+ * Use this field when building UI components that display roles to users (e.g., a drop-down
+ * selector). Prefer showing `role_id` instead of the full resource name from `Role.name`, which
+ * follows the `projects/{project_id}/branches/{branch_id}/roles/{role_id}` format and is not
+ * user-friendly.
+ */
+ @JsonProperty("role_id")
+ private String roleId;
+
public RoleRoleStatus setAttributes(RoleAttributes attributes) {
this.attributes = attributes;
return this;
@@ -75,6 +87,15 @@ public String getPostgresRole() {
return postgresRole;
}
+ public RoleRoleStatus setRoleId(String roleId) {
+ this.roleId = roleId;
+ return this;
+ }
+
+ public String getRoleId() {
+ return roleId;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
@@ -84,12 +105,14 @@ public boolean equals(Object o) {
&& Objects.equals(authMethod, that.authMethod)
&& Objects.equals(identityType, that.identityType)
&& Objects.equals(membershipRoles, that.membershipRoles)
- && Objects.equals(postgresRole, that.postgresRole);
+ && Objects.equals(postgresRole, that.postgresRole)
+ && Objects.equals(roleId, that.roleId);
}
@Override
public int hashCode() {
- return Objects.hash(attributes, authMethod, identityType, membershipRoles, postgresRole);
+ return Objects.hash(
+ attributes, authMethod, identityType, membershipRoles, postgresRole, roleId);
}
@Override
@@ -100,6 +123,7 @@ public String toString() {
.add("identityType", identityType)
.add("membershipRoles", membershipRoles)
.add("postgresRole", postgresRole)
+ .add("roleId", roleId)
.toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/SyncedTableSyncedTableStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/SyncedTableSyncedTableStatus.java
index 33df9b315..e7dc081bc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/SyncedTableSyncedTableStatus.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/SyncedTableSyncedTableStatus.java
@@ -41,6 +41,14 @@ public class SyncedTableSyncedTableStatus {
@JsonProperty("pipeline_id")
private String pipelineId;
+ /**
+ * The full resource name of the project associated with the table.
+ *
+ * Format: "projects/{project_id}".
+ */
+ @JsonProperty("project")
+ private String project;
+
/** The current phase of the data synchronization pipeline. */
@JsonProperty("provisioning_phase")
private ProvisioningPhase provisioningPhase;
@@ -114,6 +122,15 @@ public String getPipelineId() {
return pipelineId;
}
+ public SyncedTableSyncedTableStatus setProject(String project) {
+ this.project = project;
+ return this;
+ }
+
+ public String getProject() {
+ return project;
+ }
+
public SyncedTableSyncedTableStatus setProvisioningPhase(ProvisioningPhase provisioningPhase) {
this.provisioningPhase = provisioningPhase;
return this;
@@ -145,6 +162,7 @@ public boolean equals(Object o) {
&& Objects.equals(message, that.message)
&& Objects.equals(ongoingSyncProgress, that.ongoingSyncProgress)
&& Objects.equals(pipelineId, that.pipelineId)
+ && Objects.equals(project, that.project)
&& Objects.equals(provisioningPhase, that.provisioningPhase)
&& Objects.equals(unityCatalogProvisioningState, that.unityCatalogProvisioningState);
}
@@ -159,6 +177,7 @@ public int hashCode() {
message,
ongoingSyncProgress,
pipelineId,
+ project,
provisioningPhase,
unityCatalogProvisioningState);
}
@@ -173,6 +192,7 @@ public String toString() {
.add("message", message)
.add("ongoingSyncProgress", ongoingSyncProgress)
.add("pipelineId", pipelineId)
+ .add("project", project)
.add("provisioningPhase", provisioningPhase)
.add("unityCatalogProvisioningState", unityCatalogProvisioningState)
.toString();
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateGcpKeyInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateGcpKeyInfo.java
index 2da9d7bf9..e9d1e6a78 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateGcpKeyInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateGcpKeyInfo.java
@@ -23,6 +23,13 @@ public class CreateGcpKeyInfo {
@JsonProperty("kms_key_id")
private String kmsKeyId;
+ /**
+ * When true, Databricks will not use OAuth to grant the service account access to the KMS key.
+ * The customer is responsible for granting access manually.
+ */
+ @JsonProperty("manual")
+ private Boolean manual;
+
public CreateGcpKeyInfo setGcpServiceAccount(GcpServiceAccount gcpServiceAccount) {
this.gcpServiceAccount = gcpServiceAccount;
return this;
@@ -41,18 +48,28 @@ public String getKmsKeyId() {
return kmsKeyId;
}
+ public CreateGcpKeyInfo setManual(Boolean manual) {
+ this.manual = manual;
+ return this;
+ }
+
+ public Boolean getManual() {
+ return manual;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
CreateGcpKeyInfo that = (CreateGcpKeyInfo) o;
return Objects.equals(gcpServiceAccount, that.gcpServiceAccount)
- && Objects.equals(kmsKeyId, that.kmsKeyId);
+ && Objects.equals(kmsKeyId, that.kmsKeyId)
+ && Objects.equals(manual, that.manual);
}
@Override
public int hashCode() {
- return Objects.hash(gcpServiceAccount, kmsKeyId);
+ return Objects.hash(gcpServiceAccount, kmsKeyId, manual);
}
@Override
@@ -60,6 +77,7 @@ public String toString() {
return new ToStringer(CreateGcpKeyInfo.class)
.add("gcpServiceAccount", gcpServiceAccount)
.add("kmsKeyId", kmsKeyId)
+ .add("manual", manual)
.toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysAPI.java
index a9c6d4ad7..a1fa6e14c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysAPI.java
@@ -51,6 +51,16 @@ public EncryptionKeysAPI(EncryptionKeysService mock) {
*
* This operation is available only if your account is on the E2 version of the platform or on
* a select custom plan that allows multiple workspaces per account.
+ *
+ * **GCP only**: To create a customer-managed key on GCP, you must include the
+ * `X-Databricks-GCP-SA-Access-Token` HTTP header in your request. This header must contain a
+ * Google Cloud OAuth access token with the `cloud-platform` scope. The Google identity associated
+ * with the token must also have the `setIamPermissions` and `getIamPermissions` IAM permissions
+ * on the key resource. For details on obtaining this token, see [Authenticate with Google ID
+ * tokens].
+ *
+ * [Authenticate with Google ID tokens]:
+ * https://docs.databricks.com/gcp/en/dev-tools/auth/authentication-google-id.html
*/
public CustomerManagedKey create(CreateCustomerManagedKeyRequest request) {
return impl.create(request);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysService.java
index e9741ccb3..748178cc2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysService.java
@@ -39,6 +39,16 @@ public interface EncryptionKeysService {
*
* This operation is available only if your account is on the E2 version of the platform or on
* a select custom plan that allows multiple workspaces per account.
+ *
+ * **GCP only**: To create a customer-managed key on GCP, you must include the
+ * `X-Databricks-GCP-SA-Access-Token` HTTP header in your request. This header must contain a
+ * Google Cloud OAuth access token with the `cloud-platform` scope. The Google identity associated
+ * with the token must also have the `setIamPermissions` and `getIamPermissions` IAM permissions
+ * on the key resource. For details on obtaining this token, see [Authenticate with Google ID
+ * tokens].
+ *
+ * [Authenticate with Google ID tokens]:
+ * https://docs.databricks.com/gcp/en/dev-tools/auth/authentication-google-id.html
*/
CustomerManagedKey create(CreateCustomerManagedKeyRequest createCustomerManagedKeyRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpKeyInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpKeyInfo.java
index 19d9d692d..a49661077 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpKeyInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpKeyInfo.java
@@ -23,6 +23,13 @@ public class GcpKeyInfo {
@JsonProperty("kms_key_id")
private String kmsKeyId;
+ /**
+ * When true, Databricks will not use OAuth to grant the service account access to the KMS key.
+ * The customer is responsible for granting access manually.
+ */
+ @JsonProperty("manual")
+ private Boolean manual;
+
public GcpKeyInfo setGcpServiceAccount(GcpServiceAccount gcpServiceAccount) {
this.gcpServiceAccount = gcpServiceAccount;
return this;
@@ -41,18 +48,28 @@ public String getKmsKeyId() {
return kmsKeyId;
}
+ public GcpKeyInfo setManual(Boolean manual) {
+ this.manual = manual;
+ return this;
+ }
+
+ public Boolean getManual() {
+ return manual;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
GcpKeyInfo that = (GcpKeyInfo) o;
return Objects.equals(gcpServiceAccount, that.gcpServiceAccount)
- && Objects.equals(kmsKeyId, that.kmsKeyId);
+ && Objects.equals(kmsKeyId, that.kmsKeyId)
+ && Objects.equals(manual, that.manual);
}
@Override
public int hashCode() {
- return Objects.hash(gcpServiceAccount, kmsKeyId);
+ return Objects.hash(gcpServiceAccount, kmsKeyId, manual);
}
@Override
@@ -60,6 +77,7 @@ public String toString() {
return new ToStringer(GcpKeyInfo.class)
.add("gcpServiceAccount", gcpServiceAccount)
.add("kmsKeyId", kmsKeyId)
+ .add("manual", manual)
.toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyAppsRuntimeDestination.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyAppsRuntimeDestination.java
new file mode 100755
index 000000000..b9a879c16
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyAppsRuntimeDestination.java
@@ -0,0 +1,46 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CustomerFacingIngressNetworkPolicyAppsRuntimeDestination {
+ /** Must be set to true. */
+ @JsonProperty("all_destinations")
+ private Boolean allDestinations;
+
+ public CustomerFacingIngressNetworkPolicyAppsRuntimeDestination setAllDestinations(
+ Boolean allDestinations) {
+ this.allDestinations = allDestinations;
+ return this;
+ }
+
+ public Boolean getAllDestinations() {
+ return allDestinations;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CustomerFacingIngressNetworkPolicyAppsRuntimeDestination that =
+ (CustomerFacingIngressNetworkPolicyAppsRuntimeDestination) o;
+ return Objects.equals(allDestinations, that.allDestinations);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(allDestinations);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CustomerFacingIngressNetworkPolicyAppsRuntimeDestination.class)
+ .add("allDestinations", allDestinations)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyLakebaseRuntimeDestination.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyLakebaseRuntimeDestination.java
new file mode 100755
index 000000000..8e54c42c9
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyLakebaseRuntimeDestination.java
@@ -0,0 +1,46 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CustomerFacingIngressNetworkPolicyLakebaseRuntimeDestination {
+ /** Must be set to true. */
+ @JsonProperty("all_destinations")
+ private Boolean allDestinations;
+
+ public CustomerFacingIngressNetworkPolicyLakebaseRuntimeDestination setAllDestinations(
+ Boolean allDestinations) {
+ this.allDestinations = allDestinations;
+ return this;
+ }
+
+ public Boolean getAllDestinations() {
+ return allDestinations;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CustomerFacingIngressNetworkPolicyLakebaseRuntimeDestination that =
+ (CustomerFacingIngressNetworkPolicyLakebaseRuntimeDestination) o;
+ return Objects.equals(allDestinations, that.allDestinations);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(allDestinations);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CustomerFacingIngressNetworkPolicyLakebaseRuntimeDestination.class)
+ .add("allDestinations", allDestinations)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyPublicIngressRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyPublicIngressRule.java
index fc73424f0..f3e8d0cc0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyPublicIngressRule.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyPublicIngressRule.java
@@ -21,10 +21,7 @@ public class CustomerFacingIngressNetworkPolicyPublicIngressRule {
@JsonProperty("destination")
private CustomerFacingIngressNetworkPolicyRequestDestination destination;
- /**
- * User-provided name for this ingress rule. Helps identify which rule caused a request to be
- * denied or dry-run denied.
- */
+ /** The label for this ingress rule. */
@JsonProperty("label")
private String label;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyRequestDestination.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyRequestDestination.java
index 39a13b1fb..87f91f574 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyRequestDestination.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyRequestDestination.java
@@ -16,6 +16,14 @@ public class CustomerFacingIngressNetworkPolicyRequestDestination {
@JsonProperty("all_destinations")
private Boolean allDestinations;
+ /** */
+ @JsonProperty("apps_runtime")
+ private CustomerFacingIngressNetworkPolicyAppsRuntimeDestination appsRuntime;
+
+ /** */
+ @JsonProperty("lakebase_runtime")
+ private CustomerFacingIngressNetworkPolicyLakebaseRuntimeDestination lakebaseRuntime;
+
/** */
@JsonProperty("workspace_api")
private CustomerFacingIngressNetworkPolicyWorkspaceApiDestination workspaceApi;
@@ -34,6 +42,26 @@ public Boolean getAllDestinations() {
return allDestinations;
}
+ public CustomerFacingIngressNetworkPolicyRequestDestination setAppsRuntime(
+ CustomerFacingIngressNetworkPolicyAppsRuntimeDestination appsRuntime) {
+ this.appsRuntime = appsRuntime;
+ return this;
+ }
+
+ public CustomerFacingIngressNetworkPolicyAppsRuntimeDestination getAppsRuntime() {
+ return appsRuntime;
+ }
+
+ public CustomerFacingIngressNetworkPolicyRequestDestination setLakebaseRuntime(
+ CustomerFacingIngressNetworkPolicyLakebaseRuntimeDestination lakebaseRuntime) {
+ this.lakebaseRuntime = lakebaseRuntime;
+ return this;
+ }
+
+ public CustomerFacingIngressNetworkPolicyLakebaseRuntimeDestination getLakebaseRuntime() {
+ return lakebaseRuntime;
+ }
+
public CustomerFacingIngressNetworkPolicyRequestDestination setWorkspaceApi(
CustomerFacingIngressNetworkPolicyWorkspaceApiDestination workspaceApi) {
this.workspaceApi = workspaceApi;
@@ -61,19 +89,23 @@ public boolean equals(Object o) {
CustomerFacingIngressNetworkPolicyRequestDestination that =
(CustomerFacingIngressNetworkPolicyRequestDestination) o;
return Objects.equals(allDestinations, that.allDestinations)
+ && Objects.equals(appsRuntime, that.appsRuntime)
+ && Objects.equals(lakebaseRuntime, that.lakebaseRuntime)
&& Objects.equals(workspaceApi, that.workspaceApi)
&& Objects.equals(workspaceUi, that.workspaceUi);
}
@Override
public int hashCode() {
- return Objects.hash(allDestinations, workspaceApi, workspaceUi);
+ return Objects.hash(allDestinations, appsRuntime, lakebaseRuntime, workspaceApi, workspaceUi);
}
@Override
public String toString() {
return new ToStringer(CustomerFacingIngressNetworkPolicyRequestDestination.class)
.add("allDestinations", allDestinations)
+ .add("appsRuntime", appsRuntime)
+ .add("lakebaseRuntime", lakebaseRuntime)
.add("workspaceApi", workspaceApi)
.add("workspaceUi", workspaceUi)
.toString();
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicy.java
index 886b66041..d98ef6274 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicy.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicy.java
@@ -26,6 +26,15 @@ public class EgressNetworkPolicyNetworkAccessPolicy {
private Collection If a token with the specified ID is not valid, this call returns an error
+ * **RESOURCE_DOES_NOT_EXIST**.
+ */
+ public UpdateTokenResponse update(UpdateTokenRequest request) {
+ return impl.update(request);
+ }
+
public TokensService impl() {
return impl;
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensImpl.java
index 5de1f612b..f248f398f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensImpl.java
@@ -67,4 +67,22 @@ public ListPublicTokensResponse list() {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
}
+
+ @Override
+ public UpdateTokenResponse update(UpdateTokenRequest request) {
+ String path = String.format("/api/2.0/token/%s", request.getTokenId());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request));
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ if (apiClient.workspaceId() != null) {
+ req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId());
+ }
+ return apiClient.execute(req, UpdateTokenResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensService.java
index 9848537ed..05155d7f1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensService.java
@@ -30,4 +30,12 @@ public interface TokensService {
/** Lists all the valid tokens for a user-workspace pair. */
ListPublicTokensResponse list();
+
+ /**
+ * Updates the comment or scopes of a token.
+ *
+ * If a token with the specified ID is not valid, this call returns an error
+ * **RESOURCE_DOES_NOT_EXIST**.
+ */
+ UpdateTokenResponse update(UpdateTokenRequest updateTokenRequest);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateTokenRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateTokenRequest.java
new file mode 100755
index 000000000..0802f7c59
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateTokenRequest.java
@@ -0,0 +1,88 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.protobuf.FieldMask;
+import java.util.Objects;
+
+@Generated
+public class UpdateTokenRequest {
+ /** */
+ @JsonProperty("token")
+ private PublicTokenInfo token;
+
+ /** The SHA-256 hash of the token to be updated. */
+ @JsonIgnore private String tokenId;
+
+ /**
+ * A list of field names under PublicTokenInfo to update. For example, in a request use
+ * {"update_mask": "comment,scopes"}
+ *
+ * The field mask must be a single string, with multiple fields separated by commas (no
+ * spaces). The field path is relative to the resource object, using a dot (`.`) to navigate
+ * sub-fields (e.g., `author.given_name`). Specification of elements in sequence or map fields is
+ * not allowed, as only the entire collection field can be specified. Field names must exactly
+ * match the resource field names.
+ *
+ * A field mask of `*` indicates full replacement. It’s recommended to always explicitly list
+ * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if
+ * the API changes in the future.
+ */
+ @JsonProperty("update_mask")
+ private FieldMask updateMask;
+
+ public UpdateTokenRequest setToken(PublicTokenInfo token) {
+ this.token = token;
+ return this;
+ }
+
+ public PublicTokenInfo getToken() {
+ return token;
+ }
+
+ public UpdateTokenRequest setTokenId(String tokenId) {
+ this.tokenId = tokenId;
+ return this;
+ }
+
+ public String getTokenId() {
+ return tokenId;
+ }
+
+ public UpdateTokenRequest setUpdateMask(FieldMask updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public FieldMask getUpdateMask() {
+ return updateMask;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateTokenRequest that = (UpdateTokenRequest) o;
+ return Objects.equals(token, that.token)
+ && Objects.equals(tokenId, that.tokenId)
+ && Objects.equals(updateMask, that.updateMask);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(token, tokenId, updateMask);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateTokenRequest.class)
+ .add("token", token)
+ .add("tokenId", tokenId)
+ .add("updateMask", updateMask)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateTokenResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateTokenResponse.java
new file mode 100755
index 000000000..a25e7eec1
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateTokenResponse.java
@@ -0,0 +1,28 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+@Generated
+public class UpdateTokenResponse {
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash();
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateTokenResponse.class).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/App.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/App.java
new file mode 100755
index 000000000..e2876bbe3
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/App.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.supervisoragents;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Databricks app. Supported app types: custom MCP, custom agent. */
+@Generated
+public class App {
+ /** App name */
+ @JsonProperty("name")
+ private String name;
+
+ public App setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ App that = (App) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(App.class).add("name", name).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Connection.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Connection.java
new file mode 100755
index 000000000..1093f2d99
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Connection.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.supervisoragents;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Databricks connection. Supported connection: external mcp server. */
+@Generated
+public class Connection {
+ /** */
+ @JsonProperty("name")
+ private String name;
+
+ public Connection setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ Connection that = (Connection) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(Connection.class).add("name", name).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/CreateSupervisorAgentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/CreateSupervisorAgentRequest.java
new file mode 100755
index 000000000..4e056e875
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/CreateSupervisorAgentRequest.java
@@ -0,0 +1,44 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.supervisoragents;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateSupervisorAgentRequest {
+ /** The Supervisor Agent to create. */
+ @JsonProperty("supervisor_agent")
+ private SupervisorAgent supervisorAgent;
+
+ public CreateSupervisorAgentRequest setSupervisorAgent(SupervisorAgent supervisorAgent) {
+ this.supervisorAgent = supervisorAgent;
+ return this;
+ }
+
+ public SupervisorAgent getSupervisorAgent() {
+ return supervisorAgent;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateSupervisorAgentRequest that = (CreateSupervisorAgentRequest) o;
+ return Objects.equals(supervisorAgent, that.supervisorAgent);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(supervisorAgent);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateSupervisorAgentRequest.class)
+ .add("supervisorAgent", supervisorAgent)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/CreateToolRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/CreateToolRequest.java
new file mode 100755
index 000000000..d7a3be1d7
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/CreateToolRequest.java
@@ -0,0 +1,81 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.supervisoragents;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateToolRequest {
+ /**
+ * Parent resource where this tool will be created. Format:
+ * supervisor-agents/{supervisor_agent_id}
+ */
+ @JsonIgnore private String parent;
+
+ /** */
+ @JsonProperty("tool")
+ private Tool tool;
+
+ /**
+ * The ID to use for the tool, which will become the final component of the tool's resource name.
+ */
+ @JsonIgnore
+ @QueryParam("tool_id")
+ private String toolId;
+
+ public CreateToolRequest setParent(String parent) {
+ this.parent = parent;
+ return this;
+ }
+
+ public String getParent() {
+ return parent;
+ }
+
+ public CreateToolRequest setTool(Tool tool) {
+ this.tool = tool;
+ return this;
+ }
+
+ public Tool getTool() {
+ return tool;
+ }
+
+ public CreateToolRequest setToolId(String toolId) {
+ this.toolId = toolId;
+ return this;
+ }
+
+ public String getToolId() {
+ return toolId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateToolRequest that = (CreateToolRequest) o;
+ return Objects.equals(parent, that.parent)
+ && Objects.equals(tool, that.tool)
+ && Objects.equals(toolId, that.toolId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(parent, tool, toolId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateToolRequest.class)
+ .add("parent", parent)
+ .add("tool", tool)
+ .add("toolId", toolId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/DeleteSupervisorAgentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/DeleteSupervisorAgentRequest.java
new file mode 100755
index 000000000..ad751ef0e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/DeleteSupervisorAgentRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.supervisoragents;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class DeleteSupervisorAgentRequest {
+ /** The resource name of the Supervisor Agent. Format: supervisor-agents/{supervisor_agent_id} */
+ @JsonIgnore private String name;
+
+ public DeleteSupervisorAgentRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteSupervisorAgentRequest that = (DeleteSupervisorAgentRequest) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteSupervisorAgentRequest.class).add("name", name).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/DeleteToolRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/DeleteToolRequest.java
new file mode 100755
index 000000000..e16e806cf
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/DeleteToolRequest.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.supervisoragents;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class DeleteToolRequest {
+ /**
+ * The resource name of the Tool. Format: supervisor-agents/{supervisor_agent_id}/tools/{tool_id}
+ */
+ @JsonIgnore private String name;
+
+ public DeleteToolRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteToolRequest that = (DeleteToolRequest) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteToolRequest.class).add("name", name).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/GenieSpace.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/GenieSpace.java
new file mode 100755
index 000000000..5fac01eda
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/GenieSpace.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.supervisoragents;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class GenieSpace {
+ /** The ID of the genie space. */
+ @JsonProperty("id")
+ private String id;
+
+ public GenieSpace setId(String id) {
+ this.id = id;
+ return this;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GenieSpace that = (GenieSpace) o;
+ return Objects.equals(id, that.id);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(id);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GenieSpace.class).add("id", id).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/GetSupervisorAgentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/GetSupervisorAgentRequest.java
new file mode 100755
index 000000000..8429b659a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/GetSupervisorAgentRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.supervisoragents;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetSupervisorAgentRequest {
+ /** The resource name of the Supervisor Agent. Format: supervisor-agents/{supervisor_agent_id} */
+ @JsonIgnore private String name;
+
+ public GetSupervisorAgentRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetSupervisorAgentRequest that = (GetSupervisorAgentRequest) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetSupervisorAgentRequest.class).add("name", name).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/GetToolRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/GetToolRequest.java
new file mode 100755
index 000000000..b989de681
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/GetToolRequest.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.supervisoragents;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetToolRequest {
+ /**
+ * The resource name of the Tool. Format: supervisor-agents/{supervisor_agent_id}/tools/{tool_id}
+ */
+ @JsonIgnore private String name;
+
+ public GetToolRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetToolRequest that = (GetToolRequest) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetToolRequest.class).add("name", name).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/KnowledgeAssistant.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/KnowledgeAssistant.java
new file mode 100755
index 000000000..6aad1e016
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/KnowledgeAssistant.java
@@ -0,0 +1,59 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.supervisoragents;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class KnowledgeAssistant {
+ /** The ID of the knowledge assistant. */
+ @JsonProperty("knowledge_assistant_id")
+ private String knowledgeAssistantId;
+
+ /** Deprecated: use knowledge_assistant_id instead. */
+ @JsonProperty("serving_endpoint_name")
+ private String servingEndpointName;
+
+ public KnowledgeAssistant setKnowledgeAssistantId(String knowledgeAssistantId) {
+ this.knowledgeAssistantId = knowledgeAssistantId;
+ return this;
+ }
+
+ public String getKnowledgeAssistantId() {
+ return knowledgeAssistantId;
+ }
+
+ public KnowledgeAssistant setServingEndpointName(String servingEndpointName) {
+ this.servingEndpointName = servingEndpointName;
+ return this;
+ }
+
+ public String getServingEndpointName() {
+ return servingEndpointName;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ KnowledgeAssistant that = (KnowledgeAssistant) o;
+ return Objects.equals(knowledgeAssistantId, that.knowledgeAssistantId)
+ && Objects.equals(servingEndpointName, that.servingEndpointName);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(knowledgeAssistantId, servingEndpointName);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(KnowledgeAssistant.class)
+ .add("knowledgeAssistantId", knowledgeAssistantId)
+ .add("servingEndpointName", servingEndpointName)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/ListSupervisorAgentsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/ListSupervisorAgentsRequest.java
new file mode 100755
index 000000000..14b595a87
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/ListSupervisorAgentsRequest.java
@@ -0,0 +1,67 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.supervisoragents;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class ListSupervisorAgentsRequest {
+ /**
+ * The maximum number of supervisor agents to return. If unspecified, at most 100 supervisor
+ * agents will be returned. The maximum value is 100; values above 100 will be coerced to 100.
+ */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /**
+ * A page token, received from a previous `ListSupervisorAgents` call. Provide this to retrieve
+ * the subsequent page. If unspecified, the first page will be returned.
+ */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ public ListSupervisorAgentsRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListSupervisorAgentsRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListSupervisorAgentsRequest that = (ListSupervisorAgentsRequest) o;
+ return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(pageSize, pageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListSupervisorAgentsRequest.class)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/ListSupervisorAgentsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/ListSupervisorAgentsResponse.java
new file mode 100755
index 000000000..25dc4747a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/ListSupervisorAgentsResponse.java
@@ -0,0 +1,64 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.supervisoragents;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class ListSupervisorAgentsResponse {
+ /**
+ * A token that can be sent as `page_token` to retrieve the next page. If this field is omitted,
+ * there are no subsequent pages.
+ */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ /** */
+ @JsonProperty("supervisor_agents")
+ private Collection This is the high-level interface, that contains generated methods.
+ *
+ * Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface SupervisorAgentsService {
+ /** Creates a new Supervisor Agent. */
+ SupervisorAgent createSupervisorAgent(CreateSupervisorAgentRequest createSupervisorAgentRequest);
+
+ /**
+ * Creates a Tool under a Supervisor Agent. Specify one of "genie_space", "knowledge_assistant",
+ * "uc_function", "connection", "app", "volume", "lakeview_dashboard" in the request body.
+ */
+ Tool createTool(CreateToolRequest createToolRequest);
+
+ /** Deletes a Supervisor Agent. */
+ void deleteSupervisorAgent(DeleteSupervisorAgentRequest deleteSupervisorAgentRequest);
+
+ /** Deletes a Tool. */
+ void deleteTool(DeleteToolRequest deleteToolRequest);
+
+ /** Gets a Supervisor Agent. */
+ SupervisorAgent getSupervisorAgent(GetSupervisorAgentRequest getSupervisorAgentRequest);
+
+ /** Gets a Tool. */
+ Tool getTool(GetToolRequest getToolRequest);
+
+ /** Lists Supervisor Agents. */
+ ListSupervisorAgentsResponse listSupervisorAgents(
+ ListSupervisorAgentsRequest listSupervisorAgentsRequest);
+
+ /** Lists Tools under a Supervisor Agent. */
+ ListToolsResponse listTools(ListToolsRequest listToolsRequest);
+
+ /**
+ * Updates a Supervisor Agent. The fields that are required depend on the paths specified in
+ * `update_mask`. Only fields included in the mask will be updated.
+ */
+ SupervisorAgent updateSupervisorAgent(UpdateSupervisorAgentRequest updateSupervisorAgentRequest);
+
+ /**
+ * Updates a Tool. Only the `description` field can be updated. To change immutable fields such as
+ * tool type, spec, or tool ID, delete the tool and recreate it.
+ */
+ Tool updateTool(UpdateToolRequest updateToolRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Tool.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Tool.java
new file mode 100755
index 000000000..7fede2aac
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Tool.java
@@ -0,0 +1,208 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.supervisoragents;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class Tool {
+ /** */
+ @JsonProperty("app")
+ private App app;
+
+ /** */
+ @JsonProperty("connection")
+ private Connection connection;
+
+ /** Description of what this tool does (user-facing). */
+ @JsonProperty("description")
+ private String description;
+
+ /** */
+ @JsonProperty("genie_space")
+ private GenieSpace genieSpace;
+
+ /** Deprecated: Use tool_id instead. */
+ @JsonProperty("id")
+ private String id;
+
+ /** */
+ @JsonProperty("knowledge_assistant")
+ private KnowledgeAssistant knowledgeAssistant;
+
+ /** Full resource name: supervisor-agents/{supervisor_agent_id}/tools/{tool_id} */
+ @JsonProperty("name")
+ private String name;
+
+ /** User specified id of the Tool. */
+ @JsonProperty("tool_id")
+ private String toolId;
+
+ /**
+ * Tool type. Must be one of: "genie_space", "knowledge_assistant", "uc_function", "connection",
+ * "app", "volume", "lakeview_dashboard", "serving_endpoint".
+ */
+ @JsonProperty("tool_type")
+ private String toolType;
+
+ /** */
+ @JsonProperty("uc_function")
+ private UcFunction ucFunction;
+
+ /** */
+ @JsonProperty("volume")
+ private Volume volume;
+
+ public Tool setApp(App app) {
+ this.app = app;
+ return this;
+ }
+
+ public App getApp() {
+ return app;
+ }
+
+ public Tool setConnection(Connection connection) {
+ this.connection = connection;
+ return this;
+ }
+
+ public Connection getConnection() {
+ return connection;
+ }
+
+ public Tool setDescription(String description) {
+ this.description = description;
+ return this;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
+ public Tool setGenieSpace(GenieSpace genieSpace) {
+ this.genieSpace = genieSpace;
+ return this;
+ }
+
+ public GenieSpace getGenieSpace() {
+ return genieSpace;
+ }
+
+ public Tool setId(String id) {
+ this.id = id;
+ return this;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public Tool setKnowledgeAssistant(KnowledgeAssistant knowledgeAssistant) {
+ this.knowledgeAssistant = knowledgeAssistant;
+ return this;
+ }
+
+ public KnowledgeAssistant getKnowledgeAssistant() {
+ return knowledgeAssistant;
+ }
+
+ public Tool setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public Tool setToolId(String toolId) {
+ this.toolId = toolId;
+ return this;
+ }
+
+ public String getToolId() {
+ return toolId;
+ }
+
+ public Tool setToolType(String toolType) {
+ this.toolType = toolType;
+ return this;
+ }
+
+ public String getToolType() {
+ return toolType;
+ }
+
+ public Tool setUcFunction(UcFunction ucFunction) {
+ this.ucFunction = ucFunction;
+ return this;
+ }
+
+ public UcFunction getUcFunction() {
+ return ucFunction;
+ }
+
+ public Tool setVolume(Volume volume) {
+ this.volume = volume;
+ return this;
+ }
+
+ public Volume getVolume() {
+ return volume;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ Tool that = (Tool) o;
+ return Objects.equals(app, that.app)
+ && Objects.equals(connection, that.connection)
+ && Objects.equals(description, that.description)
+ && Objects.equals(genieSpace, that.genieSpace)
+ && Objects.equals(id, that.id)
+ && Objects.equals(knowledgeAssistant, that.knowledgeAssistant)
+ && Objects.equals(name, that.name)
+ && Objects.equals(toolId, that.toolId)
+ && Objects.equals(toolType, that.toolType)
+ && Objects.equals(ucFunction, that.ucFunction)
+ && Objects.equals(volume, that.volume);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ app,
+ connection,
+ description,
+ genieSpace,
+ id,
+ knowledgeAssistant,
+ name,
+ toolId,
+ toolType,
+ ucFunction,
+ volume);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(Tool.class)
+ .add("app", app)
+ .add("connection", connection)
+ .add("description", description)
+ .add("genieSpace", genieSpace)
+ .add("id", id)
+ .add("knowledgeAssistant", knowledgeAssistant)
+ .add("name", name)
+ .add("toolId", toolId)
+ .add("toolType", toolType)
+ .add("ucFunction", ucFunction)
+ .add("volume", volume)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/UcFunction.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/UcFunction.java
new file mode 100755
index 000000000..58e617513
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/UcFunction.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.supervisoragents;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class UcFunction {
+ /** Full uc function name */
+ @JsonProperty("name")
+ private String name;
+
+ public UcFunction setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UcFunction that = (UcFunction) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UcFunction.class).add("name", name).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/UpdateSupervisorAgentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/UpdateSupervisorAgentRequest.java
new file mode 100755
index 000000000..74290346d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/UpdateSupervisorAgentRequest.java
@@ -0,0 +1,77 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.supervisoragents;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.protobuf.FieldMask;
+import java.util.Objects;
+
+@Generated
+public class UpdateSupervisorAgentRequest {
+ /** The resource name of the SupervisorAgent. Format: supervisor-agents/{supervisor_agent_id} */
+ @JsonIgnore private String name;
+
+ /** The SupervisorAgent to update. */
+ @JsonProperty("supervisor_agent")
+ private SupervisorAgent supervisorAgent;
+
+ /** Field mask for fields to be updated. */
+ @JsonIgnore
+ @QueryParam("update_mask")
+ private FieldMask updateMask;
+
+ public UpdateSupervisorAgentRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public UpdateSupervisorAgentRequest setSupervisorAgent(SupervisorAgent supervisorAgent) {
+ this.supervisorAgent = supervisorAgent;
+ return this;
+ }
+
+ public SupervisorAgent getSupervisorAgent() {
+ return supervisorAgent;
+ }
+
+ public UpdateSupervisorAgentRequest setUpdateMask(FieldMask updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public FieldMask getUpdateMask() {
+ return updateMask;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateSupervisorAgentRequest that = (UpdateSupervisorAgentRequest) o;
+ return Objects.equals(name, that.name)
+ && Objects.equals(supervisorAgent, that.supervisorAgent)
+ && Objects.equals(updateMask, that.updateMask);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name, supervisorAgent, updateMask);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateSupervisorAgentRequest.class)
+ .add("name", name)
+ .add("supervisorAgent", supervisorAgent)
+ .add("updateMask", updateMask)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/UpdateToolRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/UpdateToolRequest.java
new file mode 100755
index 000000000..027cf67bc
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/UpdateToolRequest.java
@@ -0,0 +1,77 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.supervisoragents;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.protobuf.FieldMask;
+import java.util.Objects;
+
+@Generated
+public class UpdateToolRequest {
+ /** Full resource name: supervisor-agents/{supervisor_agent_id}/tools/{tool_id} */
+ @JsonIgnore private String name;
+
+ /** The Tool to update. */
+ @JsonProperty("tool")
+ private Tool tool;
+
+ /** Field mask for fields to be updated. */
+ @JsonIgnore
+ @QueryParam("update_mask")
+ private FieldMask updateMask;
+
+ public UpdateToolRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public UpdateToolRequest setTool(Tool tool) {
+ this.tool = tool;
+ return this;
+ }
+
+ public Tool getTool() {
+ return tool;
+ }
+
+ public UpdateToolRequest setUpdateMask(FieldMask updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public FieldMask getUpdateMask() {
+ return updateMask;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateToolRequest that = (UpdateToolRequest) o;
+ return Objects.equals(name, that.name)
+ && Objects.equals(tool, that.tool)
+ && Objects.equals(updateMask, that.updateMask);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name, tool, updateMask);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateToolRequest.class)
+ .add("name", name)
+ .add("tool", tool)
+ .add("updateMask", updateMask)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Volume.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Volume.java
new file mode 100755
index 000000000..6c96aff1c
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Volume.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.supervisoragents;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class Volume {
+ /** Full uc volume name */
+ @JsonProperty("name")
+ private String name;
+
+ public Volume setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ Volume that = (Volume) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(Volume.class).add("name", name).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecResponse.java
index 9f2f17700..d8cbaf63a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecResponse.java
@@ -10,6 +10,14 @@
@Generated
public class DeltaSyncVectorIndexSpecResponse {
+ /**
+ * [Optional] Select the columns to sync with the vector index. If you leave this field blank, all
+ * columns from the source table are synced with the index. The primary key column and embedding
+ * source column or embedding vector column are always synced.
+ */
+ @JsonProperty("columns_to_sync")
+ private Collection Object deletion cannot be undone and deleting a directory recursively is not atomic.
*/
@@ -94,8 +94,8 @@ public ObjectInfo getStatus(String path) {
}
/**
- * Deprecated: use WorkspaceHierarchyService.GetTreeNode instead. Gets the status of an object or
- * a directory. If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`.
+ * Gets the status of an object or a directory. If `path` does not exist, this call returns an
+ * error `RESOURCE_DOES_NOT_EXIST`.
*/
public ObjectInfo getStatus(GetStatusRequest request) {
return impl.getStatus(request);
@@ -117,9 +117,8 @@ public Iterable Note that if this operation fails it may have succeeded in creating some of the necessary
* parent directories.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java
index a40367da1..c7705bcdf 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java
@@ -18,10 +18,10 @@
@Generated
public interface WorkspaceService {
/**
- * Deprecated: use WorkspaceHierarchyService.DeleteTreeNode instead. Deletes an object or a
- * directory (and optionally recursively deletes all objects in the directory). * If `path` does
- * not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. * If `path` is a non-empty
- * directory and `recursive` is set to `false`, this call returns an error `DIRECTORY_NOT_EMPTY`.
+ * Deletes an object or a directory (and optionally recursively deletes all objects in the
+ * directory). * If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. *
+ * If `path` is a non-empty directory and `recursive` is set to `false`, this call returns an
+ * error `DIRECTORY_NOT_EMPTY`.
*
* Object deletion cannot be undone and deleting a directory recursively is not atomic.
*/
@@ -49,8 +49,8 @@ WorkspaceObjectPermissions getPermissions(
GetWorkspaceObjectPermissionsRequest getWorkspaceObjectPermissionsRequest);
/**
- * Deprecated: use WorkspaceHierarchyService.GetTreeNode instead. Gets the status of an object or
- * a directory. If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`.
+ * Gets the status of an object or a directory. If `path` does not exist, this call returns an
+ * error `RESOURCE_DOES_NOT_EXIST`.
*/
ObjectInfo getStatus(GetStatusRequest getStatusRequest);
@@ -64,17 +64,15 @@ WorkspaceObjectPermissions getPermissions(
void importContent(Import importContent);
/**
- * Deprecated: use WorkspaceHierarchyService.ListTreeNodes instead. Lists the contents of a
- * directory, or the object if it is not a directory. If the input path does not exist, this call
- * returns an error `RESOURCE_DOES_NOT_EXIST`.
+ * Lists the contents of a directory, or the object if it is not a directory. If the input path
+ * does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`.
*/
ListResponse list(ListWorkspaceRequest listWorkspaceRequest);
/**
- * Deprecated: use WorkspaceHierarchyService.CreateTreeNode instead. Creates the specified
- * directory (and necessary parent directories if they do not exist). If there is an object (not a
- * directory) at any prefix of the input path, this call returns an error
- * `RESOURCE_ALREADY_EXISTS`.
+ * Creates the specified directory (and necessary parent directories if they do not exist). If
+ * there is an object (not a directory) at any prefix of the input path, this call returns an
+ * error `RESOURCE_ALREADY_EXISTS`.
*
* Note that if this operation fails it may have succeeded in creating some of the necessary
* parent directories.
diff --git a/tagging.py b/tagging.py
index 56e57781b..79f2894c6 100644
--- a/tagging.py
+++ b/tagging.py
@@ -18,6 +18,7 @@
CHANGELOG_FILE_NAME = "CHANGELOG.md"
PACKAGE_FILE_NAME = ".package.json"
CODEGEN_FILE_NAME = ".codegen.json"
+CREATED_TAGS_FILE_NAME = "created_tags.json"
"""
This script tags the release of the SDKs using a combination of the GitHub API and Git commands.
It reads the local repository to determine necessary changes, updates changelogs, and creates tags.
@@ -467,9 +468,29 @@ def update_changelogs(packages: List[Package]) -> List[TagInfo]:
def push_tags(tag_infos: List[TagInfo]) -> None:
"""
Creates and pushes tags to the repository.
+
+ As a side effect, writes the names of successfully created tags to
+ ``./created_tags.json`` so that workflows triggering this script can
+ discover what was produced (the GitHub Actions workflow uploads this
+ file as the ``created-tags`` artifact).
+
+ Schema:
+ {"tags": ["service-a/v1.2.3", "service-b/v0.4.0"]}
+
+ The manifest is written even if tag creation fails partway through:
+ tags that succeeded before the failure are flushed before the
+ exception is re-raised, so recovery-mode runs still surface their
+ output.
"""
- for tag_info in tag_infos:
- gh.tag(tag_info.tag_name(), tag_info.content)
+ created: List[str] = []
+ try:
+ for tag_info in tag_infos:
+ gh.tag(tag_info.tag_name(), tag_info.content)
+ created.append(tag_info.tag_name())
+ finally:
+ manifest_path = os.path.join(os.getcwd(), CREATED_TAGS_FILE_NAME)
+ with open(manifest_path, "w") as f:
+ json.dump({"tags": created}, f)
def run_command(command: List[str]) -> str: