diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 153783450..62d390339 100755 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -11ae6f9d98f0d0838a5e53c27032f178fecc4ee0 \ No newline at end of file +9e9cd2a1a802f6df10f3a5ffe6aa97b588d5884a \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index 6fc8f76f1..4710c40d0 100755 --- a/.gitattributes +++ b/.gitattributes @@ -319,6 +319,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreatePolic databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRegisteredModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRequestExternalLineage.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSchema.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSecretRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateStorageCredential.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateTableConstraint.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateTableRequest.java linguist-generated=true @@ -357,6 +358,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteQuali databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteRegisteredModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteRequestExternalLineage.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSchemaRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSecretRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteStorageCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableConstraintRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableRequest.java linguist-generated=true @@ -459,6 +461,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQuotaRes databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRefreshRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRegisteredModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSchemaRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSecretRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetStorageCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetWorkspaceBindingRequest.java linguist-generated=true @@ -501,6 +504,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRegiste databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRegisteredModelsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSecretsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSecretsResponse.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSummariesRequest.java linguist-generated=true @@ -596,6 +601,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo. databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Secret.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecretsUcAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecretsUcImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecretsUcService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Securable.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKindManifest.java linguist-generated=true @@ -662,6 +671,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePolic databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRegisteredModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRequestExternalLineage.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateSchema.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateSecretRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStorageCredential.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateVolumeRequestContent.java linguist-generated=true @@ -2668,11 +2678,13 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablem databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountSetting.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicy.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyAppsRuntimeDestination.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyAuthentication.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyAuthenticationIdentity.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyAuthenticationIdentityPrincipalType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyAuthenticationIdentityType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyIpRanges.java 
linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyLakebaseRuntimeDestination.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyPublicAccess.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyPublicAccessRestrictionMode.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyPublicIngressRule.java linguist-generated=true @@ -2947,6 +2959,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePers databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePrivateEndpointRule.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateRestrictWorkspaceAdminsSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateSqlResultsDownloadRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateTokenRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateTokenResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateWorkspaceNetworkOptionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceConfAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceConfImpl.java linguist-generated=true @@ -3341,6 +3355,29 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesServi databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Widget.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WidgetOptions.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WidgetPosition.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/App.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Connection.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/CreateSupervisorAgentRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/CreateToolRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/DeleteSupervisorAgentRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/DeleteToolRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/GenieSpace.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/GetSupervisorAgentRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/GetToolRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/KnowledgeAssistant.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/ListSupervisorAgentsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/ListSupervisorAgentsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/ListToolsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/ListToolsResponse.java 
linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/SupervisorAgent.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/SupervisorAgentsAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/SupervisorAgentsImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/SupervisorAgentsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Tool.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/UcFunction.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/UpdateSupervisorAgentRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/UpdateToolRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Volume.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/CreateTagAssignmentRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/CreateTagPolicyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/DeleteTagAssignmentRequest.java linguist-generated=true diff --git a/.github/workflows/next-changelog.yml b/.github/workflows/next-changelog.yml index 847aadb8e..475157d1f 100755 --- a/.github/workflows/next-changelog.yml +++ b/.github/workflows/next-changelog.yml @@ -11,8 +11,8 @@ jobs: # Allow Dependabot PRs to pass without a changelog entry if: github.actor != 'dependabot[bot]' runs-on: - group: databricks-deco-testing-runner-group - labels: ubuntu-latest-deco + group: databricks-protected-runner-group + labels: linux-ubuntu-latest 
steps: - name: Checkout code diff --git a/.github/workflows/tagging.yml b/.github/workflows/tagging.yml index c3c6d2d00..e22f794e0 100755 --- a/.github/workflows/tagging.yml +++ b/.github/workflows/tagging.yml @@ -32,8 +32,8 @@ jobs: github.repository == 'databricks/databricks-sdk-java' environment: "release-is" runs-on: - group: databricks-deco-testing-runner-group - labels: ubuntu-latest-deco + group: databricks-protected-runner-group + labels: linux-ubuntu-latest steps: - name: Generate GitHub App Token id: generate-token @@ -62,3 +62,11 @@ jobs: GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }} GITHUB_REPOSITORY: ${{ github.repository }} run: uv run --locked tagging.py + + - name: Upload created tags artifact + if: always() + uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0 + with: + name: created-tags + path: created_tags.json + if-no-files-found: ignore diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index b25d748cf..bc7d251d6 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -20,3 +20,27 @@ * Migrated internal SDK classes to the logging abstraction. The SDK now supports SLF4J, `java.util.logging`, or a custom backend via `LoggerFactory.setDefault()`. ### API Changes +* Add `com.databricks.sdk.service.supervisoragents` package. +* Add `workspaceClient.secretsUc()` service. +* Add `workspaceClient.supervisorAgents()` service. +* Add `update()` method for `workspaceClient.tokens()` service. +* Add `etag` field for `com.databricks.sdk.service.dashboards.GenieSpace`. +* Add `etag` field for `com.databricks.sdk.service.dashboards.GenieUpdateSpaceRequest`. +* Add `branchId` field for `com.databricks.sdk.service.postgres.BranchStatus`. +* Add `catalogId` field for `com.databricks.sdk.service.postgres.CatalogCatalogStatus`. +* Add `databaseId` field for `com.databricks.sdk.service.postgres.DatabaseDatabaseStatus`. +* Add `endpointId` field for `com.databricks.sdk.service.postgres.EndpointStatus`. 
+* Add `projectId` field for `com.databricks.sdk.service.postgres.ProjectStatus`. +* Add `roleId` field for `com.databricks.sdk.service.postgres.RoleRoleStatus`. +* Add `project` field for `com.databricks.sdk.service.postgres.SyncedTableSyncedTableStatus`. +* Add `manual` field for `com.databricks.sdk.service.provisioning.CreateGcpKeyInfo`. +* Add `manual` field for `com.databricks.sdk.service.provisioning.GcpKeyInfo`. +* Add `appsRuntime` and `lakebaseRuntime` fields for `com.databricks.sdk.service.settings.CustomerFacingIngressNetworkPolicyRequestDestination`. +* Add `blockedInternetDestinations` field for `com.databricks.sdk.service.settings.EgressNetworkPolicyNetworkAccessPolicy`. +* Add `columnsToSync` field for `com.databricks.sdk.service.vectorsearch.DeltaSyncVectorIndexSpecResponse`. +* Add `BREAKING_CHANGE` enum value for `com.databricks.sdk.service.jobs.TerminationCodeCode`. +* [Breaking] Change `updateCatalogConfig()` method for `workspaceClient.dataClassification()` service. Method path has changed. +* [Breaking] Change `updateDefaultWorkspaceBaseEnvironment()` method for `workspaceClient.environments()` service. Method path has changed. +* [Breaking] Change `updateKnowledgeAssistant()` method for `workspaceClient.knowledgeAssistants()` service. Method path has changed. +* [Breaking] Change `updateBranch()`, `updateDatabase()`, `updateEndpoint()`, `updateProject()` and `updateRole()` methods for `workspaceClient.postgres()` service. Method path has changed. +* [Breaking] Change `updateDefaultWarehouseOverride()` method for `workspaceClient.warehouses()` service. Method path has changed. 
\ No newline at end of file diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java index 2a9349cf0..8e3b34e3a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java @@ -52,6 +52,8 @@ import com.databricks.sdk.service.catalog.RfaService; import com.databricks.sdk.service.catalog.SchemasAPI; import com.databricks.sdk.service.catalog.SchemasService; +import com.databricks.sdk.service.catalog.SecretsUcAPI; +import com.databricks.sdk.service.catalog.SecretsUcService; import com.databricks.sdk.service.catalog.StorageCredentialsAPI; import com.databricks.sdk.service.catalog.StorageCredentialsService; import com.databricks.sdk.service.catalog.SystemSchemasAPI; @@ -246,6 +248,8 @@ import com.databricks.sdk.service.sql.StatementExecutionService; import com.databricks.sdk.service.sql.WarehousesAPI; import com.databricks.sdk.service.sql.WarehousesService; +import com.databricks.sdk.service.supervisoragents.SupervisorAgentsAPI; +import com.databricks.sdk.service.supervisoragents.SupervisorAgentsService; import com.databricks.sdk.service.tags.TagPoliciesAPI; import com.databricks.sdk.service.tags.TagPoliciesService; import com.databricks.sdk.service.tags.WorkspaceEntityTagAssignmentsAPI; @@ -367,6 +371,7 @@ public class WorkspaceClient { private RfaAPI rfaAPI; private SchemasAPI schemasAPI; private SecretsExt secretsAPI; + private SecretsUcAPI secretsUcAPI; private ServicePrincipalSecretsProxyAPI servicePrincipalSecretsProxyAPI; private ServicePrincipalsV2API servicePrincipalsV2API; private ServingEndpointsAPI servingEndpointsAPI; @@ -375,6 +380,7 @@ public class WorkspaceClient { private SharesAPI sharesAPI; private StatementExecutionAPI statementExecutionAPI; private StorageCredentialsAPI storageCredentialsAPI; + private SupervisorAgentsAPI supervisorAgentsAPI; 
private SystemSchemasAPI systemSchemasAPI; private TableConstraintsAPI tableConstraintsAPI; private TablesAPI tablesAPI; @@ -503,6 +509,7 @@ public WorkspaceClient(DatabricksConfig config) { rfaAPI = new RfaAPI(apiClient); schemasAPI = new SchemasAPI(apiClient); secretsAPI = new SecretsExt(apiClient); + secretsUcAPI = new SecretsUcAPI(apiClient); servicePrincipalSecretsProxyAPI = new ServicePrincipalSecretsProxyAPI(apiClient); servicePrincipalsV2API = new ServicePrincipalsV2API(apiClient); servingEndpointsAPI = new ServingEndpointsAPI(apiClient); @@ -512,6 +519,7 @@ public WorkspaceClient(DatabricksConfig config) { sharesAPI = new SharesAPI(apiClient); statementExecutionAPI = new StatementExecutionAPI(apiClient); storageCredentialsAPI = new StorageCredentialsAPI(apiClient); + supervisorAgentsAPI = new SupervisorAgentsAPI(apiClient); systemSchemasAPI = new SystemSchemasAPI(apiClient); tableConstraintsAPI = new TableConstraintsAPI(apiClient); tablesAPI = new TablesAPI(apiClient); @@ -1005,10 +1013,6 @@ public FeatureStoreAPI featureStore() { * HTTP methods GET, HEAD, PUT, and DELETE to manage files and directories specified using their * URI path. The path is always absolute. * - *

Some Files API client features are currently experimental. To enable them, set - * `enable_experimental_files_api_client = True` in your configuration profile or use the - * environment variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`. - * *

Use of Files API may incur Databricks data transfer charges. * *

[Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html @@ -1298,8 +1302,8 @@ public PermissionMigrationAPI permissionMigration() { * which users can manage or use apps. * **[Cluster permissions](:service:clusters)** — Manage * which users can manage, restart, or attach to clusters. * **[Cluster policy * permissions](:service:clusterpolicies)** — Manage which users can use cluster policies. * - * **[Delta Live Tables pipeline permissions](:service:pipelines)** — Manage which users can view, - * manage, run, cancel, or own a Delta Live Tables pipeline. * **[Job + * **[Spark Declarative Pipelines permissions](:service:pipelines)** — Manage which users can + * view, manage, run, cancel, or own a Spark Declarative Pipeline. * **[Job * permissions](:service:jobs)** — Manage which users can view, manage, trigger, cancel, or own a * job. * **[MLflow experiment permissions](:service:experiments)** — Manage which users can read, * edit, or manage MLflow experiments. * **[MLflow registered model @@ -1712,6 +1716,18 @@ public SecretsExt secrets() { return secretsAPI; } + /** + * A secret is a Unity Catalog securable object that stores sensitive credential data (such as + * passwords, tokens, and keys) within a three-level namespace + * (**catalog_name.schema_name.secret_name**). + * + *

Secrets can be managed using standard Unity Catalog permissions and are scoped to a schema + * within a catalog. + */ + public SecretsUcAPI secretsUc() { + return secretsUcAPI; + } + /** * These APIs enable administrators to manage service principal secrets at the workspace level. To * use these APIs, the service principal must be first added to the current workspace. @@ -1897,6 +1913,11 @@ public StorageCredentialsAPI storageCredentials() { return storageCredentialsAPI; } + /** Manage Supervisor Agents and related resources. */ + public SupervisorAgentsAPI supervisorAgents() { + return supervisorAgentsAPI; + } + /** * A system schema is a schema that lives within the system catalog. A system schema may contain * information about customer usage of Unity Catalog such as audit-logs, billing-logs, lineage @@ -1963,10 +1984,10 @@ public TagPoliciesAPI tagPolicies() { * a metastore admin needs to enable the external_access_enabled flag (off by default) at the * metastore level. A user needs to be granted the EXTERNAL USE LOCATION permission by external * location owner. For requests on existing external tables, user also needs to be granted the - * EXTERNAL USE SCHEMA permission at the schema level by catalog admin. + * EXTERNAL USE SCHEMA permission at the schema level by catalog owner. * *

Note that EXTERNAL USE SCHEMA is a schema level permission that can only be granted by - * catalog admin explicitly and is not included in schema ownership or ALL PRIVILEGES on the + * catalog owner explicitly and is not included in schema ownership or ALL PRIVILEGES on the * schema for security reasons. Similarly, EXTERNAL USE LOCATION is an external location level * permission that can only be granted by external location owner explicitly and is not included * in external location ownership or ALL PRIVILEGES on the external location for security reasons. @@ -1990,8 +2011,8 @@ public TemporaryPathCredentialsAPI temporaryPathCredentials() { * reducing the risk of unauthorized access or misuse. To use the temporary table credentials API, * a metastore admin needs to enable the external_access_enabled flag (off by default) at the * metastore level, and user needs to be granted the EXTERNAL USE SCHEMA permission at the schema - * level by catalog admin. Note that EXTERNAL USE SCHEMA is a schema level permission that can - * only be granted by catalog admin explicitly and is not included in schema ownership or ALL + * level by catalog owner. Note that EXTERNAL USE SCHEMA is a schema level permission that can + * only be granted by catalog owner explicitly and is not included in schema ownership or ALL * PRIVILEGES on the schema for security reasons. */ public TemporaryTableCredentialsAPI temporaryTableCredentials() { @@ -3293,6 +3314,17 @@ public WorkspaceClient withSecretsAPI(SecretsExt secrets) { return this; } + /** Replace the default SecretsUcService with a custom implementation. */ + public WorkspaceClient withSecretsUcImpl(SecretsUcService secretsUc) { + return this.withSecretsUcAPI(new SecretsUcAPI(secretsUc)); + } + + /** Replace the default SecretsUcAPI with a custom implementation. 
*/ + public WorkspaceClient withSecretsUcAPI(SecretsUcAPI secretsUc) { + this.secretsUcAPI = secretsUc; + return this; + } + /** Replace the default ServicePrincipalSecretsProxyService with a custom implementation. */ public WorkspaceClient withServicePrincipalSecretsProxyImpl( ServicePrincipalSecretsProxyService servicePrincipalSecretsProxy) { @@ -3388,6 +3420,17 @@ public WorkspaceClient withStorageCredentialsAPI(StorageCredentialsAPI storageCr return this; } + /** Replace the default SupervisorAgentsService with a custom implementation. */ + public WorkspaceClient withSupervisorAgentsImpl(SupervisorAgentsService supervisorAgents) { + return this.withSupervisorAgentsAPI(new SupervisorAgentsAPI(supervisorAgents)); + } + + /** Replace the default SupervisorAgentsAPI with a custom implementation. */ + public WorkspaceClient withSupervisorAgentsAPI(SupervisorAgentsAPI supervisorAgents) { + this.supervisorAgentsAPI = supervisorAgents; + return this; + } + /** Replace the default SystemSchemasService with a custom implementation. */ public WorkspaceClient withSystemSchemasImpl(SystemSchemasService systemSchemas) { return this.withSystemSchemasAPI(new SystemSchemasAPI(systemSchemas)); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java index 4b5c6bb3e..8784fa394 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java @@ -8,7 +8,6 @@ import java.util.Map; import java.util.Objects; -/** Next ID: 25 */ @Generated public class ConnectionInfo { /** User-provided free-form text description. 
*/ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java index a289a2e5a..8136de14c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java @@ -4,7 +4,7 @@ import com.databricks.sdk.support.Generated; -/** Next Id: 75 */ +/** Next Id: 77 */ @Generated public enum ConnectionType { BIGQUERY, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSecretRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSecretRequest.java new file mode 100755 index 000000000..f62a5fb62 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSecretRequest.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateSecretRequest { + /** + * The secret object to create. The **name**, **catalog_name**, **schema_name**, and **value** + * fields are required. 
+ */ + @JsonProperty("secret") + private Secret secret; + + public CreateSecretRequest setSecret(Secret secret) { + this.secret = secret; + return this; + } + + public Secret getSecret() { + return secret; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateSecretRequest that = (CreateSecretRequest) o; + return Objects.equals(secret, that.secret); + } + + @Override + public int hashCode() { + return Objects.hash(secret); + } + + @Override + public String toString() { + return new ToStringer(CreateSecretRequest.class).add("secret", secret).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSecretRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSecretRequest.java new file mode 100755 index 000000000..6ad224c23 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSecretRequest.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class DeleteSecretRequest { + /** + * The three-level (fully qualified) name of the secret (for example, + * **catalog_name.schema_name.secret_name**). 
+ */ + @JsonIgnore private String fullName; + + public DeleteSecretRequest setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteSecretRequest that = (DeleteSecretRequest) o; + return Objects.equals(fullName, that.fullName); + } + + @Override + public int hashCode() { + return Objects.hash(fullName); + } + + @Override + public String toString() { + return new ToStringer(DeleteSecretRequest.class).add("fullName", fullName).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Dependency.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Dependency.java index 11a937f7d..00f9f76e5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Dependency.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Dependency.java @@ -9,7 +9,7 @@ /** * A dependency of a SQL object. One of the following fields must be defined: __table__, - * __function__, __connection__, or __credential__. + * __function__, __connection__, __credential__, __volume__, or __secret__. */ @Generated public class Dependency { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSecretRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSecretRequest.java new file mode 100755 index 000000000..2e520e4c5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSecretRequest.java @@ -0,0 +1,66 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GetSecretRequest { + /** + * The three-level (fully qualified) name of the secret (for example, + * **catalog_name.schema_name.secret_name**). + */ + @JsonIgnore private String fullName; + + /** + * Whether to include secrets in the response for which you only have the **BROWSE** privilege, + * which limits access to metadata. + */ + @JsonIgnore + @QueryParam("include_browse") + private Boolean includeBrowse; + + public GetSecretRequest setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + public GetSecretRequest setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetSecretRequest that = (GetSecretRequest) o; + return Objects.equals(fullName, that.fullName) + && Objects.equals(includeBrowse, that.includeBrowse); + } + + @Override + public int hashCode() { + return Objects.hash(fullName, includeBrowse); + } + + @Override + public String toString() { + return new ToStringer(GetSecretRequest.class) + .add("fullName", fullName) + .add("includeBrowse", includeBrowse) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSecretsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSecretsRequest.java new file mode 100755 index 000000000..723c6f626 --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSecretsRequest.java @@ -0,0 +1,128 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class ListSecretsRequest { + /** + * The name of the catalog under which to list secrets. Both **catalog_name** and **schema_name** + * must be specified together. + */ + @JsonIgnore + @QueryParam("catalog_name") + private String catalogName; + + /** + * Whether to include secrets in the response for which you only have the **BROWSE** privilege, + * which limits access to metadata. + */ + @JsonIgnore + @QueryParam("include_browse") + private Boolean includeBrowse; + + /** + * Maximum number of secrets to return. + * + *

- If not specified, at most 10000 secrets are returned. - If set to a value greater than 0, + * the page length is the minimum of this value and 10000. - If set to 0, the page length is set + * to 10000. - If set to a value less than 0, an invalid parameter error is returned. + */ + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + /** + * Opaque pagination token to go to the next page based on previous query. The maximum page length + * is determined by a server configured value. + */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + /** + * The name of the schema under which to list secrets. Both **catalog_name** and **schema_name** + * must be specified together. + */ + @JsonIgnore + @QueryParam("schema_name") + private String schemaName; + + public ListSecretsRequest setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public String getCatalogName() { + return catalogName; + } + + public ListSecretsRequest setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + + public ListSecretsRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListSecretsRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public ListSecretsRequest setSchemaName(String schemaName) { + this.schemaName = schemaName; + return this; + } + + public String getSchemaName() { + return schemaName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListSecretsRequest that = (ListSecretsRequest) o; + return Objects.equals(catalogName, that.catalogName) + && Objects.equals(includeBrowse, that.includeBrowse) + && 
Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(schemaName, that.schemaName); + } + + @Override + public int hashCode() { + return Objects.hash(catalogName, includeBrowse, pageSize, pageToken, schemaName); + } + + @Override + public String toString() { + return new ToStringer(ListSecretsRequest.class) + .add("catalogName", catalogName) + .add("includeBrowse", includeBrowse) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .add("schemaName", schemaName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSecretsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSecretsResponse.java new file mode 100755 index 000000000..96b81a1e6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSecretsResponse.java @@ -0,0 +1,64 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Response message for ListSecrets. */ +@Generated +public class ListSecretsResponse { + /** + * Opaque token to retrieve the next page of results. Absent if there are no more pages. + * **page_token** should be set to this value for the next request. + */ + @JsonProperty("next_page_token") + private String nextPageToken; + + /** An array of secret objects. 
*/ + @JsonProperty("secrets") + private Collection secrets; + + public ListSecretsResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListSecretsResponse setSecrets(Collection secrets) { + this.secrets = secrets; + return this; + } + + public Collection getSecrets() { + return secrets; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListSecretsResponse that = (ListSecretsResponse) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(secrets, that.secrets); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, secrets); + } + + @Override + public String toString() { + return new ToStringer(ListSecretsResponse.class) + .add("nextPageToken", nextPageToken) + .add("secrets", secrets) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Secret.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Secret.java new file mode 100755 index 000000000..a20759b2c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Secret.java @@ -0,0 +1,331 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.protobuf.Timestamp; +import java.util.Objects; + +/** + * A secret stored in Unity Catalog. Secrets are three-level namespace objects + * (catalog.schema.secret) that securely store sensitive credential data such as passwords, tokens, + * and keys. 
+ */ +@Generated +public class Secret { + /** + * Indicates whether the principal is limited to retrieving metadata for the associated object + * through the **BROWSE** privilege when **include_browse** is enabled in the request. + */ + @JsonProperty("browse_only") + private Boolean browseOnly; + + /** The name of the catalog where the schema and the secret reside. */ + @JsonProperty("catalog_name") + private String catalogName; + + /** User-provided free-form text description of the secret. */ + @JsonProperty("comment") + private String comment; + + /** The time at which this secret was created. */ + @JsonProperty("create_time") + private Timestamp createTime; + + /** The principal that created the secret. */ + @JsonProperty("created_by") + private String createdBy; + + /** + * The effective owner of the secret, which may differ from the directly-set **owner** due to + * inheritance. + */ + @JsonProperty("effective_owner") + private String effectiveOwner; + + /** + * The secret value. Only populated in responses when you have the **READ_SECRET** privilege and + * **include_value** is set to true in the request. The maximum size is 60 KiB. + */ + @JsonProperty("effective_value") + private String effectiveValue; + + /** + * User-provided expiration time of the secret. This field indicates when the secret should no + * longer be used and may be displayed as a warning in the UI. It is purely informational and does + * not trigger any automatic actions or affect the secret's lifecycle. + */ + @JsonProperty("expire_time") + private Timestamp expireTime; + + /** */ + @JsonProperty("external_secret_id") + private String externalSecretId; + + /** + * The three-level (fully qualified) name of the secret, in the form of + * **catalog_name.schema_name.secret_name**. + */ + @JsonProperty("full_name") + private String fullName; + + /** Unique identifier of the metastore hosting the secret. 
*/ + @JsonProperty("metastore_id") + private String metastoreId; + + /** The name of the secret, relative to its parent schema. */ + @JsonProperty("name") + private String name; + + /** + * The owner of the secret. Defaults to the creating principal on creation. Can be updated to + * transfer ownership of the secret to another principal. + */ + @JsonProperty("owner") + private String owner; + + /** The name of the schema where the secret resides. */ + @JsonProperty("schema_name") + private String schemaName; + + /** The time at which this secret was last updated. */ + @JsonProperty("update_time") + private Timestamp updateTime; + + /** The principal that last updated the secret. */ + @JsonProperty("updated_by") + private String updatedBy; + + /** + * The secret value to store. This field is input-only and is not returned in responses — use the + * **effective_value** field (via GetSecret with **include_value** set to true) to read the secret + * value. The maximum size is 60 KiB (pre-encryption). Accepted content includes passwords, + * tokens, keys, and other sensitive credential data. 
+ */ + @JsonProperty("value") + private String value; + + public Secret setBrowseOnly(Boolean browseOnly) { + this.browseOnly = browseOnly; + return this; + } + + public Boolean getBrowseOnly() { + return browseOnly; + } + + public Secret setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public String getCatalogName() { + return catalogName; + } + + public Secret setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public Secret setCreateTime(Timestamp createTime) { + this.createTime = createTime; + return this; + } + + public Timestamp getCreateTime() { + return createTime; + } + + public Secret setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + + public Secret setEffectiveOwner(String effectiveOwner) { + this.effectiveOwner = effectiveOwner; + return this; + } + + public String getEffectiveOwner() { + return effectiveOwner; + } + + public Secret setEffectiveValue(String effectiveValue) { + this.effectiveValue = effectiveValue; + return this; + } + + public String getEffectiveValue() { + return effectiveValue; + } + + public Secret setExpireTime(Timestamp expireTime) { + this.expireTime = expireTime; + return this; + } + + public Timestamp getExpireTime() { + return expireTime; + } + + public Secret setExternalSecretId(String externalSecretId) { + this.externalSecretId = externalSecretId; + return this; + } + + public String getExternalSecretId() { + return externalSecretId; + } + + public Secret setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + public Secret setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public Secret setName(String name) { + this.name = 
name; + return this; + } + + public String getName() { + return name; + } + + public Secret setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + public Secret setSchemaName(String schemaName) { + this.schemaName = schemaName; + return this; + } + + public String getSchemaName() { + return schemaName; + } + + public Secret setUpdateTime(Timestamp updateTime) { + this.updateTime = updateTime; + return this; + } + + public Timestamp getUpdateTime() { + return updateTime; + } + + public Secret setUpdatedBy(String updatedBy) { + this.updatedBy = updatedBy; + return this; + } + + public String getUpdatedBy() { + return updatedBy; + } + + public Secret setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Secret that = (Secret) o; + return Objects.equals(browseOnly, that.browseOnly) + && Objects.equals(catalogName, that.catalogName) + && Objects.equals(comment, that.comment) + && Objects.equals(createTime, that.createTime) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(effectiveOwner, that.effectiveOwner) + && Objects.equals(effectiveValue, that.effectiveValue) + && Objects.equals(expireTime, that.expireTime) + && Objects.equals(externalSecretId, that.externalSecretId) + && Objects.equals(fullName, that.fullName) + && Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(name, that.name) + && Objects.equals(owner, that.owner) + && Objects.equals(schemaName, that.schemaName) + && Objects.equals(updateTime, that.updateTime) + && Objects.equals(updatedBy, that.updatedBy) + && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash( + browseOnly, + catalogName, + comment, + createTime, + createdBy, + effectiveOwner, + 
effectiveValue, + expireTime, + externalSecretId, + fullName, + metastoreId, + name, + owner, + schemaName, + updateTime, + updatedBy, + value); + } + + @Override + public String toString() { + return new ToStringer(Secret.class) + .add("browseOnly", browseOnly) + .add("catalogName", catalogName) + .add("comment", comment) + .add("createTime", createTime) + .add("createdBy", createdBy) + .add("effectiveOwner", effectiveOwner) + .add("effectiveValue", effectiveValue) + .add("expireTime", expireTime) + .add("externalSecretId", externalSecretId) + .add("fullName", fullName) + .add("metastoreId", metastoreId) + .add("name", name) + .add("owner", owner) + .add("schemaName", schemaName) + .add("updateTime", updateTime) + .add("updatedBy", updatedBy) + .add("value", value) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecretsUcAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecretsUcAPI.java new file mode 100755 index 000000000..6a951567b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecretsUcAPI.java @@ -0,0 +1,117 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * A secret is a Unity Catalog securable object that stores sensitive credential data (such as + * passwords, tokens, and keys) within a three-level namespace + * (**catalog_name.schema_name.secret_name**). + * + *

Secrets can be managed using standard Unity Catalog permissions and are scoped to a schema + * within a catalog. + */ +@Generated +public class SecretsUcAPI { + private static final Logger LOG = LoggerFactory.getLogger(SecretsUcAPI.class); + + private final SecretsUcService impl; + + /** Regular-use constructor */ + public SecretsUcAPI(ApiClient apiClient) { + impl = new SecretsUcImpl(apiClient); + } + + /** Constructor for mocks */ + public SecretsUcAPI(SecretsUcService mock) { + impl = mock; + } + + /** + * Creates a new secret in Unity Catalog. + * + *

You must be the owner of the parent schema or have the **CREATE_SECRET** and **USE SCHEMA** + * privileges on the parent schema and **USE CATALOG** on the parent catalog. + * + *

The secret is stored in the specified catalog and schema, and the **value** field contains + * the sensitive data to be securely stored. + */ + public Secret createSecret(CreateSecretRequest request) { + return impl.createSecret(request); + } + + public void deleteSecret(String fullName) { + deleteSecret(new DeleteSecretRequest().setFullName(fullName)); + } + + /** + * Deletes a secret by its three-level (fully qualified) name. + * + *

You must be the owner of the secret or a metastore admin. + */ + public void deleteSecret(DeleteSecretRequest request) { + impl.deleteSecret(request); + } + + public Secret getSecret(String fullName) { + return getSecret(new GetSecretRequest().setFullName(fullName)); + } + + /** + * Gets a secret by its three-level (fully qualified) name. + * + *

You must be a metastore admin, the owner of the secret, or have the **MANAGE** privilege on + * the secret. + * + *

The secret value isn't returned by default. To retrieve it, you must also have the + * **READ_SECRET** privilege and set **include_value** to true in the request. + */ + public Secret getSecret(GetSecretRequest request) { + return impl.getSecret(request); + } + + /** + * Lists secrets in Unity Catalog. + * + *

You must be a metastore admin, the owner of the secret, or have the **MANAGE** privilege on + * the secret. + * + *

Both **catalog_name** and **schema_name** must be specified together to filter secrets + * within a specific schema. Results are paginated; use the **page_token** field from the response + * to retrieve subsequent pages. + */ + public Iterable listSecrets(ListSecretsRequest request) { + return new Paginator<>( + request, + impl::listSecrets, + ListSecretsResponse::getSecrets, + response -> { + String token = response.getNextPageToken(); + if (token == null || token.isEmpty()) { + return null; + } + return request.setPageToken(token); + }); + } + + /** + * Updates an existing secret in Unity Catalog. + * + *

You must be the owner of the secret or a metastore admin. If you are a metastore admin, only + * the **owner** field can be changed. + * + *

Use the **update_mask** field to specify which fields to update. Supported updatable fields + * include **value**, **comment**, **owner**, and **expire_time**. + */ + public Secret updateSecret(UpdateSecretRequest request) { + return impl.updateSecret(request); + } + + public SecretsUcService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecretsUcImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecretsUcImpl.java new file mode 100755 index 000000000..5d8de8806 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecretsUcImpl.java @@ -0,0 +1,105 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.support.Generated; +import java.io.IOException; + +/** Package-local implementation of SecretsUc */ +@Generated +class SecretsUcImpl implements SecretsUcService { + private final ApiClient apiClient; + + public SecretsUcImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public Secret createSecret(CreateSecretRequest request) { + String path = "/api/2.1/unity-catalog/secrets"; + try { + Request req = new Request("POST", path, apiClient.serialize(request.getSecret())); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } + return apiClient.execute(req, Secret.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void deleteSecret(DeleteSecretRequest request) { + String path = 
String.format("/api/2.1/unity-catalog/secrets/%s", request.getFullName()); + try { + Request req = new Request("DELETE", path); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } + apiClient.execute(req, Void.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public Secret getSecret(GetSecretRequest request) { + String path = String.format("/api/2.1/unity-catalog/secrets/%s", request.getFullName()); + try { + Request req = new Request("GET", path); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } + return apiClient.execute(req, Secret.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ListSecretsResponse listSecrets(ListSecretsRequest request) { + String path = "/api/2.1/unity-catalog/secrets"; + try { + Request req = new Request("GET", path); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } + return apiClient.execute(req, ListSecretsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public Secret updateSecret(UpdateSecretRequest request) { + String path = String.format("/api/2.1/unity-catalog/secrets/%s", request.getFullName()); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request.getSecret())); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + if (apiClient.workspaceId() != 
null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } + return apiClient.execute(req, Secret.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecretsUcService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecretsUcService.java new file mode 100755 index 000000000..8e753c35b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecretsUcService.java @@ -0,0 +1,71 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; + +/** + * A secret is a Unity Catalog securable object that stores sensitive credential data (such as + * passwords, tokens, and keys) within a three-level namespace + * (**catalog_name.schema_name.secret_name**). + * + *

Secrets can be managed using standard Unity Catalog permissions and are scoped to a schema + * within a catalog. + * + *

This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface SecretsUcService { + /** + * Creates a new secret in Unity Catalog. + * + *

You must be the owner of the parent schema or have the **CREATE_SECRET** and **USE SCHEMA** + * privileges on the parent schema and **USE CATALOG** on the parent catalog. + * + *

The secret is stored in the specified catalog and schema, and the **value** field contains + * the sensitive data to be securely stored. + */ + Secret createSecret(CreateSecretRequest createSecretRequest); + + /** + * Deletes a secret by its three-level (fully qualified) name. + * + *

You must be the owner of the secret or a metastore admin. + */ + void deleteSecret(DeleteSecretRequest deleteSecretRequest); + + /** + * Gets a secret by its three-level (fully qualified) name. + * + *

You must be a metastore admin, the owner of the secret, or have the **MANAGE** privilege on + * the secret. + * + *

The secret value isn't returned by default. To retrieve it, you must also have the + * **READ_SECRET** privilege and set **include_value** to true in the request. + */ + Secret getSecret(GetSecretRequest getSecretRequest); + + /** + * Lists secrets in Unity Catalog. + * + *

You must be a metastore admin, the owner of the secret, or have the **MANAGE** privilege on + * the secret. + * + *

Both **catalog_name** and **schema_name** must be specified together to filter secrets + * within a specific schema. Results are paginated; use the **page_token** field from the response + * to retrieve subsequent pages. + */ + ListSecretsResponse listSecrets(ListSecretsRequest listSecretsRequest); + + /** + * Updates an existing secret in Unity Catalog. + * + *

You must be the owner of the secret or a metastore admin. If you are a metastore admin, only + * the **owner** field can be changed. + * + *

Use the **update_mask** field to specify which fields to update. Supported updatable fields + * include **value**, **comment**, **owner**, and **expire_time**. + */ + Secret updateSecret(UpdateSecretRequest updateSecretRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java index 9db62bb12..bebea1485 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java @@ -4,7 +4,7 @@ import com.databricks.sdk.support.Generated; -/** Latest kind: CONNECTION_VEEVA_VAULT_OAUTH_M2M = 311; Next id: 312 */ +/** Latest kind: ENDPOINT_LLM_PROVIDER = 317; Next id: 318 */ @Generated public enum SecurableKind { TABLE_DB_STORAGE, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryPathCredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryPathCredentialsAPI.java index a8d040a53..933a88c22 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryPathCredentialsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryPathCredentialsAPI.java @@ -19,10 +19,10 @@ * admin needs to enable the external_access_enabled flag (off by default) at the metastore level. A * user needs to be granted the EXTERNAL USE LOCATION permission by external location owner. For * requests on existing external tables, user also needs to be granted the EXTERNAL USE SCHEMA - * permission at the schema level by catalog admin. + * permission at the schema level by catalog owner. * *

Note that EXTERNAL USE SCHEMA is a schema level permission that can only be granted by catalog - * admin explicitly and is not included in schema ownership or ALL PRIVILEGES on the schema for + * owner explicitly and is not included in schema ownership or ALL PRIVILEGES on the schema for * security reasons. Similarly, EXTERNAL USE LOCATION is an external location level permission that * can only be granted by external location owner explicitly and is not included in external * location ownership or ALL PRIVILEGES on the external location for security reasons. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryPathCredentialsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryPathCredentialsService.java index 9a43feb56..4d1b81ece 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryPathCredentialsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryPathCredentialsService.java @@ -16,10 +16,10 @@ * admin needs to enable the external_access_enabled flag (off by default) at the metastore level. A * user needs to be granted the EXTERNAL USE LOCATION permission by external location owner. For * requests on existing external tables, user also needs to be granted the EXTERNAL USE SCHEMA - * permission at the schema level by catalog admin. + * permission at the schema level by catalog owner. * *

Note that EXTERNAL USE SCHEMA is a schema level permission that can only be granted by catalog - * admin explicitly and is not included in schema ownership or ALL PRIVILEGES on the schema for + * owner explicitly and is not included in schema ownership or ALL PRIVILEGES on the schema for * security reasons. Similarly, EXTERNAL USE LOCATION is an external location level permission that * can only be granted by external location owner explicitly and is not included in external * location ownership or ALL PRIVILEGES on the external location for security reasons. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsAPI.java index 2d7da4059..5a53f265a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsAPI.java @@ -18,8 +18,8 @@ * the risk of unauthorized access or misuse. To use the temporary table credentials API, a * metastore admin needs to enable the external_access_enabled flag (off by default) at the * metastore level, and user needs to be granted the EXTERNAL USE SCHEMA permission at the schema - * level by catalog admin. Note that EXTERNAL USE SCHEMA is a schema level permission that can only - * be granted by catalog admin explicitly and is not included in schema ownership or ALL PRIVILEGES + * level by catalog owner. Note that EXTERNAL USE SCHEMA is a schema level permission that can only + * be granted by catalog owner explicitly and is not included in schema ownership or ALL PRIVILEGES * on the schema for security reasons. 
*/ @Generated diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsService.java index 10a02b1cd..f5057721c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsService.java @@ -15,8 +15,8 @@ * the risk of unauthorized access or misuse. To use the temporary table credentials API, a * metastore admin needs to enable the external_access_enabled flag (off by default) at the * metastore level, and user needs to be granted the EXTERNAL USE SCHEMA permission at the schema - * level by catalog admin. Note that EXTERNAL USE SCHEMA is a schema level permission that can only - * be granted by catalog admin explicitly and is not included in schema ownership or ALL PRIVILEGES + * level by catalog owner. Note that EXTERNAL USE SCHEMA is a schema level permission that can only + * be granted by catalog owner explicitly and is not included in schema ownership or ALL PRIVILEGES * on the schema for security reasons. * *

This is the high-level interface, that contains generated methods. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateSecretRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateSecretRequest.java new file mode 100755 index 000000000..0e7298448 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateSecretRequest.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.protobuf.FieldMask; +import java.util.Objects; + +@Generated +public class UpdateSecretRequest { + /** + * The three-level (fully qualified) name of the secret (for example, + * **catalog_name.schema_name.secret_name**). + */ + @JsonIgnore private String fullName; + + /** + * The secret object containing the fields to update. Only fields specified in **update_mask** + * will be updated. + */ + @JsonProperty("secret") + private Secret secret; + + /** + * The field mask specifying which fields of the secret to update. Supported fields: **value**, + * **comment**, **owner**, **expire_time**. 
+ */ + @JsonIgnore + @QueryParam("update_mask") + private FieldMask updateMask; + + public UpdateSecretRequest setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + public UpdateSecretRequest setSecret(Secret secret) { + this.secret = secret; + return this; + } + + public Secret getSecret() { + return secret; + } + + public UpdateSecretRequest setUpdateMask(FieldMask updateMask) { + this.updateMask = updateMask; + return this; + } + + public FieldMask getUpdateMask() { + return updateMask; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateSecretRequest that = (UpdateSecretRequest) o; + return Objects.equals(fullName, that.fullName) + && Objects.equals(secret, that.secret) + && Objects.equals(updateMask, that.updateMask); + } + + @Override + public int hashCode() { + return Objects.hash(fullName, secret, updateMask); + } + + @Override + public String toString() { + return new ToStringer(UpdateSecretRequest.class) + .add("fullName", fullName) + .add("secret", secret) + .add("updateMask", updateMask) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java index dc6582a1f..ed2aff3d6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java @@ -10,7 +10,7 @@ /** * The environment entity used to preserve serverless environment side panel, jobs' environment for - * non-notebook task, and DLT's environment for classic and serverless pipelines. In this minimal + * non-notebook task, and SDP's environment for classic and serverless pipelines. 
In this minimal * environment spec, only pip and java dependencies are supported. */ @Generated diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersAPI.java index 57952042f..7f3fa2556 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersAPI.java @@ -45,7 +45,7 @@ public PolicyComplianceForClustersAPI(PolicyComplianceForClustersService mock) { *

If a cluster is updated while in a `TERMINATED` state, it will remain `TERMINATED`. The next * time the cluster is started, the new attributes will take effect. * - *

Clusters created by the Databricks Jobs, DLT, or Models services cannot be enforced by this + *

Clusters created by the Databricks Jobs, SDP, or Models services cannot be enforced by this * API. Instead, use the "Enforce job policy compliance" API to enforce policy compliance on jobs. */ public EnforceClusterComplianceResponse enforceCompliance( diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersService.java index c7a70552e..cd613370b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersService.java @@ -31,7 +31,7 @@ public interface PolicyComplianceForClustersService { *

If a cluster is updated while in a `TERMINATED` state, it will remain `TERMINATED`. The next * time the cluster is started, the new attributes will take effect. * - *

Clusters created by the Databricks Jobs, DLT, or Models services cannot be enforced by this + *

Clusters created by the Databricks Jobs, SDP, or Models services cannot be enforced by this * API. Instead, use the "Enforce job policy compliance" API to enforce policy compliance on jobs. */ EnforceClusterComplianceResponse enforceCompliance( diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSpace.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSpace.java index 30992dbc6..6c0a4fce3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSpace.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSpace.java @@ -13,6 +13,13 @@ public class GenieSpace { @JsonProperty("description") private String description; + /** + * ETag for this space. Pass this value back in the update request to prevent overwriting + * concurrent changes. + */ + @JsonProperty("etag") + private String etag; + /** Parent folder path of the Genie Space */ @JsonProperty("parent_path") private String parentPath; @@ -47,6 +54,15 @@ public String getDescription() { return description; } + public GenieSpace setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + public GenieSpace setParentPath(String parentPath) { this.parentPath = parentPath; return this; @@ -98,6 +114,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; GenieSpace that = (GenieSpace) o; return Objects.equals(description, that.description) + && Objects.equals(etag, that.etag) && Objects.equals(parentPath, that.parentPath) && Objects.equals(serializedSpace, that.serializedSpace) && Objects.equals(spaceId, that.spaceId) @@ -107,13 +124,15 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(description, parentPath, serializedSpace, spaceId, title, warehouseId); + return Objects.hash( + description, etag, parentPath, serializedSpace, spaceId, title, 
warehouseId); } @Override public String toString() { return new ToStringer(GenieSpace.class) .add("description", description) + .add("etag", etag) .add("parentPath", parentPath) .add("serializedSpace", serializedSpace) .add("spaceId", spaceId) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieUpdateSpaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieUpdateSpaceRequest.java index 938ac63ba..e81c46a49 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieUpdateSpaceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieUpdateSpaceRequest.java @@ -14,6 +14,13 @@ public class GenieUpdateSpaceRequest { @JsonProperty("description") private String description; + /** + * ETag returned by a previous GET or UPDATE. When set, the update will fail if the space has been + * modified since. Omit to apply the update unconditionally. + */ + @JsonProperty("etag") + private String etag; + /** * The contents of the Genie Space in serialized string form (full replacement). 
Use the [Get * Genie Space](:method:genie/getspace) API to retrieve an example response, which includes the @@ -43,6 +50,15 @@ public String getDescription() { return description; } + public GenieUpdateSpaceRequest setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + public GenieUpdateSpaceRequest setSerializedSpace(String serializedSpace) { this.serializedSpace = serializedSpace; return this; @@ -85,6 +101,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; GenieUpdateSpaceRequest that = (GenieUpdateSpaceRequest) o; return Objects.equals(description, that.description) + && Objects.equals(etag, that.etag) && Objects.equals(serializedSpace, that.serializedSpace) && Objects.equals(spaceId, that.spaceId) && Objects.equals(title, that.title) @@ -93,13 +110,14 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(description, serializedSpace, spaceId, title, warehouseId); + return Objects.hash(description, etag, serializedSpace, spaceId, title, warehouseId); } @Override public String toString() { return new ToStringer(GenieUpdateSpaceRequest.class) .add("description", description) + .add("etag", etag) .add("serializedSpace", serializedSpace) .add("spaceId", spaceId) .add("title", title) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Thought.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Thought.java index 25ff5955c..cf0dc64b3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Thought.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Thought.java @@ -14,7 +14,14 @@ public class Thought { @JsonProperty("content") private String content; - /** The category of this thought. */ + /** + * The category of this thought. 
The possible values are: * `THOUGHT_TYPE_DESCRIPTION`: A + * high-level description of how the question was interpreted. * `THOUGHT_TYPE_UNDERSTANDING`: How + * ambiguous parts of the question were resolved. * `THOUGHT_TYPE_DATA_SOURCING`: Which tables or + * datasets were identified as relevant. * `THOUGHT_TYPE_INSTRUCTIONS`: Which author-defined + * instructions were referenced. * `THOUGHT_TYPE_STEPS`: The logical steps taken to compute the + * answer. + */ @JsonProperty("thought_type") private ThoughtType thoughtType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java index 118eacfd9..579389ac9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java @@ -21,10 +21,6 @@ * methods GET, HEAD, PUT, and DELETE to manage files and directories specified using their URI * path. The path is always absolute. * - *

Some Files API client features are currently experimental. To enable them, set - * `enable_experimental_files_api_client = True` in your configuration profile or use the - * environment variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`. - * *

Use of Files API may incur Databricks data transfer charges. * *

[Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java index e2d7724f6..86fb44603 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java @@ -17,10 +17,6 @@ * methods GET, HEAD, PUT, and DELETE to manage files and directories specified using their URI * path. The path is always absolute. * - *

Some Files API client features are currently experimental. To enable them, set - * `enable_experimental_files_api_client = True` in your configuration profile or use the - * environment variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`. - * *

Use of Files API may incur Databricks data transfer charges. * *

[Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java index 719dbe347..1dae5c2a0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java @@ -12,8 +12,8 @@ * can manage or use apps. * **[Cluster permissions](:service:clusters)** — Manage which users can * manage, restart, or attach to clusters. * **[Cluster policy * permissions](:service:clusterpolicies)** — Manage which users can use cluster policies. * - * **[Delta Live Tables pipeline permissions](:service:pipelines)** — Manage which users can view, - * manage, run, cancel, or own a Delta Live Tables pipeline. * **[Job permissions](:service:jobs)** + * **[Spark Declarative Pipelines permissions](:service:pipelines)** — Manage which users can view, + * manage, run, cancel, or own a Spark Declarative Pipeline. * **[Job permissions](:service:jobs)** * — Manage which users can view, manage, trigger, cancel, or own a job. * **[MLflow experiment * permissions](:service:experiments)** — Manage which users can read, edit, or manage MLflow * experiments. * **[MLflow registered model permissions](:service:modelregistry)** — Manage which diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java index 0b011366f..4b3a00859 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java @@ -9,8 +9,8 @@ * can manage or use apps. 
* **[Cluster permissions](:service:clusters)** — Manage which users can * manage, restart, or attach to clusters. * **[Cluster policy * permissions](:service:clusterpolicies)** — Manage which users can use cluster policies. * - * **[Delta Live Tables pipeline permissions](:service:pipelines)** — Manage which users can view, - * manage, run, cancel, or own a Delta Live Tables pipeline. * **[Job permissions](:service:jobs)** + * **[Spark Declarative Pipelines permissions](:service:pipelines)** — Manage which users can view, + * manage, run, cancel, or own a Spark Declarative Pipeline. * **[Job permissions](:service:jobs)** * — Manage which users can view, manage, trigger, cancel, or own a job. * **[MLflow experiment * permissions](:service:experiments)** — Manage which users can read, edit, or manage MLflow * experiments. * **[MLflow registered model permissions](:service:modelregistry)** — Manage which diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationCodeCode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationCodeCode.java index 23c06ff1f..1edadfb6b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationCodeCode.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationCodeCode.java @@ -49,6 +49,8 @@ */ @Generated public enum TerminationCodeCode { + BREAKING_CHANGE, // Run failed because of an intentional breaking change in Spark, but it will be + // retried with a mitigation config. BUDGET_POLICY_LIMIT_EXCEEDED, CANCELED, // The run was canceled during execution by the platform; for // example, if the maximum run duration was exceeded. 
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ClonePipelineRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ClonePipelineRequest.java index dc5715ca0..5ab805508 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ClonePipelineRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ClonePipelineRequest.java @@ -29,7 +29,7 @@ public class ClonePipelineRequest { @JsonProperty("catalog") private String catalog; - /** DLT Release Channel that specifies which version to use. */ + /** SDP Release Channel that specifies which version to use. */ @JsonProperty("channel") private String channel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java index 9d6fdfd31..6dde0dcad 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java @@ -28,7 +28,7 @@ public class CreatePipeline { @JsonProperty("catalog") private String catalog; - /** DLT Release Channel that specifies which version to use. */ + /** SDP Release Channel that specifies which version to use. 
*/ @JsonProperty("channel") private String channel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java index 6cc967ec2..d8dd8a40a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java @@ -31,7 +31,7 @@ public class EditPipeline { @JsonProperty("catalog") private String catalog; - /** DLT Release Channel that specifies which version to use. */ + /** SDP Release Channel that specifies which version to use. */ @JsonProperty("channel") private String channel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java index 78977dc16..11f3a3261 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java @@ -24,7 +24,7 @@ public class PipelineSpec { @JsonProperty("catalog") private String catalog; - /** DLT Release Channel that specifies which version to use. */ + /** SDP Release Channel that specifies which version to use. 
*/ @JsonProperty("channel") private String channel; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesEnvironment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesEnvironment.java index 88c63f2b7..108d44c90 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesEnvironment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesEnvironment.java @@ -10,7 +10,7 @@ /** * The environment entity used to preserve serverless environment side panel, jobs' environment for - * non-notebook task, and DLT's environment for classic and serverless pipelines. In this minimal + * non-notebook task, and SDP's environment for classic and serverless pipelines. In this minimal * environment spec, only pip dependencies are supported. */ @Generated diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/BranchStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/BranchStatus.java index bd3cd79ac..584b6ebcf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/BranchStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/BranchStatus.java @@ -10,6 +10,17 @@ @Generated public class BranchStatus { + /** + * The short identifier of the branch, suitable for showing to the users. For a branch with name + * `projects/my-project/branches/my-branch`, the branch_id is `my-branch`. + * + *

Use this field when building UI components that display branches to users (e.g., a drop-down + * selector). Prefer showing `branch_id` instead of the full resource name from `Branch.name`, + * which follows the `projects/{project_id}/branches/{branch_id}` format and is not user-friendly. + */ + @JsonProperty("branch_id") + private String branchId; + /** The branch's state, indicating if it is initializing, ready for use, or archived. */ @JsonProperty("current_state") private BranchStatusState currentState; @@ -53,6 +64,15 @@ public class BranchStatus { @JsonProperty("state_change_time") private Timestamp stateChangeTime; + public BranchStatus setBranchId(String branchId) { + this.branchId = branchId; + return this; + } + + public String getBranchId() { + return branchId; + } + public BranchStatus setCurrentState(BranchStatusState currentState) { this.currentState = currentState; return this; @@ -148,7 +168,8 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; BranchStatus that = (BranchStatus) o; - return Objects.equals(currentState, that.currentState) + return Objects.equals(branchId, that.branchId) + && Objects.equals(currentState, that.currentState) && Objects.equals(defaultValue, that.defaultValue) && Objects.equals(expireTime, that.expireTime) && Objects.equals(isProtected, that.isProtected) @@ -163,6 +184,7 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( + branchId, currentState, defaultValue, expireTime, @@ -178,6 +200,7 @@ public int hashCode() { @Override public String toString() { return new ToStringer(BranchStatus.class) + .add("branchId", branchId) .add("currentState", currentState) .add("defaultValue", defaultValue) .add("expireTime", expireTime) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CatalogCatalogStatus.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CatalogCatalogStatus.java index 7e39a11b2..a8f0eb3aa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CatalogCatalogStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CatalogCatalogStatus.java @@ -18,6 +18,17 @@ public class CatalogCatalogStatus { @JsonProperty("branch") private String branch; + /** + * The short identifier of the catalog, suitable for showing to the users. For a catalog with name + * `catalogs/my-catalog`, the catalog_id is `my-catalog`. + * + *

Use this field when building UI components that display catalogs to users (e.g., a drop-down + * selector). Prefer showing `catalog_id` instead of the full resource name from `Catalog.name`, + * which follows the `catalogs/{catalog_id}` format and is not user-friendly. + */ + @JsonProperty("catalog_id") + private String catalogId; + /** The name of the Postgres database associated with the catalog. */ @JsonProperty("postgres_database") private String postgresDatabase; @@ -39,6 +50,15 @@ public String getBranch() { return branch; } + public CatalogCatalogStatus setCatalogId(String catalogId) { + this.catalogId = catalogId; + return this; + } + + public String getCatalogId() { + return catalogId; + } + public CatalogCatalogStatus setPostgresDatabase(String postgresDatabase) { this.postgresDatabase = postgresDatabase; return this; @@ -63,19 +83,21 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; CatalogCatalogStatus that = (CatalogCatalogStatus) o; return Objects.equals(branch, that.branch) + && Objects.equals(catalogId, that.catalogId) && Objects.equals(postgresDatabase, that.postgresDatabase) && Objects.equals(project, that.project); } @Override public int hashCode() { - return Objects.hash(branch, postgresDatabase, project); + return Objects.hash(branch, catalogId, postgresDatabase, project); } @Override public String toString() { return new ToStringer(CatalogCatalogStatus.class) .add("branch", branch) + .add("catalogId", catalogId) .add("postgresDatabase", postgresDatabase) .add("project", project) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DatabaseDatabaseStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DatabaseDatabaseStatus.java index 80a25ed7b..53aec7344 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DatabaseDatabaseStatus.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DatabaseDatabaseStatus.java @@ -9,6 +9,19 @@ @Generated public class DatabaseDatabaseStatus { + /** + * The short identifier of the database, suitable for showing to the users. For a database with + * name `projects/my-project/branches/my-branch/databases/my-db`, the database_id is `my-db`. + * + *

Use this field when building UI components that display databases to users (e.g., a + * drop-down selector). Prefer showing `database_id` instead of the full resource name from + * `Database.name`, which follows the + * `projects/{project_id}/branches/{branch_id}/databases/{database_id}` format and is not + * user-friendly. + */ + @JsonProperty("database_id") + private String databaseId; + /** The name of the Postgres database. */ @JsonProperty("postgres_database") private String postgresDatabase; @@ -20,6 +33,15 @@ public class DatabaseDatabaseStatus { @JsonProperty("role") private String role; + public DatabaseDatabaseStatus setDatabaseId(String databaseId) { + this.databaseId = databaseId; + return this; + } + + public String getDatabaseId() { + return databaseId; + } + public DatabaseDatabaseStatus setPostgresDatabase(String postgresDatabase) { this.postgresDatabase = postgresDatabase; return this; @@ -43,18 +65,20 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; DatabaseDatabaseStatus that = (DatabaseDatabaseStatus) o; - return Objects.equals(postgresDatabase, that.postgresDatabase) + return Objects.equals(databaseId, that.databaseId) + && Objects.equals(postgresDatabase, that.postgresDatabase) && Objects.equals(role, that.role); } @Override public int hashCode() { - return Objects.hash(postgresDatabase, role); + return Objects.hash(databaseId, postgresDatabase, role); } @Override public String toString() { return new ToStringer(DatabaseDatabaseStatus.class) + .add("databaseId", databaseId) .add("postgresDatabase", postgresDatabase) .add("role", role) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointStatus.java index b4c3d11b1..34743c1e2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointStatus.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointStatus.java @@ -30,6 +30,20 @@ public class EndpointStatus { @JsonProperty("disabled") private Boolean disabled; + /** + * The short identifier of the endpoint, suitable for showing to the users. For an endpoint with + * name `projects/my-project/branches/my-branch/endpoints/my-endpoint`, the endpoint_id is + * `my-endpoint`. + * + *

Use this field when building UI components that display endpoints to users (e.g., a + * drop-down selector). Prefer showing `endpoint_id` instead of the full resource name from + * `Endpoint.name`, which follows the + * `projects/{project_id}/branches/{branch_id}/endpoints/{endpoint_id}` format and is not + * user-friendly. + */ + @JsonProperty("endpoint_id") + private String endpointId; + /** The endpoint type. A branch can only have one READ_WRITE endpoint. */ @JsonProperty("endpoint_type") private EndpointType endpointType; @@ -90,6 +104,15 @@ public Boolean getDisabled() { return disabled; } + public EndpointStatus setEndpointId(String endpointId) { + this.endpointId = endpointId; + return this; + } + + public String getEndpointId() { + return endpointId; + } + public EndpointStatus setEndpointType(EndpointType endpointType) { this.endpointType = endpointType; return this; @@ -153,6 +176,7 @@ public boolean equals(Object o) { && Objects.equals(autoscalingLimitMinCu, that.autoscalingLimitMinCu) && Objects.equals(currentState, that.currentState) && Objects.equals(disabled, that.disabled) + && Objects.equals(endpointId, that.endpointId) && Objects.equals(endpointType, that.endpointType) && Objects.equals(group, that.group) && Objects.equals(hosts, that.hosts) @@ -168,6 +192,7 @@ public int hashCode() { autoscalingLimitMinCu, currentState, disabled, + endpointId, endpointType, group, hosts, @@ -183,6 +208,7 @@ public String toString() { .add("autoscalingLimitMinCu", autoscalingLimitMinCu) .add("currentState", currentState) .add("disabled", disabled) + .add("endpointId", endpointId) .add("endpointType", endpointType) .add("group", group) .add("hosts", hosts) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectStatus.java index c01a7b285..477fbc49d 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectStatus.java @@ -51,6 +51,17 @@ public class ProjectStatus { @JsonProperty("pg_version") private Long pgVersion; + /** + * The short identifier of the project, suitable for showing to the users. For a project with name + * `projects/my-project`, the project_id is `my-project`. + * + *

Use this field when building UI components that display projects to users (e.g., a drop-down + * selector). Prefer showing `project_id` instead of the full resource name from `Project.name`, + * which follows the `projects/{project_id}` format and is not user-friendly. + */ + @JsonProperty("project_id") + private String projectId; + /** The current space occupied by the project in storage. */ @JsonProperty("synthetic_storage_size_bytes") private Long syntheticStorageSizeBytes; @@ -146,6 +157,15 @@ public Long getPgVersion() { return pgVersion; } + public ProjectStatus setProjectId(String projectId) { + this.projectId = projectId; + return this; + } + + public String getProjectId() { + return projectId; + } + public ProjectStatus setSyntheticStorageSizeBytes(Long syntheticStorageSizeBytes) { this.syntheticStorageSizeBytes = syntheticStorageSizeBytes; return this; @@ -170,6 +190,7 @@ public boolean equals(Object o) { && Objects.equals(historyRetentionDuration, that.historyRetentionDuration) && Objects.equals(owner, that.owner) && Objects.equals(pgVersion, that.pgVersion) + && Objects.equals(projectId, that.projectId) && Objects.equals(syntheticStorageSizeBytes, that.syntheticStorageSizeBytes); } @@ -186,6 +207,7 @@ public int hashCode() { historyRetentionDuration, owner, pgVersion, + projectId, syntheticStorageSizeBytes); } @@ -202,6 +224,7 @@ public String toString() { .add("historyRetentionDuration", historyRetentionDuration) .add("owner", owner) .add("pgVersion", pgVersion) + .add("projectId", projectId) .add("syntheticStorageSizeBytes", syntheticStorageSizeBytes) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/RoleRoleStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/RoleRoleStatus.java index abb3d9ea0..2a1b91884 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/RoleRoleStatus.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/RoleRoleStatus.java @@ -30,6 +30,18 @@ public class RoleRoleStatus { @JsonProperty("postgres_role") private String postgresRole; + /** + * The short identifier of the role, suitable for showing to the users. For a role with name + * `projects/my-project/branches/my-branch/roles/my-role`, the role_id is `my-role`. + * + *

Use this field when building UI components that display roles to users (e.g., a drop-down + * selector). Prefer showing `role_id` instead of the full resource name from `Role.name`, which + * follows the `projects/{project_id}/branches/{branch_id}/roles/{role_id}` format and is not + * user-friendly. + */ + @JsonProperty("role_id") + private String roleId; + public RoleRoleStatus setAttributes(RoleAttributes attributes) { this.attributes = attributes; return this; @@ -75,6 +87,15 @@ public String getPostgresRole() { return postgresRole; } + public RoleRoleStatus setRoleId(String roleId) { + this.roleId = roleId; + return this; + } + + public String getRoleId() { + return roleId; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -84,12 +105,14 @@ public boolean equals(Object o) { && Objects.equals(authMethod, that.authMethod) && Objects.equals(identityType, that.identityType) && Objects.equals(membershipRoles, that.membershipRoles) - && Objects.equals(postgresRole, that.postgresRole); + && Objects.equals(postgresRole, that.postgresRole) + && Objects.equals(roleId, that.roleId); } @Override public int hashCode() { - return Objects.hash(attributes, authMethod, identityType, membershipRoles, postgresRole); + return Objects.hash( + attributes, authMethod, identityType, membershipRoles, postgresRole, roleId); } @Override @@ -100,6 +123,7 @@ public String toString() { .add("identityType", identityType) .add("membershipRoles", membershipRoles) .add("postgresRole", postgresRole) + .add("roleId", roleId) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/SyncedTableSyncedTableStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/SyncedTableSyncedTableStatus.java index 33df9b315..e7dc081bc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/SyncedTableSyncedTableStatus.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/SyncedTableSyncedTableStatus.java @@ -41,6 +41,14 @@ public class SyncedTableSyncedTableStatus { @JsonProperty("pipeline_id") private String pipelineId; + /** + * The full resource name of the project associated with the table. + * + *

Format: "projects/{project_id}". + */ + @JsonProperty("project") + private String project; + /** The current phase of the data synchronization pipeline. */ @JsonProperty("provisioning_phase") private ProvisioningPhase provisioningPhase; @@ -114,6 +122,15 @@ public String getPipelineId() { return pipelineId; } + public SyncedTableSyncedTableStatus setProject(String project) { + this.project = project; + return this; + } + + public String getProject() { + return project; + } + public SyncedTableSyncedTableStatus setProvisioningPhase(ProvisioningPhase provisioningPhase) { this.provisioningPhase = provisioningPhase; return this; @@ -145,6 +162,7 @@ public boolean equals(Object o) { && Objects.equals(message, that.message) && Objects.equals(ongoingSyncProgress, that.ongoingSyncProgress) && Objects.equals(pipelineId, that.pipelineId) + && Objects.equals(project, that.project) && Objects.equals(provisioningPhase, that.provisioningPhase) && Objects.equals(unityCatalogProvisioningState, that.unityCatalogProvisioningState); } @@ -159,6 +177,7 @@ public int hashCode() { message, ongoingSyncProgress, pipelineId, + project, provisioningPhase, unityCatalogProvisioningState); } @@ -173,6 +192,7 @@ public String toString() { .add("message", message) .add("ongoingSyncProgress", ongoingSyncProgress) .add("pipelineId", pipelineId) + .add("project", project) .add("provisioningPhase", provisioningPhase) .add("unityCatalogProvisioningState", unityCatalogProvisioningState) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateGcpKeyInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateGcpKeyInfo.java index 2da9d7bf9..e9d1e6a78 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateGcpKeyInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateGcpKeyInfo.java @@ -23,6 +23,13 @@ public class CreateGcpKeyInfo { 
@JsonProperty("kms_key_id") private String kmsKeyId; + /** + * When true, Databricks will not use OAuth to grant the service account access to the KMS key. + * The customer is responsible for granting access manually. + */ + @JsonProperty("manual") + private Boolean manual; + public CreateGcpKeyInfo setGcpServiceAccount(GcpServiceAccount gcpServiceAccount) { this.gcpServiceAccount = gcpServiceAccount; return this; @@ -41,18 +48,28 @@ public String getKmsKeyId() { return kmsKeyId; } + public CreateGcpKeyInfo setManual(Boolean manual) { + this.manual = manual; + return this; + } + + public Boolean getManual() { + return manual; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; CreateGcpKeyInfo that = (CreateGcpKeyInfo) o; return Objects.equals(gcpServiceAccount, that.gcpServiceAccount) - && Objects.equals(kmsKeyId, that.kmsKeyId); + && Objects.equals(kmsKeyId, that.kmsKeyId) + && Objects.equals(manual, that.manual); } @Override public int hashCode() { - return Objects.hash(gcpServiceAccount, kmsKeyId); + return Objects.hash(gcpServiceAccount, kmsKeyId, manual); } @Override @@ -60,6 +77,7 @@ public String toString() { return new ToStringer(CreateGcpKeyInfo.class) .add("gcpServiceAccount", gcpServiceAccount) .add("kmsKeyId", kmsKeyId) + .add("manual", manual) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysAPI.java index a9c6d4ad7..a1fa6e14c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysAPI.java @@ -51,6 +51,16 @@ public EncryptionKeysAPI(EncryptionKeysService mock) { * *

This operation is available only if your account is on the E2 version of the platform or on * a select custom plan that allows multiple workspaces per account. + * + *

**GCP only**: To create a customer-managed key on GCP, you must include the + * `X-Databricks-GCP-SA-Access-Token` HTTP header in your request. This header must contain a + * Google Cloud OAuth access token with the `cloud-platform` scope. The Google identity associated + * with the token must also have the `setIamPermissions` and `getIamPermissions` IAM permissions + * on the key resource. For details on obtaining this token, see [Authenticate with Google ID + * tokens]. + * + *

[Authenticate with Google ID tokens]: + * https://docs.databricks.com/gcp/en/dev-tools/auth/authentication-google-id.html */ public CustomerManagedKey create(CreateCustomerManagedKeyRequest request) { return impl.create(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysService.java index e9741ccb3..748178cc2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysService.java @@ -39,6 +39,16 @@ public interface EncryptionKeysService { * *

This operation is available only if your account is on the E2 version of the platform or on * a select custom plan that allows multiple workspaces per account. + * + *

**GCP only**: To create a customer-managed key on GCP, you must include the + * `X-Databricks-GCP-SA-Access-Token` HTTP header in your request. This header must contain a + * Google Cloud OAuth access token with the `cloud-platform` scope. The Google identity associated + * with the token must also have the `setIamPermissions` and `getIamPermissions` IAM permissions + * on the key resource. For details on obtaining this token, see [Authenticate with Google ID + * tokens]. + * + *

[Authenticate with Google ID tokens]: + * https://docs.databricks.com/gcp/en/dev-tools/auth/authentication-google-id.html */ CustomerManagedKey create(CreateCustomerManagedKeyRequest createCustomerManagedKeyRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpKeyInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpKeyInfo.java index 19d9d692d..a49661077 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpKeyInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpKeyInfo.java @@ -23,6 +23,13 @@ public class GcpKeyInfo { @JsonProperty("kms_key_id") private String kmsKeyId; + /** + * When true, Databricks will not use OAuth to grant the service account access to the KMS key. + * The customer is responsible for granting access manually. + */ + @JsonProperty("manual") + private Boolean manual; + public GcpKeyInfo setGcpServiceAccount(GcpServiceAccount gcpServiceAccount) { this.gcpServiceAccount = gcpServiceAccount; return this; @@ -41,18 +48,28 @@ public String getKmsKeyId() { return kmsKeyId; } + public GcpKeyInfo setManual(Boolean manual) { + this.manual = manual; + return this; + } + + public Boolean getManual() { + return manual; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; GcpKeyInfo that = (GcpKeyInfo) o; return Objects.equals(gcpServiceAccount, that.gcpServiceAccount) - && Objects.equals(kmsKeyId, that.kmsKeyId); + && Objects.equals(kmsKeyId, that.kmsKeyId) + && Objects.equals(manual, that.manual); } @Override public int hashCode() { - return Objects.hash(gcpServiceAccount, kmsKeyId); + return Objects.hash(gcpServiceAccount, kmsKeyId, manual); } @Override @@ -60,6 +77,7 @@ public String toString() { return new ToStringer(GcpKeyInfo.class) .add("gcpServiceAccount", gcpServiceAccount) .add("kmsKeyId", kmsKeyId) + 
.add("manual", manual) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyAppsRuntimeDestination.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyAppsRuntimeDestination.java new file mode 100755 index 000000000..b9a879c16 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyAppsRuntimeDestination.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CustomerFacingIngressNetworkPolicyAppsRuntimeDestination { + /** Must be set to true. */ + @JsonProperty("all_destinations") + private Boolean allDestinations; + + public CustomerFacingIngressNetworkPolicyAppsRuntimeDestination setAllDestinations( + Boolean allDestinations) { + this.allDestinations = allDestinations; + return this; + } + + public Boolean getAllDestinations() { + return allDestinations; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CustomerFacingIngressNetworkPolicyAppsRuntimeDestination that = + (CustomerFacingIngressNetworkPolicyAppsRuntimeDestination) o; + return Objects.equals(allDestinations, that.allDestinations); + } + + @Override + public int hashCode() { + return Objects.hash(allDestinations); + } + + @Override + public String toString() { + return new ToStringer(CustomerFacingIngressNetworkPolicyAppsRuntimeDestination.class) + .add("allDestinations", allDestinations) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyLakebaseRuntimeDestination.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyLakebaseRuntimeDestination.java new file mode 100755 index 000000000..8e54c42c9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyLakebaseRuntimeDestination.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CustomerFacingIngressNetworkPolicyLakebaseRuntimeDestination { + /** Must be set to true. */ + @JsonProperty("all_destinations") + private Boolean allDestinations; + + public CustomerFacingIngressNetworkPolicyLakebaseRuntimeDestination setAllDestinations( + Boolean allDestinations) { + this.allDestinations = allDestinations; + return this; + } + + public Boolean getAllDestinations() { + return allDestinations; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CustomerFacingIngressNetworkPolicyLakebaseRuntimeDestination that = + (CustomerFacingIngressNetworkPolicyLakebaseRuntimeDestination) o; + return Objects.equals(allDestinations, that.allDestinations); + } + + @Override + public int hashCode() { + return Objects.hash(allDestinations); + } + + @Override + public String toString() { + return new ToStringer(CustomerFacingIngressNetworkPolicyLakebaseRuntimeDestination.class) + .add("allDestinations", allDestinations) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyPublicIngressRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyPublicIngressRule.java index fc73424f0..f3e8d0cc0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyPublicIngressRule.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyPublicIngressRule.java @@ -21,10 +21,7 @@ public class CustomerFacingIngressNetworkPolicyPublicIngressRule { @JsonProperty("destination") private CustomerFacingIngressNetworkPolicyRequestDestination destination; - /** - * User-provided name for this ingress rule. Helps identify which rule caused a request to be - * denied or dry-run denied. - */ + /** The label for this ingress rule. */ @JsonProperty("label") private String label; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyRequestDestination.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyRequestDestination.java index 39a13b1fb..87f91f574 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyRequestDestination.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyRequestDestination.java @@ -16,6 +16,14 @@ public class CustomerFacingIngressNetworkPolicyRequestDestination { @JsonProperty("all_destinations") private Boolean allDestinations; + /** */ + @JsonProperty("apps_runtime") + private CustomerFacingIngressNetworkPolicyAppsRuntimeDestination appsRuntime; + + /** */ + @JsonProperty("lakebase_runtime") + private CustomerFacingIngressNetworkPolicyLakebaseRuntimeDestination lakebaseRuntime; + /** */ @JsonProperty("workspace_api") 
private CustomerFacingIngressNetworkPolicyWorkspaceApiDestination workspaceApi; @@ -34,6 +42,26 @@ public Boolean getAllDestinations() { return allDestinations; } + public CustomerFacingIngressNetworkPolicyRequestDestination setAppsRuntime( + CustomerFacingIngressNetworkPolicyAppsRuntimeDestination appsRuntime) { + this.appsRuntime = appsRuntime; + return this; + } + + public CustomerFacingIngressNetworkPolicyAppsRuntimeDestination getAppsRuntime() { + return appsRuntime; + } + + public CustomerFacingIngressNetworkPolicyRequestDestination setLakebaseRuntime( + CustomerFacingIngressNetworkPolicyLakebaseRuntimeDestination lakebaseRuntime) { + this.lakebaseRuntime = lakebaseRuntime; + return this; + } + + public CustomerFacingIngressNetworkPolicyLakebaseRuntimeDestination getLakebaseRuntime() { + return lakebaseRuntime; + } + public CustomerFacingIngressNetworkPolicyRequestDestination setWorkspaceApi( CustomerFacingIngressNetworkPolicyWorkspaceApiDestination workspaceApi) { this.workspaceApi = workspaceApi; @@ -61,19 +89,23 @@ public boolean equals(Object o) { CustomerFacingIngressNetworkPolicyRequestDestination that = (CustomerFacingIngressNetworkPolicyRequestDestination) o; return Objects.equals(allDestinations, that.allDestinations) + && Objects.equals(appsRuntime, that.appsRuntime) + && Objects.equals(lakebaseRuntime, that.lakebaseRuntime) && Objects.equals(workspaceApi, that.workspaceApi) && Objects.equals(workspaceUi, that.workspaceUi); } @Override public int hashCode() { - return Objects.hash(allDestinations, workspaceApi, workspaceUi); + return Objects.hash(allDestinations, appsRuntime, lakebaseRuntime, workspaceApi, workspaceUi); } @Override public String toString() { return new ToStringer(CustomerFacingIngressNetworkPolicyRequestDestination.class) .add("allDestinations", allDestinations) + .add("appsRuntime", appsRuntime) + .add("lakebaseRuntime", lakebaseRuntime) .add("workspaceApi", workspaceApi) .add("workspaceUi", workspaceUi) .toString(); diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicy.java index 886b66041..d98ef6274 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicy.java @@ -26,6 +26,15 @@ public class EgressNetworkPolicyNetworkAccessPolicy { private Collection allowedStorageDestinations; + /** + * List of internet destinations that serverless workloads are blocked from accessing. These + * destinations are enforced when restriction mode is RESTRICTED_ACCESS or DRY_RUN. Currently + * supports DNS_NAME type only; IP_RANGE support is planned. + */ + @JsonProperty("blocked_internet_destinations") + private Collection + blockedInternetDestinations; + /** Optional. 
When policy_enforcement is not provided, we default to ENFORCE_MODE_ALL_SERVICES */ @JsonProperty("policy_enforcement") private EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement policyEnforcement; @@ -58,6 +67,18 @@ public EgressNetworkPolicyNetworkAccessPolicy setAllowedStorageDestinations( return allowedStorageDestinations; } + public EgressNetworkPolicyNetworkAccessPolicy setBlockedInternetDestinations( + Collection + blockedInternetDestinations) { + this.blockedInternetDestinations = blockedInternetDestinations; + return this; + } + + public Collection + getBlockedInternetDestinations() { + return blockedInternetDestinations; + } + public EgressNetworkPolicyNetworkAccessPolicy setPolicyEnforcement( EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement policyEnforcement) { this.policyEnforcement = policyEnforcement; @@ -85,6 +106,7 @@ public boolean equals(Object o) { EgressNetworkPolicyNetworkAccessPolicy that = (EgressNetworkPolicyNetworkAccessPolicy) o; return Objects.equals(allowedInternetDestinations, that.allowedInternetDestinations) && Objects.equals(allowedStorageDestinations, that.allowedStorageDestinations) + && Objects.equals(blockedInternetDestinations, that.blockedInternetDestinations) && Objects.equals(policyEnforcement, that.policyEnforcement) && Objects.equals(restrictionMode, that.restrictionMode); } @@ -94,6 +116,7 @@ public int hashCode() { return Objects.hash( allowedInternetDestinations, allowedStorageDestinations, + blockedInternetDestinations, policyEnforcement, restrictionMode); } @@ -103,6 +126,7 @@ public String toString() { return new ToStringer(EgressNetworkPolicyNetworkAccessPolicy.class) .add("allowedInternetDestinations", allowedInternetDestinations) .add("allowedStorageDestinations", allowedStorageDestinations) + .add("blockedInternetDestinations", blockedInternetDestinations) .add("policyEnforcement", policyEnforcement) .add("restrictionMode", restrictionMode) .toString(); diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensAPI.java index e0274b63a..d15d8820b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensAPI.java @@ -56,6 +56,16 @@ public Iterable list() { null, (Void v) -> impl.list(), ListPublicTokensResponse::getTokenInfos, response -> null); } + /** + * Updates the comment or scopes of a token. + * + *

If a token with the specified ID is not valid, this call returns an error + * **RESOURCE_DOES_NOT_EXIST**. + */ + public UpdateTokenResponse update(UpdateTokenRequest request) { + return impl.update(request); + } + public TokensService impl() { return impl; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensImpl.java index 5de1f612b..f248f398f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensImpl.java @@ -67,4 +67,22 @@ public ListPublicTokensResponse list() { throw new DatabricksException("IO error: " + e.getMessage(), e); } } + + @Override + public UpdateTokenResponse update(UpdateTokenRequest request) { + String path = String.format("/api/2.0/token/%s", request.getTokenId()); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } + return apiClient.execute(req, UpdateTokenResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensService.java index 9848537ed..05155d7f1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensService.java @@ -30,4 +30,12 @@ public interface TokensService { /** Lists all the valid tokens for a user-workspace pair. 
*/ ListPublicTokensResponse list(); + + /** + * Updates the comment or scopes of a token. + * + *

If a token with the specified ID is not valid, this call returns an error + * **RESOURCE_DOES_NOT_EXIST**. + */ + UpdateTokenResponse update(UpdateTokenRequest updateTokenRequest); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateTokenRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateTokenRequest.java new file mode 100755 index 000000000..0802f7c59 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateTokenRequest.java @@ -0,0 +1,88 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.protobuf.FieldMask; +import java.util.Objects; + +@Generated +public class UpdateTokenRequest { + /** */ + @JsonProperty("token") + private PublicTokenInfo token; + + /** The SHA-256 hash of the token to be updated. */ + @JsonIgnore private String tokenId; + + /** + * A list of field name under PublicTokenInfo, For example in request use {"update_mask": + * "comment,scopes"} + * + *

The field mask must be a single string, with multiple fields separated by commas (no + * spaces). The field path is relative to the resource object, using a dot (`.`) to navigate + * sub-fields (e.g., `author.given_name`). Specification of elements in sequence or map fields is + * not allowed, as only the entire collection field can be specified. Field names must exactly + * match the resource field names. + * + *

A field mask of `*` indicates full replacement. It’s recommended to always explicitly list + * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if + * the API changes in the future. + */ + @JsonProperty("update_mask") + private FieldMask updateMask; + + public UpdateTokenRequest setToken(PublicTokenInfo token) { + this.token = token; + return this; + } + + public PublicTokenInfo getToken() { + return token; + } + + public UpdateTokenRequest setTokenId(String tokenId) { + this.tokenId = tokenId; + return this; + } + + public String getTokenId() { + return tokenId; + } + + public UpdateTokenRequest setUpdateMask(FieldMask updateMask) { + this.updateMask = updateMask; + return this; + } + + public FieldMask getUpdateMask() { + return updateMask; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateTokenRequest that = (UpdateTokenRequest) o; + return Objects.equals(token, that.token) + && Objects.equals(tokenId, that.tokenId) + && Objects.equals(updateMask, that.updateMask); + } + + @Override + public int hashCode() { + return Objects.hash(token, tokenId, updateMask); + } + + @Override + public String toString() { + return new ToStringer(UpdateTokenRequest.class) + .add("token", token) + .add("tokenId", tokenId) + .add("updateMask", updateMask) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateTokenResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateTokenResponse.java new file mode 100755 index 000000000..a25e7eec1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateTokenResponse.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +public class UpdateTokenResponse { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(UpdateTokenResponse.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/App.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/App.java new file mode 100755 index 000000000..e2876bbe3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/App.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.supervisoragents; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Databricks app. Supported app: custom mcp, custom agent. 
*/ +@Generated +public class App { + /** App name */ + @JsonProperty("name") + private String name; + + public App setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + App that = (App) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(App.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Connection.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Connection.java new file mode 100755 index 000000000..1093f2d99 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Connection.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.supervisoragents; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Databricks connection. Supported connection: external mcp server. 
*/ +@Generated +public class Connection { + /** */ + @JsonProperty("name") + private String name; + + public Connection setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Connection that = (Connection) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(Connection.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/CreateSupervisorAgentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/CreateSupervisorAgentRequest.java new file mode 100755 index 000000000..4e056e875 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/CreateSupervisorAgentRequest.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.supervisoragents; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateSupervisorAgentRequest { + /** The Supervisor Agent to create. 
*/ + @JsonProperty("supervisor_agent") + private SupervisorAgent supervisorAgent; + + public CreateSupervisorAgentRequest setSupervisorAgent(SupervisorAgent supervisorAgent) { + this.supervisorAgent = supervisorAgent; + return this; + } + + public SupervisorAgent getSupervisorAgent() { + return supervisorAgent; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateSupervisorAgentRequest that = (CreateSupervisorAgentRequest) o; + return Objects.equals(supervisorAgent, that.supervisorAgent); + } + + @Override + public int hashCode() { + return Objects.hash(supervisorAgent); + } + + @Override + public String toString() { + return new ToStringer(CreateSupervisorAgentRequest.class) + .add("supervisorAgent", supervisorAgent) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/CreateToolRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/CreateToolRequest.java new file mode 100755 index 000000000..d7a3be1d7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/CreateToolRequest.java @@ -0,0 +1,81 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.supervisoragents; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateToolRequest { + /** + * Parent resource where this tool will be created. 
Format: + * supervisor-agents/{supervisor_agent_id} + */ + @JsonIgnore private String parent; + + /** */ + @JsonProperty("tool") + private Tool tool; + + /** + * The ID to use for the tool, which will become the final component of the tool's resource name. + */ + @JsonIgnore + @QueryParam("tool_id") + private String toolId; + + public CreateToolRequest setParent(String parent) { + this.parent = parent; + return this; + } + + public String getParent() { + return parent; + } + + public CreateToolRequest setTool(Tool tool) { + this.tool = tool; + return this; + } + + public Tool getTool() { + return tool; + } + + public CreateToolRequest setToolId(String toolId) { + this.toolId = toolId; + return this; + } + + public String getToolId() { + return toolId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateToolRequest that = (CreateToolRequest) o; + return Objects.equals(parent, that.parent) + && Objects.equals(tool, that.tool) + && Objects.equals(toolId, that.toolId); + } + + @Override + public int hashCode() { + return Objects.hash(parent, tool, toolId); + } + + @Override + public String toString() { + return new ToStringer(CreateToolRequest.class) + .add("parent", parent) + .add("tool", tool) + .add("toolId", toolId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/DeleteSupervisorAgentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/DeleteSupervisorAgentRequest.java new file mode 100755 index 000000000..ad751ef0e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/DeleteSupervisorAgentRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.supervisoragents; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class DeleteSupervisorAgentRequest { + /** The resource name of the Supervisor Agent. Format: supervisor-agents/{supervisor_agent_id} */ + @JsonIgnore private String name; + + public DeleteSupervisorAgentRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteSupervisorAgentRequest that = (DeleteSupervisorAgentRequest) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(DeleteSupervisorAgentRequest.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/DeleteToolRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/DeleteToolRequest.java new file mode 100755 index 000000000..e16e806cf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/DeleteToolRequest.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.supervisoragents; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class DeleteToolRequest { + /** + * The resource name of the Tool. 
Format: supervisor-agents/{supervisor_agent_id}/tools/{tool_id} + */ + @JsonIgnore private String name; + + public DeleteToolRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteToolRequest that = (DeleteToolRequest) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(DeleteToolRequest.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/GenieSpace.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/GenieSpace.java new file mode 100755 index 000000000..5fac01eda --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/GenieSpace.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.supervisoragents; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class GenieSpace { + /** The ID of the genie space. 
*/ + @JsonProperty("id") + private String id; + + public GenieSpace setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieSpace that = (GenieSpace) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(GenieSpace.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/GetSupervisorAgentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/GetSupervisorAgentRequest.java new file mode 100755 index 000000000..8429b659a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/GetSupervisorAgentRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.supervisoragents; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GetSupervisorAgentRequest { + /** The resource name of the Supervisor Agent. 
Format: supervisor-agents/{supervisor_agent_id} */ + @JsonIgnore private String name; + + public GetSupervisorAgentRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetSupervisorAgentRequest that = (GetSupervisorAgentRequest) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(GetSupervisorAgentRequest.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/GetToolRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/GetToolRequest.java new file mode 100755 index 000000000..b989de681 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/GetToolRequest.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.supervisoragents; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GetToolRequest { + /** + * The resource name of the Tool. 
Format: supervisor-agents/{supervisor_agent_id}/tools/{tool_id} + */ + @JsonIgnore private String name; + + public GetToolRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetToolRequest that = (GetToolRequest) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(GetToolRequest.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/KnowledgeAssistant.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/KnowledgeAssistant.java new file mode 100755 index 000000000..6aad1e016 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/KnowledgeAssistant.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.supervisoragents; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class KnowledgeAssistant { + /** The ID of the knowledge assistant. */ + @JsonProperty("knowledge_assistant_id") + private String knowledgeAssistantId; + + /** Deprecated: use knowledge_assistant_id instead. 
*/ + @JsonProperty("serving_endpoint_name") + private String servingEndpointName; + + public KnowledgeAssistant setKnowledgeAssistantId(String knowledgeAssistantId) { + this.knowledgeAssistantId = knowledgeAssistantId; + return this; + } + + public String getKnowledgeAssistantId() { + return knowledgeAssistantId; + } + + public KnowledgeAssistant setServingEndpointName(String servingEndpointName) { + this.servingEndpointName = servingEndpointName; + return this; + } + + public String getServingEndpointName() { + return servingEndpointName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + KnowledgeAssistant that = (KnowledgeAssistant) o; + return Objects.equals(knowledgeAssistantId, that.knowledgeAssistantId) + && Objects.equals(servingEndpointName, that.servingEndpointName); + } + + @Override + public int hashCode() { + return Objects.hash(knowledgeAssistantId, servingEndpointName); + } + + @Override + public String toString() { + return new ToStringer(KnowledgeAssistant.class) + .add("knowledgeAssistantId", knowledgeAssistantId) + .add("servingEndpointName", servingEndpointName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/ListSupervisorAgentsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/ListSupervisorAgentsRequest.java new file mode 100755 index 000000000..14b595a87 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/ListSupervisorAgentsRequest.java @@ -0,0 +1,67 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.supervisoragents; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class ListSupervisorAgentsRequest { + /** + * The maximum number of supervisor agents to return. If unspecified, at most 100 supervisor + * agents will be returned. The maximum value is 100; values above 100 will be coerced to 100. + */ + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + /** + * A page token, received from a previous `ListSupervisorAgents` call. Provide this to retrieve + * the subsequent page. If unspecified, the first page will be returned. + */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListSupervisorAgentsRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListSupervisorAgentsRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListSupervisorAgentsRequest that = (ListSupervisorAgentsRequest) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListSupervisorAgentsRequest.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/ListSupervisorAgentsResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/ListSupervisorAgentsResponse.java new file mode 100755 index 000000000..25dc4747a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/ListSupervisorAgentsResponse.java @@ -0,0 +1,64 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.supervisoragents; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ListSupervisorAgentsResponse { + /** + * A token that can be sent as `page_token` to retrieve the next page. If this field is omitted, + * there are no subsequent pages. + */ + @JsonProperty("next_page_token") + private String nextPageToken; + + /** */ + @JsonProperty("supervisor_agents") + private Collection supervisorAgents; + + public ListSupervisorAgentsResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListSupervisorAgentsResponse setSupervisorAgents( + Collection supervisorAgents) { + this.supervisorAgents = supervisorAgents; + return this; + } + + public Collection getSupervisorAgents() { + return supervisorAgents; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListSupervisorAgentsResponse that = (ListSupervisorAgentsResponse) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(supervisorAgents, that.supervisorAgents); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, supervisorAgents); + } + + @Override + public String toString() { + return new ToStringer(ListSupervisorAgentsResponse.class) + 
.add("nextPageToken", nextPageToken) + .add("supervisorAgents", supervisorAgents) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/ListToolsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/ListToolsRequest.java new file mode 100755 index 000000000..69c824b4d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/ListToolsRequest.java @@ -0,0 +1,76 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.supervisoragents; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class ListToolsRequest { + /** */ + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + /** */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + /** Parent resource to list from. 
Format: supervisor-agents/{supervisor_agent_id} */ + @JsonIgnore private String parent; + + public ListToolsRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListToolsRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public ListToolsRequest setParent(String parent) { + this.parent = parent; + return this; + } + + public String getParent() { + return parent; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListToolsRequest that = (ListToolsRequest) o; + return Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(parent, that.parent); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken, parent); + } + + @Override + public String toString() { + return new ToStringer(ListToolsRequest.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .add("parent", parent) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/ListToolsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/ListToolsResponse.java new file mode 100755 index 000000000..61ca1bf8d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/ListToolsResponse.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.supervisoragents; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ListToolsResponse { + /** */ + @JsonProperty("next_page_token") + private String nextPageToken; + + /** */ + @JsonProperty("tools") + private Collection tools; + + public ListToolsResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListToolsResponse setTools(Collection tools) { + this.tools = tools; + return this; + } + + public Collection getTools() { + return tools; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListToolsResponse that = (ListToolsResponse) o; + return Objects.equals(nextPageToken, that.nextPageToken) && Objects.equals(tools, that.tools); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, tools); + } + + @Override + public String toString() { + return new ToStringer(ListToolsResponse.class) + .add("nextPageToken", nextPageToken) + .add("tools", tools) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/SupervisorAgent.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/SupervisorAgent.java new file mode 100755 index 000000000..f74cdb6a5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/SupervisorAgent.java @@ -0,0 +1,190 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.supervisoragents; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.protobuf.Timestamp; +import java.util.Objects; + +@Generated +public class SupervisorAgent { + /** Creation timestamp. */ + @JsonProperty("create_time") + private Timestamp createTime; + + /** The creator of the Supervisor Agent. */ + @JsonProperty("creator") + private String creator; + + /** Description of what this agent can do (user-facing). */ + @JsonProperty("description") + private String description; + + /** The display name of the Supervisor Agent, unique at workspace level. */ + @JsonProperty("display_name") + private String displayName; + + /** The name of the supervisor agent's serving endpoint. */ + @JsonProperty("endpoint_name") + private String endpointName; + + /** The MLflow experiment ID. */ + @JsonProperty("experiment_id") + private String experimentId; + + /** Deprecated: Use supervisor_agent_id instead. */ + @JsonProperty("id") + private String id; + + /** Optional natural-language instructions for the supervisor agent. */ + @JsonProperty("instructions") + private String instructions; + + /** The resource name of the SupervisorAgent. Format: supervisor-agents/{supervisor_agent_id} */ + @JsonProperty("name") + private String name; + + /** The universally unique identifier (UUID) of the Supervisor Agent. 
*/ + @JsonProperty("supervisor_agent_id") + private String supervisorAgentId; + + public SupervisorAgent setCreateTime(Timestamp createTime) { + this.createTime = createTime; + return this; + } + + public Timestamp getCreateTime() { + return createTime; + } + + public SupervisorAgent setCreator(String creator) { + this.creator = creator; + return this; + } + + public String getCreator() { + return creator; + } + + public SupervisorAgent setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public SupervisorAgent setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public SupervisorAgent setEndpointName(String endpointName) { + this.endpointName = endpointName; + return this; + } + + public String getEndpointName() { + return endpointName; + } + + public SupervisorAgent setExperimentId(String experimentId) { + this.experimentId = experimentId; + return this; + } + + public String getExperimentId() { + return experimentId; + } + + public SupervisorAgent setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public SupervisorAgent setInstructions(String instructions) { + this.instructions = instructions; + return this; + } + + public String getInstructions() { + return instructions; + } + + public SupervisorAgent setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public SupervisorAgent setSupervisorAgentId(String supervisorAgentId) { + this.supervisorAgentId = supervisorAgentId; + return this; + } + + public String getSupervisorAgentId() { + return supervisorAgentId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SupervisorAgent that = (SupervisorAgent) o; 
+ return Objects.equals(createTime, that.createTime) + && Objects.equals(creator, that.creator) + && Objects.equals(description, that.description) + && Objects.equals(displayName, that.displayName) + && Objects.equals(endpointName, that.endpointName) + && Objects.equals(experimentId, that.experimentId) + && Objects.equals(id, that.id) + && Objects.equals(instructions, that.instructions) + && Objects.equals(name, that.name) + && Objects.equals(supervisorAgentId, that.supervisorAgentId); + } + + @Override + public int hashCode() { + return Objects.hash( + createTime, + creator, + description, + displayName, + endpointName, + experimentId, + id, + instructions, + name, + supervisorAgentId); + } + + @Override + public String toString() { + return new ToStringer(SupervisorAgent.class) + .add("createTime", createTime) + .add("creator", creator) + .add("description", description) + .add("displayName", displayName) + .add("endpointName", endpointName) + .add("experimentId", experimentId) + .add("id", id) + .add("instructions", instructions) + .add("name", name) + .add("supervisorAgentId", supervisorAgentId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/SupervisorAgentsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/SupervisorAgentsAPI.java new file mode 100755 index 000000000..f5a9dc48e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/SupervisorAgentsAPI.java @@ -0,0 +1,129 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.supervisoragents; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** Manage Supervisor Agents and related resources. 
*/ +@Generated +public class SupervisorAgentsAPI { + private static final Logger LOG = LoggerFactory.getLogger(SupervisorAgentsAPI.class); + + private final SupervisorAgentsService impl; + + /** Regular-use constructor */ + public SupervisorAgentsAPI(ApiClient apiClient) { + impl = new SupervisorAgentsImpl(apiClient); + } + + /** Constructor for mocks */ + public SupervisorAgentsAPI(SupervisorAgentsService mock) { + impl = mock; + } + + /** Creates a new Supervisor Agent. */ + public SupervisorAgent createSupervisorAgent(CreateSupervisorAgentRequest request) { + return impl.createSupervisorAgent(request); + } + + /** + * Creates a Tool under a Supervisor Agent. Specify one of "genie_space", "knowledge_assistant", + * "uc_function", "connection", "app", "volume", "lakeview_dashboard" in the request body. + */ + public Tool createTool(CreateToolRequest request) { + return impl.createTool(request); + } + + public void deleteSupervisorAgent(String name) { + deleteSupervisorAgent(new DeleteSupervisorAgentRequest().setName(name)); + } + + /** Deletes a Supervisor Agent. */ + public void deleteSupervisorAgent(DeleteSupervisorAgentRequest request) { + impl.deleteSupervisorAgent(request); + } + + public void deleteTool(String name) { + deleteTool(new DeleteToolRequest().setName(name)); + } + + /** Deletes a Tool. */ + public void deleteTool(DeleteToolRequest request) { + impl.deleteTool(request); + } + + public SupervisorAgent getSupervisorAgent(String name) { + return getSupervisorAgent(new GetSupervisorAgentRequest().setName(name)); + } + + /** Gets a Supervisor Agent. */ + public SupervisorAgent getSupervisorAgent(GetSupervisorAgentRequest request) { + return impl.getSupervisorAgent(request); + } + + public Tool getTool(String name) { + return getTool(new GetToolRequest().setName(name)); + } + + /** Gets a Tool. */ + public Tool getTool(GetToolRequest request) { + return impl.getTool(request); + } + + /** Lists Supervisor Agents. 
*/ + public Iterable listSupervisorAgents(ListSupervisorAgentsRequest request) { + return new Paginator<>( + request, + impl::listSupervisorAgents, + ListSupervisorAgentsResponse::getSupervisorAgents, + response -> { + String token = response.getNextPageToken(); + if (token == null || token.isEmpty()) { + return null; + } + return request.setPageToken(token); + }); + } + + public Iterable listTools(String parent) { + return listTools(new ListToolsRequest().setParent(parent)); + } + + /** Lists Tools under a Supervisor Agent. */ + public Iterable listTools(ListToolsRequest request) { + return new Paginator<>( + request, + impl::listTools, + ListToolsResponse::getTools, + response -> { + String token = response.getNextPageToken(); + if (token == null || token.isEmpty()) { + return null; + } + return request.setPageToken(token); + }); + } + + /** + * Updates a Supervisor Agent. The fields that are required depend on the paths specified in + * `update_mask`. Only fields included in the mask will be updated. + */ + public SupervisorAgent updateSupervisorAgent(UpdateSupervisorAgentRequest request) { + return impl.updateSupervisorAgent(request); + } + + /** + * Updates a Tool. Only the `description` field can be updated. To change immutable fields such as + * tool type, spec, or tool ID, delete the tool and recreate it. + */ + public Tool updateTool(UpdateToolRequest request) { + return impl.updateTool(request); + } + + public SupervisorAgentsService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/SupervisorAgentsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/SupervisorAgentsImpl.java new file mode 100755 index 000000000..d8cb83e00 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/SupervisorAgentsImpl.java @@ -0,0 +1,192 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+package com.databricks.sdk.service.supervisoragents; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.support.Generated; +import java.io.IOException; + +/** Package-local implementation of SupervisorAgents */ +@Generated +class SupervisorAgentsImpl implements SupervisorAgentsService { + private final ApiClient apiClient; + + public SupervisorAgentsImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public SupervisorAgent createSupervisorAgent(CreateSupervisorAgentRequest request) { + String path = "/api/2.1/supervisor-agents"; + try { + Request req = new Request("POST", path, apiClient.serialize(request.getSupervisorAgent())); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } + return apiClient.execute(req, SupervisorAgent.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public Tool createTool(CreateToolRequest request) { + String path = String.format("/api/2.1/%s/tools", request.getParent()); + try { + Request req = new Request("POST", path, apiClient.serialize(request.getTool())); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } + return apiClient.execute(req, Tool.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void deleteSupervisorAgent(DeleteSupervisorAgentRequest request) { + String path = String.format("/api/2.1/%s", request.getName()); + try { + Request 
req = new Request("DELETE", path); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } + apiClient.execute(req, Void.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void deleteTool(DeleteToolRequest request) { + String path = String.format("/api/2.1/%s", request.getName()); + try { + Request req = new Request("DELETE", path); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } + apiClient.execute(req, Void.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public SupervisorAgent getSupervisorAgent(GetSupervisorAgentRequest request) { + String path = String.format("/api/2.1/%s", request.getName()); + try { + Request req = new Request("GET", path); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } + return apiClient.execute(req, SupervisorAgent.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public Tool getTool(GetToolRequest request) { + String path = String.format("/api/2.1/%s", request.getName()); + try { + Request req = new Request("GET", path); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } + return apiClient.execute(req, Tool.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override 
+ public ListSupervisorAgentsResponse listSupervisorAgents(ListSupervisorAgentsRequest request) { + String path = "/api/2.1/supervisor-agents"; + try { + Request req = new Request("GET", path); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } + return apiClient.execute(req, ListSupervisorAgentsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ListToolsResponse listTools(ListToolsRequest request) { + String path = String.format("/api/2.1/%s/tools", request.getParent()); + try { + Request req = new Request("GET", path); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } + return apiClient.execute(req, ListToolsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public SupervisorAgent updateSupervisorAgent(UpdateSupervisorAgentRequest request) { + String path = String.format("/api/2.1/%s", request.getName()); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request.getSupervisorAgent())); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } + return apiClient.execute(req, SupervisorAgent.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public Tool updateTool(UpdateToolRequest request) { + String path = String.format("/api/2.1/%s", request.getName()); + try { + Request req = new Request("PATCH", path, 
apiClient.serialize(request.getTool())); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } + return apiClient.execute(req, Tool.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/SupervisorAgentsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/SupervisorAgentsService.java new file mode 100755 index 000000000..2445a0bb1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/SupervisorAgentsService.java @@ -0,0 +1,54 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.supervisoragents; + +import com.databricks.sdk.support.Generated; + +/** + * Manage Supervisor Agents and related resources. + * + *

This is the high-level interface that contains generated methods. + * +

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface SupervisorAgentsService { + /** Creates a new Supervisor Agent. */ + SupervisorAgent createSupervisorAgent(CreateSupervisorAgentRequest createSupervisorAgentRequest); + + /** + * Creates a Tool under a Supervisor Agent. Specify one of "genie_space", "knowledge_assistant", + * "uc_function", "connection", "app", "volume", "lakeview_dashboard" in the request body. + */ + Tool createTool(CreateToolRequest createToolRequest); + + /** Deletes a Supervisor Agent. */ + void deleteSupervisorAgent(DeleteSupervisorAgentRequest deleteSupervisorAgentRequest); + + /** Deletes a Tool. */ + void deleteTool(DeleteToolRequest deleteToolRequest); + + /** Gets a Supervisor Agent. */ + SupervisorAgent getSupervisorAgent(GetSupervisorAgentRequest getSupervisorAgentRequest); + + /** Gets a Tool. */ + Tool getTool(GetToolRequest getToolRequest); + + /** Lists Supervisor Agents. */ + ListSupervisorAgentsResponse listSupervisorAgents( + ListSupervisorAgentsRequest listSupervisorAgentsRequest); + + /** Lists Tools under a Supervisor Agent. */ + ListToolsResponse listTools(ListToolsRequest listToolsRequest); + + /** + * Updates a Supervisor Agent. The fields that are required depend on the paths specified in + * `update_mask`. Only fields included in the mask will be updated. + */ + SupervisorAgent updateSupervisorAgent(UpdateSupervisorAgentRequest updateSupervisorAgentRequest); + + /** + * Updates a Tool. Only the `description` field can be updated. To change immutable fields such as + * tool type, spec, or tool ID, delete the tool and recreate it. 
+ */ + Tool updateTool(UpdateToolRequest updateToolRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Tool.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Tool.java new file mode 100755 index 000000000..7fede2aac --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Tool.java @@ -0,0 +1,208 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.supervisoragents; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class Tool { + /** */ + @JsonProperty("app") + private App app; + + /** */ + @JsonProperty("connection") + private Connection connection; + + /** Description of what this tool does (user-facing). */ + @JsonProperty("description") + private String description; + + /** */ + @JsonProperty("genie_space") + private GenieSpace genieSpace; + + /** Deprecated: Use tool_id instead. */ + @JsonProperty("id") + private String id; + + /** */ + @JsonProperty("knowledge_assistant") + private KnowledgeAssistant knowledgeAssistant; + + /** Full resource name: supervisor-agents/{supervisor_agent_id}/tools/{tool_id} */ + @JsonProperty("name") + private String name; + + /** User specified id of the Tool. */ + @JsonProperty("tool_id") + private String toolId; + + /** + * Tool type. Must be one of: "genie_space", "knowledge_assistant", "uc_function", "connection", + * "app", "volume", "lakeview_dashboard", "serving_endpoint". 
+ */ + @JsonProperty("tool_type") + private String toolType; + + /** */ + @JsonProperty("uc_function") + private UcFunction ucFunction; + + /** */ + @JsonProperty("volume") + private Volume volume; + + public Tool setApp(App app) { + this.app = app; + return this; + } + + public App getApp() { + return app; + } + + public Tool setConnection(Connection connection) { + this.connection = connection; + return this; + } + + public Connection getConnection() { + return connection; + } + + public Tool setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public Tool setGenieSpace(GenieSpace genieSpace) { + this.genieSpace = genieSpace; + return this; + } + + public GenieSpace getGenieSpace() { + return genieSpace; + } + + public Tool setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public Tool setKnowledgeAssistant(KnowledgeAssistant knowledgeAssistant) { + this.knowledgeAssistant = knowledgeAssistant; + return this; + } + + public KnowledgeAssistant getKnowledgeAssistant() { + return knowledgeAssistant; + } + + public Tool setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public Tool setToolId(String toolId) { + this.toolId = toolId; + return this; + } + + public String getToolId() { + return toolId; + } + + public Tool setToolType(String toolType) { + this.toolType = toolType; + return this; + } + + public String getToolType() { + return toolType; + } + + public Tool setUcFunction(UcFunction ucFunction) { + this.ucFunction = ucFunction; + return this; + } + + public UcFunction getUcFunction() { + return ucFunction; + } + + public Tool setVolume(Volume volume) { + this.volume = volume; + return this; + } + + public Volume getVolume() { + return volume; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || 
getClass() != o.getClass()) return false; + Tool that = (Tool) o; + return Objects.equals(app, that.app) + && Objects.equals(connection, that.connection) + && Objects.equals(description, that.description) + && Objects.equals(genieSpace, that.genieSpace) + && Objects.equals(id, that.id) + && Objects.equals(knowledgeAssistant, that.knowledgeAssistant) + && Objects.equals(name, that.name) + && Objects.equals(toolId, that.toolId) + && Objects.equals(toolType, that.toolType) + && Objects.equals(ucFunction, that.ucFunction) + && Objects.equals(volume, that.volume); + } + + @Override + public int hashCode() { + return Objects.hash( + app, + connection, + description, + genieSpace, + id, + knowledgeAssistant, + name, + toolId, + toolType, + ucFunction, + volume); + } + + @Override + public String toString() { + return new ToStringer(Tool.class) + .add("app", app) + .add("connection", connection) + .add("description", description) + .add("genieSpace", genieSpace) + .add("id", id) + .add("knowledgeAssistant", knowledgeAssistant) + .add("name", name) + .add("toolId", toolId) + .add("toolType", toolType) + .add("ucFunction", ucFunction) + .add("volume", volume) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/UcFunction.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/UcFunction.java new file mode 100755 index 000000000..58e617513 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/UcFunction.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.supervisoragents; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class UcFunction { + /** Full uc function name */ + @JsonProperty("name") + private String name; + + public UcFunction setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UcFunction that = (UcFunction) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(UcFunction.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/UpdateSupervisorAgentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/UpdateSupervisorAgentRequest.java new file mode 100755 index 000000000..74290346d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/UpdateSupervisorAgentRequest.java @@ -0,0 +1,77 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.supervisoragents; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.protobuf.FieldMask; +import java.util.Objects; + +@Generated +public class UpdateSupervisorAgentRequest { + /** The resource name of the SupervisorAgent. 
Format: supervisor-agents/{supervisor_agent_id} */ + @JsonIgnore private String name; + + /** The SupervisorAgent to update. */ + @JsonProperty("supervisor_agent") + private SupervisorAgent supervisorAgent; + + /** Field mask for fields to be updated. */ + @JsonIgnore + @QueryParam("update_mask") + private FieldMask updateMask; + + public UpdateSupervisorAgentRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public UpdateSupervisorAgentRequest setSupervisorAgent(SupervisorAgent supervisorAgent) { + this.supervisorAgent = supervisorAgent; + return this; + } + + public SupervisorAgent getSupervisorAgent() { + return supervisorAgent; + } + + public UpdateSupervisorAgentRequest setUpdateMask(FieldMask updateMask) { + this.updateMask = updateMask; + return this; + } + + public FieldMask getUpdateMask() { + return updateMask; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateSupervisorAgentRequest that = (UpdateSupervisorAgentRequest) o; + return Objects.equals(name, that.name) + && Objects.equals(supervisorAgent, that.supervisorAgent) + && Objects.equals(updateMask, that.updateMask); + } + + @Override + public int hashCode() { + return Objects.hash(name, supervisorAgent, updateMask); + } + + @Override + public String toString() { + return new ToStringer(UpdateSupervisorAgentRequest.class) + .add("name", name) + .add("supervisorAgent", supervisorAgent) + .add("updateMask", updateMask) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/UpdateToolRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/UpdateToolRequest.java new file mode 100755 index 000000000..027cf67bc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/UpdateToolRequest.java @@ -0,0 +1,77 
@@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.supervisoragents; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.protobuf.FieldMask; +import java.util.Objects; + +@Generated +public class UpdateToolRequest { + /** Full resource name: supervisor-agents/{supervisor_agent_id}/tools/{tool_id} */ + @JsonIgnore private String name; + + /** The Tool to update. */ + @JsonProperty("tool") + private Tool tool; + + /** Field mask for fields to be updated. */ + @JsonIgnore + @QueryParam("update_mask") + private FieldMask updateMask; + + public UpdateToolRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public UpdateToolRequest setTool(Tool tool) { + this.tool = tool; + return this; + } + + public Tool getTool() { + return tool; + } + + public UpdateToolRequest setUpdateMask(FieldMask updateMask) { + this.updateMask = updateMask; + return this; + } + + public FieldMask getUpdateMask() { + return updateMask; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateToolRequest that = (UpdateToolRequest) o; + return Objects.equals(name, that.name) + && Objects.equals(tool, that.tool) + && Objects.equals(updateMask, that.updateMask); + } + + @Override + public int hashCode() { + return Objects.hash(name, tool, updateMask); + } + + @Override + public String toString() { + return new ToStringer(UpdateToolRequest.class) + .add("name", name) + .add("tool", tool) + .add("updateMask", updateMask) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Volume.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Volume.java new file mode 100755 index 000000000..6c96aff1c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/supervisoragents/Volume.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.supervisoragents; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class Volume { + /** Full uc volume name */ + @JsonProperty("name") + private String name; + + public Volume setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Volume that = (Volume) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(Volume.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecResponse.java index 9f2f17700..d8cbaf63a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecResponse.java @@ -10,6 +10,14 @@ @Generated public class DeltaSyncVectorIndexSpecResponse { + /** + * [Optional] Select the columns to sync with the vector index. If you leave this field blank, all + * columns from the source table are synced with the index. 
The primary key column and embedding + * source column or embedding vector column are always synced. + */ + @JsonProperty("columns_to_sync") + private Collection columnsToSync; + /** The columns that contain the embedding source. */ @JsonProperty("embedding_source_columns") private Collection embeddingSourceColumns; @@ -43,6 +51,15 @@ public class DeltaSyncVectorIndexSpecResponse { @JsonProperty("source_table") private String sourceTable; + public DeltaSyncVectorIndexSpecResponse setColumnsToSync(Collection columnsToSync) { + this.columnsToSync = columnsToSync; + return this; + } + + public Collection getColumnsToSync() { + return columnsToSync; + } + public DeltaSyncVectorIndexSpecResponse setEmbeddingSourceColumns( Collection embeddingSourceColumns) { this.embeddingSourceColumns = embeddingSourceColumns; @@ -105,7 +122,8 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; DeltaSyncVectorIndexSpecResponse that = (DeltaSyncVectorIndexSpecResponse) o; - return Objects.equals(embeddingSourceColumns, that.embeddingSourceColumns) + return Objects.equals(columnsToSync, that.columnsToSync) + && Objects.equals(embeddingSourceColumns, that.embeddingSourceColumns) && Objects.equals(embeddingVectorColumns, that.embeddingVectorColumns) && Objects.equals(embeddingWritebackTable, that.embeddingWritebackTable) && Objects.equals(pipelineId, that.pipelineId) @@ -116,6 +134,7 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( + columnsToSync, embeddingSourceColumns, embeddingVectorColumns, embeddingWritebackTable, @@ -127,6 +146,7 @@ public int hashCode() { @Override public String toString() { return new ToStringer(DeltaSyncVectorIndexSpecResponse.class) + .add("columnsToSync", columnsToSync) .add("embeddingSourceColumns", embeddingSourceColumns) .add("embeddingVectorColumns", embeddingVectorColumns) .add("embeddingWritebackTable", embeddingWritebackTable) diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java index 1f31a2159..911c92fce 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java @@ -32,10 +32,10 @@ public WorkspaceAPI(WorkspaceService mock) { } /** - * Deprecated: use WorkspaceHierarchyService.DeleteTreeNode instead. Deletes an object or a - * directory (and optionally recursively deletes all objects in the directory). * If `path` does - * not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. * If `path` is a non-empty - * directory and `recursive` is set to `false`, this call returns an error `DIRECTORY_NOT_EMPTY`. + * Deletes an object or a directory (and optionally recursively deletes all objects in the + * directory). * If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. * + * If `path` is a non-empty directory and `recursive` is set to `false`, this call returns an + * error `DIRECTORY_NOT_EMPTY`. * *

Object deletion cannot be undone and deleting a directory recursively is not atomic. */ @@ -94,8 +94,8 @@ public ObjectInfo getStatus(String path) { } /** - * Deprecated: use WorkspaceHierarchyService.GetTreeNode instead. Gets the status of an object or - * a directory. If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. + * Gets the status of an object or a directory. If `path` does not exist, this call returns an + * error `RESOURCE_DOES_NOT_EXIST`. */ public ObjectInfo getStatus(GetStatusRequest request) { return impl.getStatus(request); @@ -117,9 +117,8 @@ public Iterable list(String path) { } /** - * Deprecated: use WorkspaceHierarchyService.ListTreeNodes instead. Lists the contents of a - * directory, or the object if it is not a directory. If the input path does not exist, this call - * returns an error `RESOURCE_DOES_NOT_EXIST`. + * Lists the contents of a directory, or the object if it is not a directory. If the input path + * does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. */ public Iterable list(ListWorkspaceRequest request) { return new Paginator<>(request, impl::list, ListResponse::getObjects, response -> null); @@ -130,10 +129,9 @@ public void mkdirs(String path) { } /** - * Deprecated: use WorkspaceHierarchyService.CreateTreeNode instead. Creates the specified - * directory (and necessary parent directories if they do not exist). If there is an object (not a - * directory) at any prefix of the input path, this call returns an error - * `RESOURCE_ALREADY_EXISTS`. + * Creates the specified directory (and necessary parent directories if they do not exist). If + * there is an object (not a directory) at any prefix of the input path, this call returns an + * error `RESOURCE_ALREADY_EXISTS`. * *

Note that if this operation fails it may have succeeded in creating some of the necessary * parent directories. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java index a40367da1..c7705bcdf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java @@ -18,10 +18,10 @@ @Generated public interface WorkspaceService { /** - * Deprecated: use WorkspaceHierarchyService.DeleteTreeNode instead. Deletes an object or a - * directory (and optionally recursively deletes all objects in the directory). * If `path` does - * not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. * If `path` is a non-empty - * directory and `recursive` is set to `false`, this call returns an error `DIRECTORY_NOT_EMPTY`. + * Deletes an object or a directory (and optionally recursively deletes all objects in the + * directory). * If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. * + * If `path` is a non-empty directory and `recursive` is set to `false`, this call returns an + * error `DIRECTORY_NOT_EMPTY`. * *

Object deletion cannot be undone and deleting a directory recursively is not atomic. */ @@ -49,8 +49,8 @@ WorkspaceObjectPermissions getPermissions( GetWorkspaceObjectPermissionsRequest getWorkspaceObjectPermissionsRequest); /** - * Deprecated: use WorkspaceHierarchyService.GetTreeNode instead. Gets the status of an object or - * a directory. If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. + * Gets the status of an object or a directory. If `path` does not exist, this call returns an + * error `RESOURCE_DOES_NOT_EXIST`. */ ObjectInfo getStatus(GetStatusRequest getStatusRequest); @@ -64,17 +64,15 @@ WorkspaceObjectPermissions getPermissions( void importContent(Import importContent); /** - * Deprecated: use WorkspaceHierarchyService.ListTreeNodes instead. Lists the contents of a - * directory, or the object if it is not a directory. If the input path does not exist, this call - * returns an error `RESOURCE_DOES_NOT_EXIST`. + * Lists the contents of a directory, or the object if it is not a directory. If the input path + * does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. */ ListResponse list(ListWorkspaceRequest listWorkspaceRequest); /** - * Deprecated: use WorkspaceHierarchyService.CreateTreeNode instead. Creates the specified - * directory (and necessary parent directories if they do not exist). If there is an object (not a - * directory) at any prefix of the input path, this call returns an error - * `RESOURCE_ALREADY_EXISTS`. + * Creates the specified directory (and necessary parent directories if they do not exist). If + * there is an object (not a directory) at any prefix of the input path, this call returns an + * error `RESOURCE_ALREADY_EXISTS`. * *

Note that if this operation fails it may have succeeded in creating some of the necessary * parent directories. diff --git a/tagging.py b/tagging.py index 56e57781b..79f2894c6 100644 --- a/tagging.py +++ b/tagging.py @@ -18,6 +18,7 @@ CHANGELOG_FILE_NAME = "CHANGELOG.md" PACKAGE_FILE_NAME = ".package.json" CODEGEN_FILE_NAME = ".codegen.json" +CREATED_TAGS_FILE_NAME = "created_tags.json" """ This script tags the release of the SDKs using a combination of the GitHub API and Git commands. It reads the local repository to determine necessary changes, updates changelogs, and creates tags. @@ -467,9 +468,29 @@ def update_changelogs(packages: List[Package]) -> List[TagInfo]: def push_tags(tag_infos: List[TagInfo]) -> None: """ Creates and pushes tags to the repository. + + As a side effect, writes the names of successfully created tags to + ``./created_tags.json`` so that workflows triggering this script can + discover what was produced (the GitHub Actions workflow uploads this + file as the ``created-tags`` artifact). + + Schema: + {"tags": ["service-a/v1.2.3", "service-b/v0.4.0"]} + + The manifest is written even if tag creation fails partway through: + tags that succeeded before the failure are flushed before the + exception is re-raised, so recovery-mode runs still surface their + output. """ - for tag_info in tag_infos: - gh.tag(tag_info.tag_name(), tag_info.content) + created: List[str] = [] + try: + for tag_info in tag_infos: + gh.tag(tag_info.tag_name(), tag_info.content) + created.append(tag_info.tag_name()) + finally: + manifest_path = os.path.join(os.getcwd(), CREATED_TAGS_FILE_NAME) + with open(manifest_path, "w") as f: + json.dump({"tags": created}, f) def run_command(command: List[str]) -> str: