databricks.StorageCredential
This resource can be used with an account or workspace-level provider.
To work with external tables, Unity Catalog introduces two new objects for accessing and working with external cloud storage:
- databricks.StorageCredential represents an authentication method for accessing cloud storage (e.g. an IAM role for Amazon S3 or a service principal/managed identity for Azure Storage). Storage credentials are access-controlled, which determines which users can use the credential.
- databricks.ExternalLocation combines a cloud storage path with a storage credential that can be used to access the location.
On AWS, the IAM role for a storage credential requires a trust policy. See the documentation for more details. The databricks.getAwsUnityCatalogAssumeRolePolicy data source can be used to create the necessary AWS Unity Catalog assume role policy, as sketched below.
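As a hedged sketch (not the provider's documented example), the policy JSON from that data source can be fed into an IAM role; the awsAccountId, roleName, and externalId argument names and the aws.iam.Role wiring are assumptions to verify against the data source reference:
import * as aws from "@pulumi/aws";
import * as databricks from "@pulumi/databricks";

// Hypothetical values; the external ID is typically your Databricks account ID.
const assumeRolePolicy = databricks.getAwsUnityCatalogAssumeRolePolicyOutput({
    awsAccountId: "123456789012",
    roleName: "unity-catalog-role",
    externalId: "00000000-0000-0000-0000-000000000000",
});

// IAM role whose trust policy lets Unity Catalog assume it.
const ucRole = new aws.iam.Role("unity-catalog-role", {
    name: "unity-catalog-role",
    assumeRolePolicy: assumeRolePolicy.json,
});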
Example Usage
For AWS
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";
const external = new databricks.StorageCredential("external", {
    name: externalDataAccess.name,
    awsIamRole: {
        roleArn: externalDataAccess.arn,
    },
    comment: "Managed by TF",
});
const externalCreds = new databricks.Grants("external_creds", {
    storageCredential: external.id,
    grants: [{
        principal: "Data Engineers",
        privileges: ["CREATE_EXTERNAL_TABLE"],
    }],
});
import pulumi
import pulumi_databricks as databricks
external = databricks.StorageCredential("external",
    name=external_data_access["name"],
    aws_iam_role={
        "role_arn": external_data_access["arn"],
    },
    comment="Managed by TF")
external_creds = databricks.Grants("external_creds",
    storage_credential=external.id,
    grants=[{
        "principal": "Data Engineers",
        "privileges": ["CREATE_EXTERNAL_TABLE"],
    }])
package main
import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		external, err := databricks.NewStorageCredential(ctx, "external", &databricks.StorageCredentialArgs{
			Name: pulumi.Any(externalDataAccess.Name),
			AwsIamRole: &databricks.StorageCredentialAwsIamRoleArgs{
				RoleArn: pulumi.Any(externalDataAccess.Arn),
			},
			Comment: pulumi.String("Managed by TF"),
		})
		if err != nil {
			return err
		}
		_, err = databricks.NewGrants(ctx, "external_creds", &databricks.GrantsArgs{
			StorageCredential: external.ID(),
			Grants: databricks.GrantsGrantArray{
				&databricks.GrantsGrantArgs{
					Principal: pulumi.String("Data Engineers"),
					Privileges: pulumi.StringArray{
						pulumi.String("CREATE_EXTERNAL_TABLE"),
					},
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;
return await Deployment.RunAsync(() => 
{
    var external = new Databricks.StorageCredential("external", new()
    {
        Name = externalDataAccess.Name,
        AwsIamRole = new Databricks.Inputs.StorageCredentialAwsIamRoleArgs
        {
            RoleArn = externalDataAccess.Arn,
        },
        Comment = "Managed by TF",
    });
    var externalCreds = new Databricks.Grants("external_creds", new()
    {
        StorageCredential = external.Id,
        GrantDetails = new[]
        {
            new Databricks.Inputs.GrantsGrantArgs
            {
                Principal = "Data Engineers",
                Privileges = new[]
                {
                    "CREATE_EXTERNAL_TABLE",
                },
            },
        },
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.StorageCredential;
import com.pulumi.databricks.StorageCredentialArgs;
import com.pulumi.databricks.inputs.StorageCredentialAwsIamRoleArgs;
import com.pulumi.databricks.Grants;
import com.pulumi.databricks.GrantsArgs;
import com.pulumi.databricks.inputs.GrantsGrantArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var external = new StorageCredential("external", StorageCredentialArgs.builder()
            .name(externalDataAccess.name())
            .awsIamRole(StorageCredentialAwsIamRoleArgs.builder()
                .roleArn(externalDataAccess.arn())
                .build())
            .comment("Managed by TF")
            .build());
        var externalCreds = new Grants("externalCreds", GrantsArgs.builder()
            .storageCredential(external.id())
            .grants(GrantsGrantArgs.builder()
                .principal("Data Engineers")
                .privileges("CREATE_EXTERNAL_TABLE")
                .build())
            .build());
    }
}
resources:
  external:
    type: databricks:StorageCredential
    properties:
      name: ${externalDataAccess.name}
      awsIamRole:
        roleArn: ${externalDataAccess.arn}
      comment: Managed by TF
  externalCreds:
    type: databricks:Grants
    name: external_creds
    properties:
      storageCredential: ${external.id}
      grants:
        - principal: Data Engineers
          privileges:
            - CREATE_EXTERNAL_TABLE
For Azure
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";
const externalMi = new databricks.StorageCredential("external_mi", {
    name: "mi_credential",
    azureManagedIdentity: {
        accessConnectorId: example.id,
    },
    comment: "Managed identity credential managed by TF",
});
const externalCreds = new databricks.Grants("external_creds", {
    storageCredential: externalMi.id,
    grants: [{
        principal: "Data Engineers",
        privileges: ["CREATE_EXTERNAL_TABLE"],
    }],
});
import pulumi
import pulumi_databricks as databricks
external_mi = databricks.StorageCredential("external_mi",
    name="mi_credential",
    azure_managed_identity={
        "access_connector_id": example["id"],
    },
    comment="Managed identity credential managed by TF")
external_creds = databricks.Grants("external_creds",
    storage_credential=external_mi.id,
    grants=[{
        "principal": "Data Engineers",
        "privileges": ["CREATE_EXTERNAL_TABLE"],
    }])
package main
import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		externalMi, err := databricks.NewStorageCredential(ctx, "external_mi", &databricks.StorageCredentialArgs{
			Name: pulumi.String("mi_credential"),
			AzureManagedIdentity: &databricks.StorageCredentialAzureManagedIdentityArgs{
				AccessConnectorId: pulumi.Any(example.Id),
			},
			Comment: pulumi.String("Managed identity credential managed by TF"),
		})
		if err != nil {
			return err
		}
		_, err = databricks.NewGrants(ctx, "external_creds", &databricks.GrantsArgs{
			StorageCredential: externalMi.ID(),
			Grants: databricks.GrantsGrantArray{
				&databricks.GrantsGrantArgs{
					Principal: pulumi.String("Data Engineers"),
					Privileges: pulumi.StringArray{
						pulumi.String("CREATE_EXTERNAL_TABLE"),
					},
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;
return await Deployment.RunAsync(() => 
{
    var externalMi = new Databricks.StorageCredential("external_mi", new()
    {
        Name = "mi_credential",
        AzureManagedIdentity = new Databricks.Inputs.StorageCredentialAzureManagedIdentityArgs
        {
            AccessConnectorId = example.Id,
        },
        Comment = "Managed identity credential managed by TF",
    });
    var externalCreds = new Databricks.Grants("external_creds", new()
    {
        StorageCredential = externalMi.Id,
        GrantDetails = new[]
        {
            new Databricks.Inputs.GrantsGrantArgs
            {
                Principal = "Data Engineers",
                Privileges = new[]
                {
                    "CREATE_EXTERNAL_TABLE",
                },
            },
        },
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.StorageCredential;
import com.pulumi.databricks.StorageCredentialArgs;
import com.pulumi.databricks.inputs.StorageCredentialAzureManagedIdentityArgs;
import com.pulumi.databricks.Grants;
import com.pulumi.databricks.GrantsArgs;
import com.pulumi.databricks.inputs.GrantsGrantArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var externalMi = new StorageCredential("externalMi", StorageCredentialArgs.builder()
            .name("mi_credential")
            .azureManagedIdentity(StorageCredentialAzureManagedIdentityArgs.builder()
                .accessConnectorId(example.id())
                .build())
            .comment("Managed identity credential managed by TF")
            .build());
        var externalCreds = new Grants("externalCreds", GrantsArgs.builder()
            .storageCredential(externalMi.id())
            .grants(GrantsGrantArgs.builder()
                .principal("Data Engineers")
                .privileges("CREATE_EXTERNAL_TABLE")
                .build())
            .build());
    }
}
resources:
  externalMi:
    type: databricks:StorageCredential
    name: external_mi
    properties:
      name: mi_credential
      azureManagedIdentity:
        accessConnectorId: ${example.id}
      comment: Managed identity credential managed by TF
  externalCreds:
    type: databricks:Grants
    name: external_creds
    properties:
      storageCredential: ${externalMi.id}
      grants:
        - principal: Data Engineers
          privileges:
            - CREATE_EXTERNAL_TABLE
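In the example above, example.id refers to an Azure Databricks Access Connector. As a hedged sketch of creating one with the azure-native provider (the AccessConnector resource, its argument names, and the resource group/location values are assumptions to verify against the azure-native docs):
import * as azure_native from "@pulumi/azure-native";

// Access connector with a system-assigned managed identity;
// "rg-name" and the location are hypothetical.
const example = new azure_native.databricks.AccessConnector("example", {
    resourceGroupName: "rg-name",
    location: "westeurope",
    identity: {
        type: "SystemAssigned",
    },
});
// example.id is then passed as accessConnectorId on the storage credential.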
For GCP
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";
const external = new databricks.StorageCredential("external", {
    name: "the-creds",
    databricksGcpServiceAccount: {},
});
const externalCreds = new databricks.Grants("external_creds", {
    storageCredential: external.id,
    grants: [{
        principal: "Data Engineers",
        privileges: ["CREATE_EXTERNAL_TABLE"],
    }],
});
import pulumi
import pulumi_databricks as databricks
external = databricks.StorageCredential("external",
    name="the-creds",
    databricks_gcp_service_account={})
external_creds = databricks.Grants("external_creds",
    storage_credential=external.id,
    grants=[{
        "principal": "Data Engineers",
        "privileges": ["CREATE_EXTERNAL_TABLE"],
    }])
package main
import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		external, err := databricks.NewStorageCredential(ctx, "external", &databricks.StorageCredentialArgs{
			Name:                        pulumi.String("the-creds"),
			DatabricksGcpServiceAccount: &databricks.StorageCredentialDatabricksGcpServiceAccountArgs{},
		})
		if err != nil {
			return err
		}
		_, err = databricks.NewGrants(ctx, "external_creds", &databricks.GrantsArgs{
			StorageCredential: external.ID(),
			Grants: databricks.GrantsGrantArray{
				&databricks.GrantsGrantArgs{
					Principal: pulumi.String("Data Engineers"),
					Privileges: pulumi.StringArray{
						pulumi.String("CREATE_EXTERNAL_TABLE"),
					},
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;
return await Deployment.RunAsync(() => 
{
    var external = new Databricks.StorageCredential("external", new()
    {
        Name = "the-creds",
        DatabricksGcpServiceAccount = null,
    });
    var externalCreds = new Databricks.Grants("external_creds", new()
    {
        StorageCredential = external.Id,
        GrantDetails = new[]
        {
            new Databricks.Inputs.GrantsGrantArgs
            {
                Principal = "Data Engineers",
                Privileges = new[]
                {
                    "CREATE_EXTERNAL_TABLE",
                },
            },
        },
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.StorageCredential;
import com.pulumi.databricks.StorageCredentialArgs;
import com.pulumi.databricks.inputs.StorageCredentialDatabricksGcpServiceAccountArgs;
import com.pulumi.databricks.Grants;
import com.pulumi.databricks.GrantsArgs;
import com.pulumi.databricks.inputs.GrantsGrantArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var external = new StorageCredential("external", StorageCredentialArgs.builder()
            .name("the-creds")
            .databricksGcpServiceAccount(StorageCredentialDatabricksGcpServiceAccountArgs.builder().build())
            .build());
        var externalCreds = new Grants("externalCreds", GrantsArgs.builder()
            .storageCredential(external.id())
            .grants(GrantsGrantArgs.builder()
                .principal("Data Engineers")
                .privileges("CREATE_EXTERNAL_TABLE")
                .build())
            .build());
    }
}
resources:
  external:
    type: databricks:StorageCredential
    properties:
      name: the-creds
      databricksGcpServiceAccount: {}
  externalCreds:
    type: databricks:Grants
    name: external_creds
    properties:
      storageCredential: ${external.id}
      grants:
        - principal: Data Engineers
          privileges:
            - CREATE_EXTERNAL_TABLE
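The empty databricksGcpServiceAccount block asks Databricks to create a GCP service account and export its email on the resource. As a minimal sketch, assuming the pulumi-gcp provider and a hypothetical pre-existing bucket, that account could then be granted access to the bucket:
import * as databricks from "@pulumi/databricks";
import * as gcp from "@pulumi/gcp";

const external = new databricks.StorageCredential("external", {
    name: "the-creds",
    databricksGcpServiceAccount: {},
});

// Grant the Databricks-created service account access to the bucket;
// "my-external-bucket" is a hypothetical, pre-existing bucket name.
const bucketGrant = new gcp.storage.BucketIAMMember("external-bucket-grant", {
    bucket: "my-external-bucket",
    role: "roles/storage.objectAdmin",
    member: external.databricksGcpServiceAccount.apply(sa => `serviceAccount:${sa?.email}`),
});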
Create StorageCredential Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new StorageCredential(name: string, args?: StorageCredentialArgs, opts?: CustomResourceOptions);
@overload
def StorageCredential(resource_name: str,
                      args: Optional[StorageCredentialArgs] = None,
                      opts: Optional[ResourceOptions] = None)
@overload
def StorageCredential(resource_name: str,
                      opts: Optional[ResourceOptions] = None,
                      aws_iam_role: Optional[StorageCredentialAwsIamRoleArgs] = None,
                      azure_managed_identity: Optional[StorageCredentialAzureManagedIdentityArgs] = None,
                      azure_service_principal: Optional[StorageCredentialAzureServicePrincipalArgs] = None,
                      cloudflare_api_token: Optional[StorageCredentialCloudflareApiTokenArgs] = None,
                      comment: Optional[str] = None,
                      databricks_gcp_service_account: Optional[StorageCredentialDatabricksGcpServiceAccountArgs] = None,
                      force_destroy: Optional[bool] = None,
                      force_update: Optional[bool] = None,
                      gcp_service_account_key: Optional[StorageCredentialGcpServiceAccountKeyArgs] = None,
                      isolation_mode: Optional[str] = None,
                      metastore_id: Optional[str] = None,
                      name: Optional[str] = None,
                      owner: Optional[str] = None,
                      read_only: Optional[bool] = None,
                      skip_validation: Optional[bool] = None)
func NewStorageCredential(ctx *Context, name string, args *StorageCredentialArgs, opts ...ResourceOption) (*StorageCredential, error)
public StorageCredential(string name, StorageCredentialArgs? args = null, CustomResourceOptions? opts = null)
public StorageCredential(String name, StorageCredentialArgs args)
public StorageCredential(String name, StorageCredentialArgs args, CustomResourceOptions options)
type: databricks:StorageCredential
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource (resource_name in Python).
- args StorageCredentialArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control the resource's behavior (ResourceOptions in Python, ResourceOption in Go, options in Java).
- ctx Context
- (Go only) Context object for the current deployment.
Constructor example
The following reference example uses placeholder values for all input properties.
var storageCredentialResource = new Databricks.StorageCredential("storageCredentialResource", new()
{
    AwsIamRole = new Databricks.Inputs.StorageCredentialAwsIamRoleArgs
    {
        RoleArn = "string",
        ExternalId = "string",
        UnityCatalogIamArn = "string",
    },
    AzureManagedIdentity = new Databricks.Inputs.StorageCredentialAzureManagedIdentityArgs
    {
        AccessConnectorId = "string",
        CredentialId = "string",
        ManagedIdentityId = "string",
    },
    AzureServicePrincipal = new Databricks.Inputs.StorageCredentialAzureServicePrincipalArgs
    {
        ApplicationId = "string",
        ClientSecret = "string",
        DirectoryId = "string",
    },
    CloudflareApiToken = new Databricks.Inputs.StorageCredentialCloudflareApiTokenArgs
    {
        AccessKeyId = "string",
        AccountId = "string",
        SecretAccessKey = "string",
    },
    Comment = "string",
    DatabricksGcpServiceAccount = new Databricks.Inputs.StorageCredentialDatabricksGcpServiceAccountArgs
    {
        CredentialId = "string",
        Email = "string",
    },
    ForceDestroy = false,
    ForceUpdate = false,
    GcpServiceAccountKey = new Databricks.Inputs.StorageCredentialGcpServiceAccountKeyArgs
    {
        Email = "string",
        PrivateKey = "string",
        PrivateKeyId = "string",
    },
    IsolationMode = "string",
    MetastoreId = "string",
    Name = "string",
    Owner = "string",
    ReadOnly = false,
    SkipValidation = false,
});
example, err := databricks.NewStorageCredential(ctx, "storageCredentialResource", &databricks.StorageCredentialArgs{
	AwsIamRole: &databricks.StorageCredentialAwsIamRoleArgs{
		RoleArn:            pulumi.String("string"),
		ExternalId:         pulumi.String("string"),
		UnityCatalogIamArn: pulumi.String("string"),
	},
	AzureManagedIdentity: &databricks.StorageCredentialAzureManagedIdentityArgs{
		AccessConnectorId: pulumi.String("string"),
		CredentialId:      pulumi.String("string"),
		ManagedIdentityId: pulumi.String("string"),
	},
	AzureServicePrincipal: &databricks.StorageCredentialAzureServicePrincipalArgs{
		ApplicationId: pulumi.String("string"),
		ClientSecret:  pulumi.String("string"),
		DirectoryId:   pulumi.String("string"),
	},
	CloudflareApiToken: &databricks.StorageCredentialCloudflareApiTokenArgs{
		AccessKeyId:     pulumi.String("string"),
		AccountId:       pulumi.String("string"),
		SecretAccessKey: pulumi.String("string"),
	},
	Comment: pulumi.String("string"),
	DatabricksGcpServiceAccount: &databricks.StorageCredentialDatabricksGcpServiceAccountArgs{
		CredentialId: pulumi.String("string"),
		Email:        pulumi.String("string"),
	},
	ForceDestroy: pulumi.Bool(false),
	ForceUpdate:  pulumi.Bool(false),
	GcpServiceAccountKey: &databricks.StorageCredentialGcpServiceAccountKeyArgs{
		Email:        pulumi.String("string"),
		PrivateKey:   pulumi.String("string"),
		PrivateKeyId: pulumi.String("string"),
	},
	IsolationMode:  pulumi.String("string"),
	MetastoreId:    pulumi.String("string"),
	Name:           pulumi.String("string"),
	Owner:          pulumi.String("string"),
	ReadOnly:       pulumi.Bool(false),
	SkipValidation: pulumi.Bool(false),
})
var storageCredentialResource = new StorageCredential("storageCredentialResource", StorageCredentialArgs.builder()
    .awsIamRole(StorageCredentialAwsIamRoleArgs.builder()
        .roleArn("string")
        .externalId("string")
        .unityCatalogIamArn("string")
        .build())
    .azureManagedIdentity(StorageCredentialAzureManagedIdentityArgs.builder()
        .accessConnectorId("string")
        .credentialId("string")
        .managedIdentityId("string")
        .build())
    .azureServicePrincipal(StorageCredentialAzureServicePrincipalArgs.builder()
        .applicationId("string")
        .clientSecret("string")
        .directoryId("string")
        .build())
    .cloudflareApiToken(StorageCredentialCloudflareApiTokenArgs.builder()
        .accessKeyId("string")
        .accountId("string")
        .secretAccessKey("string")
        .build())
    .comment("string")
    .databricksGcpServiceAccount(StorageCredentialDatabricksGcpServiceAccountArgs.builder()
        .credentialId("string")
        .email("string")
        .build())
    .forceDestroy(false)
    .forceUpdate(false)
    .gcpServiceAccountKey(StorageCredentialGcpServiceAccountKeyArgs.builder()
        .email("string")
        .privateKey("string")
        .privateKeyId("string")
        .build())
    .isolationMode("string")
    .metastoreId("string")
    .name("string")
    .owner("string")
    .readOnly(false)
    .skipValidation(false)
    .build());
storage_credential_resource = databricks.StorageCredential("storageCredentialResource",
    aws_iam_role={
        "role_arn": "string",
        "external_id": "string",
        "unity_catalog_iam_arn": "string",
    },
    azure_managed_identity={
        "access_connector_id": "string",
        "credential_id": "string",
        "managed_identity_id": "string",
    },
    azure_service_principal={
        "application_id": "string",
        "client_secret": "string",
        "directory_id": "string",
    },
    cloudflare_api_token={
        "access_key_id": "string",
        "account_id": "string",
        "secret_access_key": "string",
    },
    comment="string",
    databricks_gcp_service_account={
        "credential_id": "string",
        "email": "string",
    },
    force_destroy=False,
    force_update=False,
    gcp_service_account_key={
        "email": "string",
        "private_key": "string",
        "private_key_id": "string",
    },
    isolation_mode="string",
    metastore_id="string",
    name="string",
    owner="string",
    read_only=False,
    skip_validation=False)
const storageCredentialResource = new databricks.StorageCredential("storageCredentialResource", {
    awsIamRole: {
        roleArn: "string",
        externalId: "string",
        unityCatalogIamArn: "string",
    },
    azureManagedIdentity: {
        accessConnectorId: "string",
        credentialId: "string",
        managedIdentityId: "string",
    },
    azureServicePrincipal: {
        applicationId: "string",
        clientSecret: "string",
        directoryId: "string",
    },
    cloudflareApiToken: {
        accessKeyId: "string",
        accountId: "string",
        secretAccessKey: "string",
    },
    comment: "string",
    databricksGcpServiceAccount: {
        credentialId: "string",
        email: "string",
    },
    forceDestroy: false,
    forceUpdate: false,
    gcpServiceAccountKey: {
        email: "string",
        privateKey: "string",
        privateKeyId: "string",
    },
    isolationMode: "string",
    metastoreId: "string",
    name: "string",
    owner: "string",
    readOnly: false,
    skipValidation: false,
});
type: databricks:StorageCredential
properties:
    awsIamRole:
        externalId: string
        roleArn: string
        unityCatalogIamArn: string
    azureManagedIdentity:
        accessConnectorId: string
        credentialId: string
        managedIdentityId: string
    azureServicePrincipal:
        applicationId: string
        clientSecret: string
        directoryId: string
    cloudflareApiToken:
        accessKeyId: string
        accountId: string
        secretAccessKey: string
    comment: string
    databricksGcpServiceAccount:
        credentialId: string
        email: string
    forceDestroy: false
    forceUpdate: false
    gcpServiceAccountKey:
        email: string
        privateKey: string
        privateKeyId: string
    isolationMode: string
    metastoreId: string
    name: string
    owner: string
    readOnly: false
    skipValidation: false
StorageCredential Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The StorageCredential resource accepts the following input properties:
Property names are shown in their TypeScript form; other SDKs use the language-appropriate casing (e.g. aws_iam_role in Python, AwsIamRole in C# and Go).
- awsIamRole StorageCredentialAwsIamRole
- Optional configuration block for credential details for AWS.
- azureManagedIdentity StorageCredentialAzureManagedIdentity
- Optional configuration block for using a managed identity as credential details for Azure (recommended over a service principal).
- azureServicePrincipal StorageCredentialAzureServicePrincipal
- Optional configuration block for using a service principal as credential details for Azure (legacy).
- cloudflareApiToken StorageCredentialCloudflareApiToken
- Optional configuration block for using a Cloudflare API token as credential details. This requires account admin access.
- comment string
- databricksGcpServiceAccount StorageCredentialDatabricksGcpServiceAccount
- Optional configuration block for creating a Databricks-managed GCP service account.
- forceDestroy boolean
- Delete the storage credential regardless of its dependencies.
- forceUpdate boolean
- Update the storage credential regardless of its dependents.
- gcpServiceAccountKey StorageCredentialGcpServiceAccountKey
- isolationMode string
- Whether the storage credential is accessible from all workspaces or a specific set of workspaces. Can be ISOLATION_MODE_ISOLATED or ISOLATION_MODE_OPEN. Setting the credential to ISOLATION_MODE_ISOLATED automatically allows access from the current workspace; see the sketch after this list for binding other workspaces.
- metastoreId string
- Unique identifier of the parent metastore. If set at the workspace level, it must match the ID of the metastore assigned to the workspace. When changing the metastore assigned to a workspace, this field becomes required.
- name string
- Name of the storage credential, which must be unique within the metastore. Changing this forces creation of a new resource.
- owner string
- Username, group name, or service principal application_id of the storage credential owner.
- readOnly boolean
- Indicates whether the storage credential is usable only for read operations.
- skipValidation boolean
- Suppress validation errors, if any, and force-save the storage credential.
Outputs
All input properties are implicitly available as output properties. Additionally, the StorageCredential resource produces the following output properties:
- id string
- The provider-assigned unique ID for this managed resource.
- storageCredentialId string
- Unique ID of the storage credential.
Look up Existing StorageCredential Resource
Get an existing StorageCredential resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: StorageCredentialState, opts?: CustomResourceOptions): StorageCredential
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        aws_iam_role: Optional[StorageCredentialAwsIamRoleArgs] = None,
        azure_managed_identity: Optional[StorageCredentialAzureManagedIdentityArgs] = None,
        azure_service_principal: Optional[StorageCredentialAzureServicePrincipalArgs] = None,
        cloudflare_api_token: Optional[StorageCredentialCloudflareApiTokenArgs] = None,
        comment: Optional[str] = None,
        databricks_gcp_service_account: Optional[StorageCredentialDatabricksGcpServiceAccountArgs] = None,
        force_destroy: Optional[bool] = None,
        force_update: Optional[bool] = None,
        gcp_service_account_key: Optional[StorageCredentialGcpServiceAccountKeyArgs] = None,
        isolation_mode: Optional[str] = None,
        metastore_id: Optional[str] = None,
        name: Optional[str] = None,
        owner: Optional[str] = None,
        read_only: Optional[bool] = None,
        skip_validation: Optional[bool] = None,
        storage_credential_id: Optional[str] = None) -> StorageCredential
func GetStorageCredential(ctx *Context, name string, id IDInput, state *StorageCredentialState, opts ...ResourceOption) (*StorageCredential, error)
public static StorageCredential Get(string name, Input<string> id, StorageCredentialState? state, CustomResourceOptions? opts = null)
public static StorageCredential get(String name, Output<String> id, StorageCredentialState state, CustomResourceOptions options)
resources:
  _:
    type: databricks:StorageCredential
    get:
      id: ${id}
Parameters
- name
- The unique name of the resulting resource (resource_name in Python).
- id
- The unique provider ID of the resource to look up.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
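As a minimal lookup sketch, the ID here is hypothetical; per the Import section below, this resource's ID is its name:
import * as databricks from "@pulumi/databricks";

// Adopt an existing storage credential into the program without recreating it.
const existing = databricks.StorageCredential.get("external", "external");

export const owner = existing.owner;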
All of the input properties listed above are also available as state, plus:
- storageCredentialId string
- Unique ID of the storage credential.
Supporting Types
StorageCredentialAwsIamRole, StorageCredentialAwsIamRoleArgs
- roleArn string
- The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access, of the form arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF.
- externalId string
- unityCatalogIamArn string
StorageCredentialAzureManagedIdentity, StorageCredentialAzureManagedIdentityArgs
- accessConnectorId string
- The resource ID of the Azure Databricks Access Connector, of the form /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.Databricks/accessConnectors/connector-name.
- credentialId string
- managedIdentityId string
- The resource ID of the Azure user-assigned managed identity associated with the Access Connector, of the form /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name.
StorageCredentialAzureServicePrincipal, StorageCredentialAzureServicePrincipalArgs
- applicationId string
- The application ID of the application registration within the referenced AAD tenant.
- clientSecret string
- The client secret generated for the above application ID in AAD. This field is redacted on output.
- directoryId string
- The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application.
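A hedged sketch of a legacy service principal credential built from the three fields above; the IDs are hypothetical and the secret is read from configuration rather than hard-coded:
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

const config = new pulumi.Config();

// Legacy Azure authentication; a managed identity is recommended instead.
const spCredential = new databricks.StorageCredential("sp-credential", {
    name: "sp_credential",
    azureServicePrincipal: {
        applicationId: "00000000-0000-0000-0000-000000000000", // hypothetical app ID
        clientSecret: config.requireSecret("spClientSecret"),
        directoryId: "11111111-1111-1111-1111-111111111111",   // hypothetical tenant ID
    },
});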
StorageCredentialCloudflareApiToken, StorageCredentialCloudflareApiTokenArgs
- accessKeyId string
- R2 API token access key ID.
- accountId string
- R2 account ID.
- secretAccessKey string
- R2 API token secret access key.
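A hedged sketch of a Cloudflare R2 credential using only the fields above; the account ID is hypothetical and both token halves come from configuration:
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

const config = new pulumi.Config();

// Cloudflare R2 credential; creating one requires account admin access.
const r2Credential = new databricks.StorageCredential("r2-credential", {
    name: "r2_credential",
    cloudflareApiToken: {
        accountId: "0123456789abcdef0123456789abcdef", // hypothetical R2 account ID
        accessKeyId: config.require("r2AccessKeyId"),
        secretAccessKey: config.requireSecret("r2SecretAccessKey"),
    },
});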
StorageCredentialDatabricksGcpServiceAccount, StorageCredentialDatabricksGcpServiceAccountArgs
- credentialId string
- email string
- The email of the GCP service account created, to be granted access to relevant buckets.
StorageCredentialGcpServiceAccountKey, StorageCredentialGcpServiceAccountKeyArgs
- email string
- The email of the GCP service account, to be granted access to relevant buckets.
- privateKey string
- privateKeyId string
Import
This resource can be imported by name:
$ pulumi import databricks:index/storageCredential:StorageCredential this <name>
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- databricks pulumi/pulumi-databricks
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the databricks Terraform provider.