Databricks v1.64.0 published on Wednesday, Apr 2, 2025 by Pulumi

databricks.Pipeline


Use databricks.Pipeline to deploy Delta Live Tables.

Example Usage

import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

const dltDemo = new databricks.Notebook("dlt_demo", {});
const dltDemoRepo = new databricks.Repo("dlt_demo", {});
const _this = new databricks.Pipeline("this", {
    name: "Pipeline Name",
    storage: "/test/first-pipeline",
    configuration: {
        key1: "value1",
        key2: "value2",
    },
    clusters: [
        {
            label: "default",
            numWorkers: 2,
            customTags: {
                cluster_type: "default",
            },
        },
        {
            label: "maintenance",
            numWorkers: 1,
            customTags: {
                cluster_type: "maintenance",
            },
        },
    ],
    libraries: [
        {
            notebook: {
                path: dltDemo.id,
            },
        },
        {
            file: {
                path: pulumi.interpolate`${dltDemoRepo.path}/pipeline.sql`,
            },
        },
    ],
    continuous: false,
    notifications: [{
        emailRecipients: [
            "user@domain.com",
            "user1@domain.com",
        ],
        alerts: [
            "on-update-failure",
            "on-update-fatal-failure",
            "on-update-success",
            "on-flow-failure",
        ],
    }],
});
import pulumi
import pulumi_databricks as databricks

dlt_demo = databricks.Notebook("dlt_demo")
dlt_demo_repo = databricks.Repo("dlt_demo")
this = databricks.Pipeline("this",
    name="Pipeline Name",
    storage="/test/first-pipeline",
    configuration={
        "key1": "value1",
        "key2": "value2",
    },
    clusters=[
        {
            "label": "default",
            "num_workers": 2,
            "custom_tags": {
                "cluster_type": "default",
            },
        },
        {
            "label": "maintenance",
            "num_workers": 1,
            "custom_tags": {
                "cluster_type": "maintenance",
            },
        },
    ],
    libraries=[
        {
            "notebook": {
                "path": dlt_demo.id,
            },
        },
        {
            "file": {
                "path": dlt_demo_repo.path.apply(lambda path: f"{path}/pipeline.sql"),
            },
        },
    ],
    continuous=False,
    notifications=[{
        "email_recipients": [
            "user@domain.com",
            "user1@domain.com",
        ],
        "alerts": [
            "on-update-failure",
            "on-update-fatal-failure",
            "on-update-success",
            "on-flow-failure",
        ],
    }])
package main

import (
	"fmt"

	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		dltDemo, err := databricks.NewNotebook(ctx, "dlt_demo", nil)
		if err != nil {
			return err
		}
		dltDemoRepo, err := databricks.NewRepo(ctx, "dlt_demo", nil)
		if err != nil {
			return err
		}
		_, err = databricks.NewPipeline(ctx, "this", &databricks.PipelineArgs{
			Name:    pulumi.String("Pipeline Name"),
			Storage: pulumi.String("/test/first-pipeline"),
			Configuration: pulumi.StringMap{
				"key1": pulumi.String("value1"),
				"key2": pulumi.String("value2"),
			},
			Clusters: databricks.PipelineClusterArray{
				&databricks.PipelineClusterArgs{
					Label:      pulumi.String("default"),
					NumWorkers: pulumi.Int(2),
					CustomTags: pulumi.StringMap{
						"cluster_type": pulumi.String("default"),
					},
				},
				&databricks.PipelineClusterArgs{
					Label:      pulumi.String("maintenance"),
					NumWorkers: pulumi.Int(1),
					CustomTags: pulumi.StringMap{
						"cluster_type": pulumi.String("maintenance"),
					},
				},
			},
			Libraries: databricks.PipelineLibraryArray{
				&databricks.PipelineLibraryArgs{
					Notebook: &databricks.PipelineLibraryNotebookArgs{
						Path: dltDemo.ID(),
					},
				},
				&databricks.PipelineLibraryArgs{
					File: &databricks.PipelineLibraryFileArgs{
						Path: dltDemoRepo.Path.ApplyT(func(path string) (string, error) {
							return fmt.Sprintf("%v/pipeline.sql", path), nil
						}).(pulumi.StringOutput),
					},
				},
			},
			Continuous: pulumi.Bool(false),
			Notifications: databricks.PipelineNotificationArray{
				&databricks.PipelineNotificationArgs{
					EmailRecipients: pulumi.StringArray{
						pulumi.String("user@domain.com"),
						pulumi.String("user1@domain.com"),
					},
					Alerts: pulumi.StringArray{
						pulumi.String("on-update-failure"),
						pulumi.String("on-update-fatal-failure"),
						pulumi.String("on-update-success"),
						pulumi.String("on-flow-failure"),
					},
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;

return await Deployment.RunAsync(() => 
{
    var dltDemo = new Databricks.Notebook("dlt_demo");

    var dltDemoRepo = new Databricks.Repo("dlt_demo");

    var @this = new Databricks.Pipeline("this", new()
    {
        Name = "Pipeline Name",
        Storage = "/test/first-pipeline",
        Configuration = 
        {
            { "key1", "value1" },
            { "key2", "value2" },
        },
        Clusters = new[]
        {
            new Databricks.Inputs.PipelineClusterArgs
            {
                Label = "default",
                NumWorkers = 2,
                CustomTags = 
                {
                    { "cluster_type", "default" },
                },
            },
            new Databricks.Inputs.PipelineClusterArgs
            {
                Label = "maintenance",
                NumWorkers = 1,
                CustomTags = 
                {
                    { "cluster_type", "maintenance" },
                },
            },
        },
        Libraries = new[]
        {
            new Databricks.Inputs.PipelineLibraryArgs
            {
                Notebook = new Databricks.Inputs.PipelineLibraryNotebookArgs
                {
                    Path = dltDemo.Id,
                },
            },
            new Databricks.Inputs.PipelineLibraryArgs
            {
                File = new Databricks.Inputs.PipelineLibraryFileArgs
                {
                    Path = dltDemoRepo.Path.Apply(path => $"{path}/pipeline.sql"),
                },
            },
        },
        Continuous = false,
        Notifications = new[]
        {
            new Databricks.Inputs.PipelineNotificationArgs
            {
                EmailRecipients = new[]
                {
                    "user@domain.com",
                    "user1@domain.com",
                },
                Alerts = new[]
                {
                    "on-update-failure",
                    "on-update-fatal-failure",
                    "on-update-success",
                    "on-flow-failure",
                },
            },
        },
    });

});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.Notebook;
import com.pulumi.databricks.Repo;
import com.pulumi.databricks.Pipeline;
import com.pulumi.databricks.PipelineArgs;
import com.pulumi.databricks.inputs.PipelineClusterArgs;
import com.pulumi.databricks.inputs.PipelineLibraryArgs;
import com.pulumi.databricks.inputs.PipelineLibraryNotebookArgs;
import com.pulumi.databricks.inputs.PipelineLibraryFileArgs;
import com.pulumi.databricks.inputs.PipelineNotificationArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var dltDemo = new Notebook("dltDemo");

        var dltDemoRepo = new Repo("dltDemoRepo");

        var this_ = new Pipeline("this", PipelineArgs.builder()
            .name("Pipeline Name")
            .storage("/test/first-pipeline")
            .configuration(Map.ofEntries(
                Map.entry("key1", "value1"),
                Map.entry("key2", "value2")
            ))
            .clusters(            
                PipelineClusterArgs.builder()
                    .label("default")
                    .numWorkers(2)
                    .customTags(Map.of("cluster_type", "default"))
                    .build(),
                PipelineClusterArgs.builder()
                    .label("maintenance")
                    .numWorkers(1)
                    .customTags(Map.of("cluster_type", "maintenance"))
                    .build())
            .libraries(            
                PipelineLibraryArgs.builder()
                    .notebook(PipelineLibraryNotebookArgs.builder()
                        .path(dltDemo.id())
                        .build())
                    .build(),
                PipelineLibraryArgs.builder()
                    .file(PipelineLibraryFileArgs.builder()
                        .path(dltDemoRepo.path().applyValue(path -> String.format("%s/pipeline.sql", path)))
                        .build())
                    .build())
            .continuous(false)
            .notifications(PipelineNotificationArgs.builder()
                .emailRecipients(                
                    "user@domain.com",
                    "user1@domain.com")
                .alerts(                
                    "on-update-failure",
                    "on-update-fatal-failure",
                    "on-update-success",
                    "on-flow-failure")
                .build())
            .build());

    }
}
resources:
  dltDemo:
    type: databricks:Notebook
    name: dlt_demo
  dltDemoRepo:
    type: databricks:Repo
    name: dlt_demo
  this:
    type: databricks:Pipeline
    properties:
      name: Pipeline Name
      storage: /test/first-pipeline
      configuration:
        key1: value1
        key2: value2
      clusters:
        - label: default
          numWorkers: 2
          customTags:
            cluster_type: default
        - label: maintenance
          numWorkers: 1
          customTags:
            cluster_type: maintenance
      libraries:
        - notebook:
            path: ${dltDemo.id}
        - file:
            path: ${dltDemoRepo.path}/pipeline.sql
      continuous: false
      notifications:
        - emailRecipients:
            - user@domain.com
            - user1@domain.com
          alerts:
            - on-update-failure
            - on-update-fatal-failure
            - on-update-success
            - on-flow-failure


Create Pipeline Resource

Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

Constructor syntax

new Pipeline(name: string, args?: PipelineArgs, opts?: CustomResourceOptions);
@overload
def Pipeline(resource_name: str,
             args: Optional[PipelineArgs] = None,
             opts: Optional[ResourceOptions] = None)

@overload
def Pipeline(resource_name: str,
             opts: Optional[ResourceOptions] = None,
             allow_duplicate_names: Optional[bool] = None,
             budget_policy_id: Optional[str] = None,
             catalog: Optional[str] = None,
             cause: Optional[str] = None,
             channel: Optional[str] = None,
             cluster_id: Optional[str] = None,
             clusters: Optional[Sequence[PipelineClusterArgs]] = None,
             configuration: Optional[Mapping[str, str]] = None,
             continuous: Optional[bool] = None,
             creator_user_name: Optional[str] = None,
             deployment: Optional[PipelineDeploymentArgs] = None,
             development: Optional[bool] = None,
             edition: Optional[str] = None,
             event_log: Optional[PipelineEventLogArgs] = None,
             expected_last_modified: Optional[int] = None,
             filters: Optional[PipelineFiltersArgs] = None,
             gateway_definition: Optional[PipelineGatewayDefinitionArgs] = None,
             health: Optional[str] = None,
             ingestion_definition: Optional[PipelineIngestionDefinitionArgs] = None,
             last_modified: Optional[int] = None,
             latest_updates: Optional[Sequence[PipelineLatestUpdateArgs]] = None,
             libraries: Optional[Sequence[PipelineLibraryArgs]] = None,
             name: Optional[str] = None,
             notifications: Optional[Sequence[PipelineNotificationArgs]] = None,
             photon: Optional[bool] = None,
             restart_window: Optional[PipelineRestartWindowArgs] = None,
             run_as: Optional[PipelineRunAsArgs] = None,
             schema: Optional[str] = None,
             serverless: Optional[bool] = None,
             state: Optional[str] = None,
             storage: Optional[str] = None,
             target: Optional[str] = None,
             trigger: Optional[PipelineTriggerArgs] = None,
             url: Optional[str] = None)
func NewPipeline(ctx *Context, name string, args *PipelineArgs, opts ...ResourceOption) (*Pipeline, error)
public Pipeline(string name, PipelineArgs? args = null, CustomResourceOptions? opts = null)
public Pipeline(String name, PipelineArgs args)
public Pipeline(String name, PipelineArgs args, CustomResourceOptions options)
type: databricks:Pipeline
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
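
For example, a minimal concrete call of the TypeScript form above might look like the following sketch (the pipeline name, storage path, and notebook path are placeholder values chosen for illustration, not provider defaults):

import * as databricks from "@pulumi/databricks";

// Minimal sketch: all arguments are optional, so only a name, a storage
// location, and a single notebook library are set here. Both paths are
// placeholders, not values taken from this page.
const example = new databricks.Pipeline("example", {
    name: "example-pipeline",
    storage: "/pipelines/example",
    libraries: [{
        notebook: {
            path: "/Workspace/Shared/dlt-notebook",
        },
    }],
});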

Parameters

TypeScript (JavaScript)
name string (required)
The unique name of the resource.
args PipelineArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.

Python
resource_name str (required)
The unique name of the resource.
args PipelineArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.

Go
ctx Context
Context object for the current deployment.
name string (required)
The unique name of the resource.
args PipelineArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.

C#
name string (required)
The unique name of the resource.
args PipelineArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.

Java
name String (required)
The unique name of the resource.
args PipelineArgs (required)
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.
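
The opts bag accepts the standard Pulumi resource options. A brief TypeScript sketch follows; the upstream Repo, its URL, and the protect flag are purely illustrative assumptions, not requirements of this resource:

import * as databricks from "@pulumi/databricks";

// Hypothetical upstream resource that the pipeline should be created after.
const repo = new databricks.Repo("dlt-repo", {
    url: "https://github.com/example-org/dlt-project",
});

// Resource options are passed as the third constructor argument.
const guarded = new databricks.Pipeline("guarded", {
    name: "guarded-pipeline",
}, {
    dependsOn: [repo], // create the repo before the pipeline
    protect: true,     // pulumi destroy will refuse to delete this pipeline
});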

Constructor example

The following reference example uses placeholder values for all input properties.

var pipelineResource = new Databricks.Pipeline("pipelineResource", new()
{
    AllowDuplicateNames = false,
    BudgetPolicyId = "string",
    Catalog = "string",
    Cause = "string",
    Channel = "string",
    ClusterId = "string",
    Clusters = new[]
    {
        new Databricks.Inputs.PipelineClusterArgs
        {
            ApplyPolicyDefaultValues = false,
            Autoscale = new Databricks.Inputs.PipelineClusterAutoscaleArgs
            {
                MaxWorkers = 0,
                MinWorkers = 0,
                Mode = "string",
            },
            AwsAttributes = new Databricks.Inputs.PipelineClusterAwsAttributesArgs
            {
                Availability = "string",
                EbsVolumeCount = 0,
                EbsVolumeIops = 0,
                EbsVolumeSize = 0,
                EbsVolumeThroughput = 0,
                EbsVolumeType = "string",
                FirstOnDemand = 0,
                InstanceProfileArn = "string",
                SpotBidPricePercent = 0,
                ZoneId = "string",
            },
            AzureAttributes = new Databricks.Inputs.PipelineClusterAzureAttributesArgs
            {
                Availability = "string",
                FirstOnDemand = 0,
                LogAnalyticsInfo = new Databricks.Inputs.PipelineClusterAzureAttributesLogAnalyticsInfoArgs
                {
                    LogAnalyticsPrimaryKey = "string",
                    LogAnalyticsWorkspaceId = "string",
                },
                SpotBidMaxPrice = 0,
            },
            ClusterLogConf = new Databricks.Inputs.PipelineClusterClusterLogConfArgs
            {
                Dbfs = new Databricks.Inputs.PipelineClusterClusterLogConfDbfsArgs
                {
                    Destination = "string",
                },
                S3 = new Databricks.Inputs.PipelineClusterClusterLogConfS3Args
                {
                    Destination = "string",
                    CannedAcl = "string",
                    EnableEncryption = false,
                    EncryptionType = "string",
                    Endpoint = "string",
                    KmsKey = "string",
                    Region = "string",
                },
                Volumes = new Databricks.Inputs.PipelineClusterClusterLogConfVolumesArgs
                {
                    Destination = "string",
                },
            },
            CustomTags = 
            {
                { "string", "string" },
            },
            DriverInstancePoolId = "string",
            DriverNodeTypeId = "string",
            EnableLocalDiskEncryption = false,
            GcpAttributes = new Databricks.Inputs.PipelineClusterGcpAttributesArgs
            {
                Availability = "string",
                GoogleServiceAccount = "string",
                LocalSsdCount = 0,
                ZoneId = "string",
            },
            InitScripts = new[]
            {
                new Databricks.Inputs.PipelineClusterInitScriptArgs
                {
                    Abfss = new Databricks.Inputs.PipelineClusterInitScriptAbfssArgs
                    {
                        Destination = "string",
                    },
                    File = new Databricks.Inputs.PipelineClusterInitScriptFileArgs
                    {
                        Destination = "string",
                    },
                    Gcs = new Databricks.Inputs.PipelineClusterInitScriptGcsArgs
                    {
                        Destination = "string",
                    },
                    S3 = new Databricks.Inputs.PipelineClusterInitScriptS3Args
                    {
                        Destination = "string",
                        CannedAcl = "string",
                        EnableEncryption = false,
                        EncryptionType = "string",
                        Endpoint = "string",
                        KmsKey = "string",
                        Region = "string",
                    },
                    Volumes = new Databricks.Inputs.PipelineClusterInitScriptVolumesArgs
                    {
                        Destination = "string",
                    },
                    Workspace = new Databricks.Inputs.PipelineClusterInitScriptWorkspaceArgs
                    {
                        Destination = "string",
                    },
                },
            },
            InstancePoolId = "string",
            Label = "string",
            NodeTypeId = "string",
            NumWorkers = 0,
            PolicyId = "string",
            SparkConf = 
            {
                { "string", "string" },
            },
            SparkEnvVars = 
            {
                { "string", "string" },
            },
            SshPublicKeys = new[]
            {
                "string",
            },
        },
    },
    Configuration = 
    {
        { "string", "string" },
    },
    Continuous = false,
    CreatorUserName = "string",
    Deployment = new Databricks.Inputs.PipelineDeploymentArgs
    {
        Kind = "string",
        MetadataFilePath = "string",
    },
    Development = false,
    Edition = "string",
    EventLog = new Databricks.Inputs.PipelineEventLogArgs
    {
        Catalog = "string",
        Name = "string",
        Schema = "string",
    },
    ExpectedLastModified = 0,
    Filters = new Databricks.Inputs.PipelineFiltersArgs
    {
        Excludes = new[]
        {
            "string",
        },
        Includes = new[]
        {
            "string",
        },
    },
    GatewayDefinition = new Databricks.Inputs.PipelineGatewayDefinitionArgs
    {
        ConnectionId = "string",
        ConnectionName = "string",
        GatewayStorageCatalog = "string",
        GatewayStorageName = "string",
        GatewayStorageSchema = "string",
    },
    Health = "string",
    IngestionDefinition = new Databricks.Inputs.PipelineIngestionDefinitionArgs
    {
        ConnectionName = "string",
        IngestionGatewayId = "string",
        Objects = new[]
        {
            new Databricks.Inputs.PipelineIngestionDefinitionObjectArgs
            {
                Report = new Databricks.Inputs.PipelineIngestionDefinitionObjectReportArgs
                {
                    DestinationCatalog = "string",
                    DestinationSchema = "string",
                    DestinationTable = "string",
                    SourceUrl = "string",
                    TableConfiguration = new Databricks.Inputs.PipelineIngestionDefinitionObjectReportTableConfigurationArgs
                    {
                        PrimaryKeys = new[]
                        {
                            "string",
                        },
                        SalesforceIncludeFormulaFields = false,
                        ScdType = "string",
                        SequenceBies = new[]
                        {
                            "string",
                        },
                    },
                },
                Schema = new Databricks.Inputs.PipelineIngestionDefinitionObjectSchemaArgs
                {
                    DestinationCatalog = "string",
                    DestinationSchema = "string",
                    SourceCatalog = "string",
                    SourceSchema = "string",
                    TableConfiguration = new Databricks.Inputs.PipelineIngestionDefinitionObjectSchemaTableConfigurationArgs
                    {
                        PrimaryKeys = new[]
                        {
                            "string",
                        },
                        SalesforceIncludeFormulaFields = false,
                        ScdType = "string",
                        SequenceBies = new[]
                        {
                            "string",
                        },
                    },
                },
                Table = new Databricks.Inputs.PipelineIngestionDefinitionObjectTableArgs
                {
                    DestinationCatalog = "string",
                    DestinationSchema = "string",
                    DestinationTable = "string",
                    SourceCatalog = "string",
                    SourceSchema = "string",
                    SourceTable = "string",
                    TableConfiguration = new Databricks.Inputs.PipelineIngestionDefinitionObjectTableTableConfigurationArgs
                    {
                        PrimaryKeys = new[]
                        {
                            "string",
                        },
                        SalesforceIncludeFormulaFields = false,
                        ScdType = "string",
                        SequenceBies = new[]
                        {
                            "string",
                        },
                    },
                },
            },
        },
        TableConfiguration = new Databricks.Inputs.PipelineIngestionDefinitionTableConfigurationArgs
        {
            PrimaryKeys = new[]
            {
                "string",
            },
            SalesforceIncludeFormulaFields = false,
            ScdType = "string",
            SequenceBies = new[]
            {
                "string",
            },
        },
    },
    LastModified = 0,
    LatestUpdates = new[]
    {
        new Databricks.Inputs.PipelineLatestUpdateArgs
        {
            CreationTime = "string",
            State = "string",
            UpdateId = "string",
        },
    },
    Libraries = new[]
    {
        new Databricks.Inputs.PipelineLibraryArgs
        {
            File = new Databricks.Inputs.PipelineLibraryFileArgs
            {
                Path = "string",
            },
            Jar = "string",
            Maven = new Databricks.Inputs.PipelineLibraryMavenArgs
            {
                Coordinates = "string",
                Exclusions = new[]
                {
                    "string",
                },
                Repo = "string",
            },
            Notebook = new Databricks.Inputs.PipelineLibraryNotebookArgs
            {
                Path = "string",
            },
        },
    },
    Name = "string",
    Notifications = new[]
    {
        new Databricks.Inputs.PipelineNotificationArgs
        {
            Alerts = new[]
            {
                "string",
            },
            EmailRecipients = new[]
            {
                "string",
            },
        },
    },
    Photon = false,
    RestartWindow = new Databricks.Inputs.PipelineRestartWindowArgs
    {
        StartHour = 0,
        DaysOfWeeks = new[]
        {
            "string",
        },
        TimeZoneId = "string",
    },
    RunAs = new Databricks.Inputs.PipelineRunAsArgs
    {
        ServicePrincipalName = "string",
        UserName = "string",
    },
    Schema = "string",
    Serverless = false,
    State = "string",
    Storage = "string",
    Target = "string",
    Trigger = new Databricks.Inputs.PipelineTriggerArgs
    {
        Cron = new Databricks.Inputs.PipelineTriggerCronArgs
        {
            QuartzCronSchedule = "string",
            TimezoneId = "string",
        },
        Manual = null,
    },
    Url = "string",
});
example, err := databricks.NewPipeline(ctx, "pipelineResource", &databricks.PipelineArgs{
	AllowDuplicateNames: pulumi.Bool(false),
	BudgetPolicyId:      pulumi.String("string"),
	Catalog:             pulumi.String("string"),
	Cause:               pulumi.String("string"),
	Channel:             pulumi.String("string"),
	ClusterId:           pulumi.String("string"),
	Clusters: databricks.PipelineClusterArray{
		&databricks.PipelineClusterArgs{
			ApplyPolicyDefaultValues: pulumi.Bool(false),
			Autoscale: &databricks.PipelineClusterAutoscaleArgs{
				MaxWorkers: pulumi.Int(0),
				MinWorkers: pulumi.Int(0),
				Mode:       pulumi.String("string"),
			},
			AwsAttributes: &databricks.PipelineClusterAwsAttributesArgs{
				Availability:        pulumi.String("string"),
				EbsVolumeCount:      pulumi.Int(0),
				EbsVolumeIops:       pulumi.Int(0),
				EbsVolumeSize:       pulumi.Int(0),
				EbsVolumeThroughput: pulumi.Int(0),
				EbsVolumeType:       pulumi.String("string"),
				FirstOnDemand:       pulumi.Int(0),
				InstanceProfileArn:  pulumi.String("string"),
				SpotBidPricePercent: pulumi.Int(0),
				ZoneId:              pulumi.String("string"),
			},
			AzureAttributes: &databricks.PipelineClusterAzureAttributesArgs{
				Availability:  pulumi.String("string"),
				FirstOnDemand: pulumi.Int(0),
				LogAnalyticsInfo: &databricks.PipelineClusterAzureAttributesLogAnalyticsInfoArgs{
					LogAnalyticsPrimaryKey:  pulumi.String("string"),
					LogAnalyticsWorkspaceId: pulumi.String("string"),
				},
				SpotBidMaxPrice: pulumi.Float64(0),
			},
			ClusterLogConf: &databricks.PipelineClusterClusterLogConfArgs{
				Dbfs: &databricks.PipelineClusterClusterLogConfDbfsArgs{
					Destination: pulumi.String("string"),
				},
				S3: &databricks.PipelineClusterClusterLogConfS3Args{
					Destination:      pulumi.String("string"),
					CannedAcl:        pulumi.String("string"),
					EnableEncryption: pulumi.Bool(false),
					EncryptionType:   pulumi.String("string"),
					Endpoint:         pulumi.String("string"),
					KmsKey:           pulumi.String("string"),
					Region:           pulumi.String("string"),
				},
				Volumes: &databricks.PipelineClusterClusterLogConfVolumesArgs{
					Destination: pulumi.String("string"),
				},
			},
			CustomTags: pulumi.StringMap{
				"string": pulumi.String("string"),
			},
			DriverInstancePoolId:      pulumi.String("string"),
			DriverNodeTypeId:          pulumi.String("string"),
			EnableLocalDiskEncryption: pulumi.Bool(false),
			GcpAttributes: &databricks.PipelineClusterGcpAttributesArgs{
				Availability:         pulumi.String("string"),
				GoogleServiceAccount: pulumi.String("string"),
				LocalSsdCount:        pulumi.Int(0),
				ZoneId:               pulumi.String("string"),
			},
			InitScripts: databricks.PipelineClusterInitScriptArray{
				&databricks.PipelineClusterInitScriptArgs{
					Abfss: &databricks.PipelineClusterInitScriptAbfssArgs{
						Destination: pulumi.String("string"),
					},
					File: &databricks.PipelineClusterInitScriptFileArgs{
						Destination: pulumi.String("string"),
					},
					Gcs: &databricks.PipelineClusterInitScriptGcsArgs{
						Destination: pulumi.String("string"),
					},
					S3: &databricks.PipelineClusterInitScriptS3Args{
						Destination:      pulumi.String("string"),
						CannedAcl:        pulumi.String("string"),
						EnableEncryption: pulumi.Bool(false),
						EncryptionType:   pulumi.String("string"),
						Endpoint:         pulumi.String("string"),
						KmsKey:           pulumi.String("string"),
						Region:           pulumi.String("string"),
					},
					Volumes: &databricks.PipelineClusterInitScriptVolumesArgs{
						Destination: pulumi.String("string"),
					},
					Workspace: &databricks.PipelineClusterInitScriptWorkspaceArgs{
						Destination: pulumi.String("string"),
					},
				},
			},
			InstancePoolId: pulumi.String("string"),
			Label:          pulumi.String("string"),
			NodeTypeId:     pulumi.String("string"),
			NumWorkers:     pulumi.Int(0),
			PolicyId:       pulumi.String("string"),
			SparkConf: pulumi.StringMap{
				"string": pulumi.String("string"),
			},
			SparkEnvVars: pulumi.StringMap{
				"string": pulumi.String("string"),
			},
			SshPublicKeys: pulumi.StringArray{
				pulumi.String("string"),
			},
		},
	},
	Configuration: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	Continuous:      pulumi.Bool(false),
	CreatorUserName: pulumi.String("string"),
	Deployment: &databricks.PipelineDeploymentArgs{
		Kind:             pulumi.String("string"),
		MetadataFilePath: pulumi.String("string"),
	},
	Development: pulumi.Bool(false),
	Edition:     pulumi.String("string"),
	EventLog: &databricks.PipelineEventLogArgs{
		Catalog: pulumi.String("string"),
		Name:    pulumi.String("string"),
		Schema:  pulumi.String("string"),
	},
	ExpectedLastModified: pulumi.Int(0),
	Filters: &databricks.PipelineFiltersArgs{
		Excludes: pulumi.StringArray{
			pulumi.String("string"),
		},
		Includes: pulumi.StringArray{
			pulumi.String("string"),
		},
	},
	GatewayDefinition: &databricks.PipelineGatewayDefinitionArgs{
		ConnectionId:          pulumi.String("string"),
		ConnectionName:        pulumi.String("string"),
		GatewayStorageCatalog: pulumi.String("string"),
		GatewayStorageName:    pulumi.String("string"),
		GatewayStorageSchema:  pulumi.String("string"),
	},
	Health: pulumi.String("string"),
	IngestionDefinition: &databricks.PipelineIngestionDefinitionArgs{
		ConnectionName:     pulumi.String("string"),
		IngestionGatewayId: pulumi.String("string"),
		Objects: databricks.PipelineIngestionDefinitionObjectArray{
			&databricks.PipelineIngestionDefinitionObjectArgs{
				Report: &databricks.PipelineIngestionDefinitionObjectReportArgs{
					DestinationCatalog: pulumi.String("string"),
					DestinationSchema:  pulumi.String("string"),
					DestinationTable:   pulumi.String("string"),
					SourceUrl:          pulumi.String("string"),
					TableConfiguration: &databricks.PipelineIngestionDefinitionObjectReportTableConfigurationArgs{
						PrimaryKeys: pulumi.StringArray{
							pulumi.String("string"),
						},
						SalesforceIncludeFormulaFields: pulumi.Bool(false),
						ScdType:                        pulumi.String("string"),
						SequenceBies: pulumi.StringArray{
							pulumi.String("string"),
						},
					},
				},
				Schema: &databricks.PipelineIngestionDefinitionObjectSchemaArgs{
					DestinationCatalog: pulumi.String("string"),
					DestinationSchema:  pulumi.String("string"),
					SourceCatalog:      pulumi.String("string"),
					SourceSchema:       pulumi.String("string"),
					TableConfiguration: &databricks.PipelineIngestionDefinitionObjectSchemaTableConfigurationArgs{
						PrimaryKeys: pulumi.StringArray{
							pulumi.String("string"),
						},
						SalesforceIncludeFormulaFields: pulumi.Bool(false),
						ScdType:                        pulumi.String("string"),
						SequenceBies: pulumi.StringArray{
							pulumi.String("string"),
						},
					},
				},
				Table: &databricks.PipelineIngestionDefinitionObjectTableArgs{
					DestinationCatalog: pulumi.String("string"),
					DestinationSchema:  pulumi.String("string"),
					DestinationTable:   pulumi.String("string"),
					SourceCatalog:      pulumi.String("string"),
					SourceSchema:       pulumi.String("string"),
					SourceTable:        pulumi.String("string"),
					TableConfiguration: &databricks.PipelineIngestionDefinitionObjectTableTableConfigurationArgs{
						PrimaryKeys: pulumi.StringArray{
							pulumi.String("string"),
						},
						SalesforceIncludeFormulaFields: pulumi.Bool(false),
						ScdType:                        pulumi.String("string"),
						SequenceBies: pulumi.StringArray{
							pulumi.String("string"),
						},
					},
				},
			},
		},
		TableConfiguration: &databricks.PipelineIngestionDefinitionTableConfigurationArgs{
			PrimaryKeys: pulumi.StringArray{
				pulumi.String("string"),
			},
			SalesforceIncludeFormulaFields: pulumi.Bool(false),
			ScdType:                        pulumi.String("string"),
			SequenceBies: pulumi.StringArray{
				pulumi.String("string"),
			},
		},
	},
	LastModified: pulumi.Int(0),
	LatestUpdates: databricks.PipelineLatestUpdateArray{
		&databricks.PipelineLatestUpdateArgs{
			CreationTime: pulumi.String("string"),
			State:        pulumi.String("string"),
			UpdateId:     pulumi.String("string"),
		},
	},
	Libraries: databricks.PipelineLibraryArray{
		&databricks.PipelineLibraryArgs{
			File: &databricks.PipelineLibraryFileArgs{
				Path: pulumi.String("string"),
			},
			Jar: pulumi.String("string"),
			Maven: &databricks.PipelineLibraryMavenArgs{
				Coordinates: pulumi.String("string"),
				Exclusions: pulumi.StringArray{
					pulumi.String("string"),
				},
				Repo: pulumi.String("string"),
			},
			Notebook: &databricks.PipelineLibraryNotebookArgs{
				Path: pulumi.String("string"),
			},
		},
	},
	Name: pulumi.String("string"),
	Notifications: databricks.PipelineNotificationArray{
		&databricks.PipelineNotificationArgs{
			Alerts: pulumi.StringArray{
				pulumi.String("string"),
			},
			EmailRecipients: pulumi.StringArray{
				pulumi.String("string"),
			},
		},
	},
	Photon: pulumi.Bool(false),
	RestartWindow: &databricks.PipelineRestartWindowArgs{
		StartHour: pulumi.Int(0),
		DaysOfWeeks: pulumi.StringArray{
			pulumi.String("string"),
		},
		TimeZoneId: pulumi.String("string"),
	},
	RunAs: &databricks.PipelineRunAsArgs{
		ServicePrincipalName: pulumi.String("string"),
		UserName:             pulumi.String("string"),
	},
	Schema:     pulumi.String("string"),
	Serverless: pulumi.Bool(false),
	State:      pulumi.String("string"),
	Storage:    pulumi.String("string"),
	Target:     pulumi.String("string"),
	Trigger: &databricks.PipelineTriggerArgs{
		Cron: &databricks.PipelineTriggerCronArgs{
			QuartzCronSchedule: pulumi.String("string"),
			TimezoneId:         pulumi.String("string"),
		},
		Manual: &databricks.PipelineTriggerManualArgs{},
	},
	Url: pulumi.String("string"),
})
var pipelineResource = new Pipeline("pipelineResource", PipelineArgs.builder()
    .allowDuplicateNames(false)
    .budgetPolicyId("string")
    .catalog("string")
    .cause("string")
    .channel("string")
    .clusterId("string")
    .clusters(PipelineClusterArgs.builder()
        .applyPolicyDefaultValues(false)
        .autoscale(PipelineClusterAutoscaleArgs.builder()
            .maxWorkers(0)
            .minWorkers(0)
            .mode("string")
            .build())
        .awsAttributes(PipelineClusterAwsAttributesArgs.builder()
            .availability("string")
            .ebsVolumeCount(0)
            .ebsVolumeIops(0)
            .ebsVolumeSize(0)
            .ebsVolumeThroughput(0)
            .ebsVolumeType("string")
            .firstOnDemand(0)
            .instanceProfileArn("string")
            .spotBidPricePercent(0)
            .zoneId("string")
            .build())
        .azureAttributes(PipelineClusterAzureAttributesArgs.builder()
            .availability("string")
            .firstOnDemand(0)
            .logAnalyticsInfo(PipelineClusterAzureAttributesLogAnalyticsInfoArgs.builder()
                .logAnalyticsPrimaryKey("string")
                .logAnalyticsWorkspaceId("string")
                .build())
            .spotBidMaxPrice(0)
            .build())
        .clusterLogConf(PipelineClusterClusterLogConfArgs.builder()
            .dbfs(PipelineClusterClusterLogConfDbfsArgs.builder()
                .destination("string")
                .build())
            .s3(PipelineClusterClusterLogConfS3Args.builder()
                .destination("string")
                .cannedAcl("string")
                .enableEncryption(false)
                .encryptionType("string")
                .endpoint("string")
                .kmsKey("string")
                .region("string")
                .build())
            .volumes(PipelineClusterClusterLogConfVolumesArgs.builder()
                .destination("string")
                .build())
            .build())
        .customTags(Map.of("string", "string"))
        .driverInstancePoolId("string")
        .driverNodeTypeId("string")
        .enableLocalDiskEncryption(false)
        .gcpAttributes(PipelineClusterGcpAttributesArgs.builder()
            .availability("string")
            .googleServiceAccount("string")
            .localSsdCount(0)
            .zoneId("string")
            .build())
        .initScripts(PipelineClusterInitScriptArgs.builder()
            .abfss(PipelineClusterInitScriptAbfssArgs.builder()
                .destination("string")
                .build())
            .file(PipelineClusterInitScriptFileArgs.builder()
                .destination("string")
                .build())
            .gcs(PipelineClusterInitScriptGcsArgs.builder()
                .destination("string")
                .build())
            .s3(PipelineClusterInitScriptS3Args.builder()
                .destination("string")
                .cannedAcl("string")
                .enableEncryption(false)
                .encryptionType("string")
                .endpoint("string")
                .kmsKey("string")
                .region("string")
                .build())
            .volumes(PipelineClusterInitScriptVolumesArgs.builder()
                .destination("string")
                .build())
            .workspace(PipelineClusterInitScriptWorkspaceArgs.builder()
                .destination("string")
                .build())
            .build())
        .instancePoolId("string")
        .label("string")
        .nodeTypeId("string")
        .numWorkers(0)
        .policyId("string")
        .sparkConf(Map.of("string", "string"))
        .sparkEnvVars(Map.of("string", "string"))
        .sshPublicKeys("string")
        .build())
    .configuration(Map.of("string", "string"))
    .continuous(false)
    .creatorUserName("string")
    .deployment(PipelineDeploymentArgs.builder()
        .kind("string")
        .metadataFilePath("string")
        .build())
    .development(false)
    .edition("string")
    .eventLog(PipelineEventLogArgs.builder()
        .catalog("string")
        .name("string")
        .schema("string")
        .build())
    .expectedLastModified(0)
    .filters(PipelineFiltersArgs.builder()
        .excludes("string")
        .includes("string")
        .build())
    .gatewayDefinition(PipelineGatewayDefinitionArgs.builder()
        .connectionId("string")
        .connectionName("string")
        .gatewayStorageCatalog("string")
        .gatewayStorageName("string")
        .gatewayStorageSchema("string")
        .build())
    .health("string")
    .ingestionDefinition(PipelineIngestionDefinitionArgs.builder()
        .connectionName("string")
        .ingestionGatewayId("string")
        .objects(PipelineIngestionDefinitionObjectArgs.builder()
            .report(PipelineIngestionDefinitionObjectReportArgs.builder()
                .destinationCatalog("string")
                .destinationSchema("string")
                .destinationTable("string")
                .sourceUrl("string")
                .tableConfiguration(PipelineIngestionDefinitionObjectReportTableConfigurationArgs.builder()
                    .primaryKeys("string")
                    .salesforceIncludeFormulaFields(false)
                    .scdType("string")
                    .sequenceBies("string")
                    .build())
                .build())
            .schema(PipelineIngestionDefinitionObjectSchemaArgs.builder()
                .destinationCatalog("string")
                .destinationSchema("string")
                .sourceCatalog("string")
                .sourceSchema("string")
                .tableConfiguration(PipelineIngestionDefinitionObjectSchemaTableConfigurationArgs.builder()
                    .primaryKeys("string")
                    .salesforceIncludeFormulaFields(false)
                    .scdType("string")
                    .sequenceBies("string")
                    .build())
                .build())
            .table(PipelineIngestionDefinitionObjectTableArgs.builder()
                .destinationCatalog("string")
                .destinationSchema("string")
                .destinationTable("string")
                .sourceCatalog("string")
                .sourceSchema("string")
                .sourceTable("string")
                .tableConfiguration(PipelineIngestionDefinitionObjectTableTableConfigurationArgs.builder()
                    .primaryKeys("string")
                    .salesforceIncludeFormulaFields(false)
                    .scdType("string")
                    .sequenceBies("string")
                    .build())
                .build())
            .build())
        .tableConfiguration(PipelineIngestionDefinitionTableConfigurationArgs.builder()
            .primaryKeys("string")
            .salesforceIncludeFormulaFields(false)
            .scdType("string")
            .sequenceBies("string")
            .build())
        .build())
    .lastModified(0)
    .latestUpdates(PipelineLatestUpdateArgs.builder()
        .creationTime("string")
        .state("string")
        .updateId("string")
        .build())
    .libraries(PipelineLibraryArgs.builder()
        .file(PipelineLibraryFileArgs.builder()
            .path("string")
            .build())
        .jar("string")
        .maven(PipelineLibraryMavenArgs.builder()
            .coordinates("string")
            .exclusions("string")
            .repo("string")
            .build())
        .notebook(PipelineLibraryNotebookArgs.builder()
            .path("string")
            .build())
        .build())
    .name("string")
    .notifications(PipelineNotificationArgs.builder()
        .alerts("string")
        .emailRecipients("string")
        .build())
    .photon(false)
    .restartWindow(PipelineRestartWindowArgs.builder()
        .startHour(0)
        .daysOfWeeks("string")
        .timeZoneId("string")
        .build())
    .runAs(PipelineRunAsArgs.builder()
        .servicePrincipalName("string")
        .userName("string")
        .build())
    .schema("string")
    .serverless(false)
    .state("string")
    .storage("string")
    .target("string")
    .trigger(PipelineTriggerArgs.builder()
        .cron(PipelineTriggerCronArgs.builder()
            .quartzCronSchedule("string")
            .timezoneId("string")
            .build())
        .manual()
        .build())
    .url("string")
    .build());
pipeline_resource = databricks.Pipeline("pipelineResource",
    allow_duplicate_names=False,
    budget_policy_id="string",
    catalog="string",
    cause="string",
    channel="string",
    cluster_id="string",
    clusters=[{
        "apply_policy_default_values": False,
        "autoscale": {
            "max_workers": 0,
            "min_workers": 0,
            "mode": "string",
        },
        "aws_attributes": {
            "availability": "string",
            "ebs_volume_count": 0,
            "ebs_volume_iops": 0,
            "ebs_volume_size": 0,
            "ebs_volume_throughput": 0,
            "ebs_volume_type": "string",
            "first_on_demand": 0,
            "instance_profile_arn": "string",
            "spot_bid_price_percent": 0,
            "zone_id": "string",
        },
        "azure_attributes": {
            "availability": "string",
            "first_on_demand": 0,
            "log_analytics_info": {
                "log_analytics_primary_key": "string",
                "log_analytics_workspace_id": "string",
            },
            "spot_bid_max_price": 0,
        },
        "cluster_log_conf": {
            "dbfs": {
                "destination": "string",
            },
            "s3": {
                "destination": "string",
                "canned_acl": "string",
                "enable_encryption": False,
                "encryption_type": "string",
                "endpoint": "string",
                "kms_key": "string",
                "region": "string",
            },
            "volumes": {
                "destination": "string",
            },
        },
        "custom_tags": {
            "string": "string",
        },
        "driver_instance_pool_id": "string",
        "driver_node_type_id": "string",
        "enable_local_disk_encryption": False,
        "gcp_attributes": {
            "availability": "string",
            "google_service_account": "string",
            "local_ssd_count": 0,
            "zone_id": "string",
        },
        "init_scripts": [{
            "abfss": {
                "destination": "string",
            },
            "file": {
                "destination": "string",
            },
            "gcs": {
                "destination": "string",
            },
            "s3": {
                "destination": "string",
                "canned_acl": "string",
                "enable_encryption": False,
                "encryption_type": "string",
                "endpoint": "string",
                "kms_key": "string",
                "region": "string",
            },
            "volumes": {
                "destination": "string",
            },
            "workspace": {
                "destination": "string",
            },
        }],
        "instance_pool_id": "string",
        "label": "string",
        "node_type_id": "string",
        "num_workers": 0,
        "policy_id": "string",
        "spark_conf": {
            "string": "string",
        },
        "spark_env_vars": {
            "string": "string",
        },
        "ssh_public_keys": ["string"],
    }],
    configuration={
        "string": "string",
    },
    continuous=False,
    creator_user_name="string",
    deployment={
        "kind": "string",
        "metadata_file_path": "string",
    },
    development=False,
    edition="string",
    event_log={
        "catalog": "string",
        "name": "string",
        "schema": "string",
    },
    expected_last_modified=0,
    filters={
        "excludes": ["string"],
        "includes": ["string"],
    },
    gateway_definition={
        "connection_id": "string",
        "connection_name": "string",
        "gateway_storage_catalog": "string",
        "gateway_storage_name": "string",
        "gateway_storage_schema": "string",
    },
    health="string",
    ingestion_definition={
        "connection_name": "string",
        "ingestion_gateway_id": "string",
        "objects": [{
            "report": {
                "destination_catalog": "string",
                "destination_schema": "string",
                "destination_table": "string",
                "source_url": "string",
                "table_configuration": {
                    "primary_keys": ["string"],
                    "salesforce_include_formula_fields": False,
                    "scd_type": "string",
                    "sequence_bies": ["string"],
                },
            },
            "schema": {
                "destination_catalog": "string",
                "destination_schema": "string",
                "source_catalog": "string",
                "source_schema": "string",
                "table_configuration": {
                    "primary_keys": ["string"],
                    "salesforce_include_formula_fields": False,
                    "scd_type": "string",
                    "sequence_bies": ["string"],
                },
            },
            "table": {
                "destination_catalog": "string",
                "destination_schema": "string",
                "destination_table": "string",
                "source_catalog": "string",
                "source_schema": "string",
                "source_table": "string",
                "table_configuration": {
                    "primary_keys": ["string"],
                    "salesforce_include_formula_fields": False,
                    "scd_type": "string",
                    "sequence_bies": ["string"],
                },
            },
        }],
        "table_configuration": {
            "primary_keys": ["string"],
            "salesforce_include_formula_fields": False,
            "scd_type": "string",
            "sequence_bies": ["string"],
        },
    },
    last_modified=0,
    latest_updates=[{
        "creation_time": "string",
        "state": "string",
        "update_id": "string",
    }],
    libraries=[{
        "file": {
            "path": "string",
        },
        "jar": "string",
        "maven": {
            "coordinates": "string",
            "exclusions": ["string"],
            "repo": "string",
        },
        "notebook": {
            "path": "string",
        },
    }],
    name="string",
    notifications=[{
        "alerts": ["string"],
        "email_recipients": ["string"],
    }],
    photon=False,
    restart_window={
        "start_hour": 0,
        "days_of_weeks": ["string"],
        "time_zone_id": "string",
    },
    run_as={
        "service_principal_name": "string",
        "user_name": "string",
    },
    schema="string",
    serverless=False,
    state="string",
    storage="string",
    target="string",
    trigger={
        "cron": {
            "quartz_cron_schedule": "string",
            "timezone_id": "string",
        },
        "manual": {},
    },
    url="string")
const pipelineResource = new databricks.Pipeline("pipelineResource", {
    allowDuplicateNames: false,
    budgetPolicyId: "string",
    catalog: "string",
    cause: "string",
    channel: "string",
    clusterId: "string",
    clusters: [{
        applyPolicyDefaultValues: false,
        autoscale: {
            maxWorkers: 0,
            minWorkers: 0,
            mode: "string",
        },
        awsAttributes: {
            availability: "string",
            ebsVolumeCount: 0,
            ebsVolumeIops: 0,
            ebsVolumeSize: 0,
            ebsVolumeThroughput: 0,
            ebsVolumeType: "string",
            firstOnDemand: 0,
            instanceProfileArn: "string",
            spotBidPricePercent: 0,
            zoneId: "string",
        },
        azureAttributes: {
            availability: "string",
            firstOnDemand: 0,
            logAnalyticsInfo: {
                logAnalyticsPrimaryKey: "string",
                logAnalyticsWorkspaceId: "string",
            },
            spotBidMaxPrice: 0,
        },
        clusterLogConf: {
            dbfs: {
                destination: "string",
            },
            s3: {
                destination: "string",
                cannedAcl: "string",
                enableEncryption: false,
                encryptionType: "string",
                endpoint: "string",
                kmsKey: "string",
                region: "string",
            },
            volumes: {
                destination: "string",
            },
        },
        customTags: {
            string: "string",
        },
        driverInstancePoolId: "string",
        driverNodeTypeId: "string",
        enableLocalDiskEncryption: false,
        gcpAttributes: {
            availability: "string",
            googleServiceAccount: "string",
            localSsdCount: 0,
            zoneId: "string",
        },
        initScripts: [{
            abfss: {
                destination: "string",
            },
            file: {
                destination: "string",
            },
            gcs: {
                destination: "string",
            },
            s3: {
                destination: "string",
                cannedAcl: "string",
                enableEncryption: false,
                encryptionType: "string",
                endpoint: "string",
                kmsKey: "string",
                region: "string",
            },
            volumes: {
                destination: "string",
            },
            workspace: {
                destination: "string",
            },
        }],
        instancePoolId: "string",
        label: "string",
        nodeTypeId: "string",
        numWorkers: 0,
        policyId: "string",
        sparkConf: {
            string: "string",
        },
        sparkEnvVars: {
            string: "string",
        },
        sshPublicKeys: ["string"],
    }],
    configuration: {
        string: "string",
    },
    continuous: false,
    creatorUserName: "string",
    deployment: {
        kind: "string",
        metadataFilePath: "string",
    },
    development: false,
    edition: "string",
    eventLog: {
        catalog: "string",
        name: "string",
        schema: "string",
    },
    expectedLastModified: 0,
    filters: {
        excludes: ["string"],
        includes: ["string"],
    },
    gatewayDefinition: {
        connectionId: "string",
        connectionName: "string",
        gatewayStorageCatalog: "string",
        gatewayStorageName: "string",
        gatewayStorageSchema: "string",
    },
    health: "string",
    ingestionDefinition: {
        connectionName: "string",
        ingestionGatewayId: "string",
        objects: [{
            report: {
                destinationCatalog: "string",
                destinationSchema: "string",
                destinationTable: "string",
                sourceUrl: "string",
                tableConfiguration: {
                    primaryKeys: ["string"],
                    salesforceIncludeFormulaFields: false,
                    scdType: "string",
                    sequenceBies: ["string"],
                },
            },
            schema: {
                destinationCatalog: "string",
                destinationSchema: "string",
                sourceCatalog: "string",
                sourceSchema: "string",
                tableConfiguration: {
                    primaryKeys: ["string"],
                    salesforceIncludeFormulaFields: false,
                    scdType: "string",
                    sequenceBies: ["string"],
                },
            },
            table: {
                destinationCatalog: "string",
                destinationSchema: "string",
                destinationTable: "string",
                sourceCatalog: "string",
                sourceSchema: "string",
                sourceTable: "string",
                tableConfiguration: {
                    primaryKeys: ["string"],
                    salesforceIncludeFormulaFields: false,
                    scdType: "string",
                    sequenceBies: ["string"],
                },
            },
        }],
        tableConfiguration: {
            primaryKeys: ["string"],
            salesforceIncludeFormulaFields: false,
            scdType: "string",
            sequenceBies: ["string"],
        },
    },
    lastModified: 0,
    latestUpdates: [{
        creationTime: "string",
        state: "string",
        updateId: "string",
    }],
    libraries: [{
        file: {
            path: "string",
        },
        jar: "string",
        maven: {
            coordinates: "string",
            exclusions: ["string"],
            repo: "string",
        },
        notebook: {
            path: "string",
        },
    }],
    name: "string",
    notifications: [{
        alerts: ["string"],
        emailRecipients: ["string"],
    }],
    photon: false,
    restartWindow: {
        startHour: 0,
        daysOfWeeks: ["string"],
        timeZoneId: "string",
    },
    runAs: {
        servicePrincipalName: "string",
        userName: "string",
    },
    schema: "string",
    serverless: false,
    state: "string",
    storage: "string",
    target: "string",
    trigger: {
        cron: {
            quartzCronSchedule: "string",
            timezoneId: "string",
        },
        manual: {},
    },
    url: "string",
});
Copy
type: databricks:Pipeline
properties:
    allowDuplicateNames: false
    budgetPolicyId: string
    catalog: string
    cause: string
    channel: string
    clusterId: string
    clusters:
        - applyPolicyDefaultValues: false
          autoscale:
            maxWorkers: 0
            minWorkers: 0
            mode: string
          awsAttributes:
            availability: string
            ebsVolumeCount: 0
            ebsVolumeIops: 0
            ebsVolumeSize: 0
            ebsVolumeThroughput: 0
            ebsVolumeType: string
            firstOnDemand: 0
            instanceProfileArn: string
            spotBidPricePercent: 0
            zoneId: string
          azureAttributes:
            availability: string
            firstOnDemand: 0
            logAnalyticsInfo:
                logAnalyticsPrimaryKey: string
                logAnalyticsWorkspaceId: string
            spotBidMaxPrice: 0
          clusterLogConf:
            dbfs:
                destination: string
            s3:
                cannedAcl: string
                destination: string
                enableEncryption: false
                encryptionType: string
                endpoint: string
                kmsKey: string
                region: string
            volumes:
                destination: string
          customTags:
            string: string
          driverInstancePoolId: string
          driverNodeTypeId: string
          enableLocalDiskEncryption: false
          gcpAttributes:
            availability: string
            googleServiceAccount: string
            localSsdCount: 0
            zoneId: string
          initScripts:
            - abfss:
                destination: string
              file:
                destination: string
              gcs:
                destination: string
              s3:
                cannedAcl: string
                destination: string
                enableEncryption: false
                encryptionType: string
                endpoint: string
                kmsKey: string
                region: string
              volumes:
                destination: string
              workspace:
                destination: string
          instancePoolId: string
          label: string
          nodeTypeId: string
          numWorkers: 0
          policyId: string
          sparkConf:
            string: string
          sparkEnvVars:
            string: string
          sshPublicKeys:
            - string
    configuration:
        string: string
    continuous: false
    creatorUserName: string
    deployment:
        kind: string
        metadataFilePath: string
    development: false
    edition: string
    eventLog:
        catalog: string
        name: string
        schema: string
    expectedLastModified: 0
    filters:
        excludes:
            - string
        includes:
            - string
    gatewayDefinition:
        connectionId: string
        connectionName: string
        gatewayStorageCatalog: string
        gatewayStorageName: string
        gatewayStorageSchema: string
    health: string
    ingestionDefinition:
        connectionName: string
        ingestionGatewayId: string
        objects:
            - report:
                destinationCatalog: string
                destinationSchema: string
                destinationTable: string
                sourceUrl: string
                tableConfiguration:
                    primaryKeys:
                        - string
                    salesforceIncludeFormulaFields: false
                    scdType: string
                    sequenceBies:
                        - string
              schema:
                destinationCatalog: string
                destinationSchema: string
                sourceCatalog: string
                sourceSchema: string
                tableConfiguration:
                    primaryKeys:
                        - string
                    salesforceIncludeFormulaFields: false
                    scdType: string
                    sequenceBies:
                        - string
              table:
                destinationCatalog: string
                destinationSchema: string
                destinationTable: string
                sourceCatalog: string
                sourceSchema: string
                sourceTable: string
                tableConfiguration:
                    primaryKeys:
                        - string
                    salesforceIncludeFormulaFields: false
                    scdType: string
                    sequenceBies:
                        - string
        tableConfiguration:
            primaryKeys:
                - string
            salesforceIncludeFormulaFields: false
            scdType: string
            sequenceBies:
                - string
    lastModified: 0
    latestUpdates:
        - creationTime: string
          state: string
          updateId: string
    libraries:
        - file:
            path: string
          jar: string
          maven:
            coordinates: string
            exclusions:
                - string
            repo: string
          notebook:
            path: string
    name: string
    notifications:
        - alerts:
            - string
          emailRecipients:
            - string
    photon: false
    restartWindow:
        daysOfWeeks:
            - string
        startHour: 0
        timeZoneId: string
    runAs:
        servicePrincipalName: string
        userName: string
    schema: string
    serverless: false
    state: string
    storage: string
    target: string
    trigger:
        cron:
            quartzCronSchedule: string
            timezoneId: string
        manual: {}
    url: string
Copy

Pipeline Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

The Pipeline resource accepts the following input properties:

AllowDuplicateNames bool
Optional boolean flag. If false, deployment will fail if the name conflicts with that of another pipeline. The default is false.
BudgetPolicyId string
Optional string specifying the ID of the budget policy for this DLT pipeline.
Catalog Changes to this property will trigger replacement. string
The name of the catalog in Unity Catalog. Changing this parameter forces recreation of the pipeline. (Conflicts with storage).
Cause string
Channel string
Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are: CURRENT (default) and PREVIEW.
ClusterId string
Clusters List<PipelineCluster>
blocks - Clusters to run the pipeline. If none is specified, a default cluster configuration is selected automatically. Note that DLT pipeline clusters support only a subset of cluster attributes, as described in the documentation. Also note that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old one).
Configuration Dictionary<string, string>
An optional map of configuration values applied to the entire pipeline, expressed as key-value pairs.
Continuous bool
A flag indicating whether to run the pipeline continuously. The default value is false.
CreatorUserName string
Deployment PipelineDeployment
Deployment type of this pipeline. Supports the following attributes:
Development bool
A flag indicating whether to run the pipeline in development mode. The default value is false.
Edition string
Optional name of the product edition. Supported values are: CORE, PRO, ADVANCED (default). Not required when serverless is set to true.
EventLog PipelineEventLog
ExpectedLastModified int
Filters PipelineFilters
Filters on which Pipeline packages to include in the deployed graph. This block consists of the following attributes:
GatewayDefinition PipelineGatewayDefinition
The definition of a gateway pipeline to support CDC. Consists of the following attributes:
Health string
IngestionDefinition PipelineIngestionDefinition
LastModified int
LatestUpdates List<PipelineLatestUpdate>
Libraries List<PipelineLibrary>
blocks - Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of the special notebook and file library types, each of which takes a path attribute. Currently only the notebook and file types are supported.
Name string
A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
Notifications List<PipelineNotification>
Photon bool
A flag indicating whether to use Photon engine. The default value is false.
RestartWindow PipelineRestartWindow
RunAs PipelineRunAs
Schema string
The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
Serverless bool
An optional flag indicating whether serverless compute should be used for this DLT pipeline. Requires catalog to be set, as serverless compute can only be used with Unity Catalog (see the sketch after this input list).
State string
Storage Changes to this property will trigger replacement. string
A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
Target string
The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
Trigger PipelineTrigger
Url string
URL of the DLT pipeline on the given workspace.
AllowDuplicateNames bool
Optional boolean flag. If false, deployment will fail if name conflicts with that of another pipeline. default is false.
BudgetPolicyId string
optional string specifying ID of the budget policy for this DLT pipeline.
Catalog Changes to this property will trigger replacement. string
The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
Cause string
Channel string
optional name of the release channel for Spark version used by DLT pipeline. Supported values are: CURRENT (default) and PREVIEW.
ClusterId string
Clusters []PipelineClusterArgs
blocks - Clusters to run the pipeline. If none is specified, pipelines will automatically select a default cluster configuration for the pipeline. Please note that DLT pipeline clusters are supporting only subset of attributes as described in documentation. Also, note that autoscale block is extended with the mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for new, enhanced autoscaling algorithm, or LEGACY for old algorithm).
Configuration map[string]string
An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
Continuous bool
A flag indicating whether to run the pipeline continuously. The default value is false.
CreatorUserName string
Deployment PipelineDeploymentArgs
Deployment type of this pipeline. Supports following attributes:
Development bool
A flag indicating whether to run the pipeline in development mode. The default value is false.
Edition string
optional name of the product edition. Supported values are: CORE, PRO, ADVANCED (default). Not required when serverless is set to true.
EventLog PipelineEventLogArgs
ExpectedLastModified int
Filters PipelineFiltersArgs
Filters on which Pipeline packages to include in the deployed graph. This block consists of following attributes:
GatewayDefinition PipelineGatewayDefinitionArgs
The definition of a gateway pipeline to support CDC. Consists of following attributes:
Health string
IngestionDefinition PipelineIngestionDefinitionArgs
LastModified int
LatestUpdates []PipelineLatestUpdateArgs
Libraries []PipelineLibraryArgs
blocks - Specifies pipeline code and required artifacts. Syntax resembles library configuration block with the addition of a special notebook & file library types that should have the path attribute. Right now only the notebook & file types are supported.
Name string
A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
Notifications []PipelineNotificationArgs
Photon bool
A flag indicating whether to use Photon engine. The default value is false.
RestartWindow PipelineRestartWindowArgs
RunAs PipelineRunAsArgs
Schema string
The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
Serverless bool
An optional flag indicating if serverless compute should be used for this DLT pipeline. Requires catalog to be set, as it could be used only with Unity Catalog.
State string
Storage Changes to this property will trigger replacement. string
A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
Target string
The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
Trigger PipelineTriggerArgs
Url string
URL of the DLT pipeline on the given workspace.
allowDuplicateNames Boolean
Optional boolean flag. If false, deployment will fail if name conflicts with that of another pipeline. default is false.
budgetPolicyId String
optional string specifying ID of the budget policy for this DLT pipeline.
catalog Changes to this property will trigger replacement. String
The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
cause String
channel String
optional name of the release channel for Spark version used by DLT pipeline. Supported values are: CURRENT (default) and PREVIEW.
clusterId String
clusters List<PipelineCluster>
blocks - Clusters to run the pipeline. If none is specified, pipelines will automatically select a default cluster configuration for the pipeline. Please note that DLT pipeline clusters are supporting only subset of attributes as described in documentation. Also, note that autoscale block is extended with the mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for new, enhanced autoscaling algorithm, or LEGACY for old algorithm).
configuration Map<String,String>
An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
continuous Boolean
A flag indicating whether to run the pipeline continuously. The default value is false.
creatorUserName String
deployment PipelineDeployment
Deployment type of this pipeline. Supports following attributes:
development Boolean
A flag indicating whether to run the pipeline in development mode. The default value is false.
edition String
optional name of the product edition. Supported values are: CORE, PRO, ADVANCED (default). Not required when serverless is set to true.
eventLog PipelineEventLog
expectedLastModified Integer
filters PipelineFilters
Filters on which Pipeline packages to include in the deployed graph. This block consists of following attributes:
gatewayDefinition PipelineGatewayDefinition
The definition of a gateway pipeline to support CDC. Consists of following attributes:
health String
ingestionDefinition PipelineIngestionDefinition
lastModified Integer
latestUpdates List<PipelineLatestUpdate>
libraries List<PipelineLibrary>
blocks - Specifies pipeline code and required artifacts. Syntax resembles library configuration block with the addition of a special notebook & file library types that should have the path attribute. Right now only the notebook & file types are supported.
name String
A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
notifications List<PipelineNotification>
photon Boolean
A flag indicating whether to use Photon engine. The default value is false.
restartWindow PipelineRestartWindow
runAs PipelineRunAs
schema String
The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
serverless Boolean
An optional flag indicating if serverless compute should be used for this DLT pipeline. Requires catalog to be set, as it could be used only with Unity Catalog.
state String
storage Changes to this property will trigger replacement. String
A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
target String
The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
trigger PipelineTrigger
url String
URL of the DLT pipeline on the given workspace.
allowDuplicateNames boolean
Optional boolean flag. If false, deployment will fail if name conflicts with that of another pipeline. default is false.
budgetPolicyId string
optional string specifying ID of the budget policy for this DLT pipeline.
catalog Changes to this property will trigger replacement. string
The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
cause string
channel string
optional name of the release channel for Spark version used by DLT pipeline. Supported values are: CURRENT (default) and PREVIEW.
clusterId string
clusters PipelineCluster[]
blocks - Clusters to run the pipeline. If none is specified, pipelines will automatically select a default cluster configuration for the pipeline. Please note that DLT pipeline clusters are supporting only subset of attributes as described in documentation. Also, note that autoscale block is extended with the mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for new, enhanced autoscaling algorithm, or LEGACY for old algorithm).
configuration {[key: string]: string}
An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
continuous boolean
A flag indicating whether to run the pipeline continuously. The default value is false.
creatorUserName string
deployment PipelineDeployment
Deployment type of this pipeline. Supports following attributes:
development boolean
A flag indicating whether to run the pipeline in development mode. The default value is false.
edition string
optional name of the product edition. Supported values are: CORE, PRO, ADVANCED (default). Not required when serverless is set to true.
eventLog PipelineEventLog
expectedLastModified number
filters PipelineFilters
Filters on which Pipeline packages to include in the deployed graph. This block consists of following attributes:
gatewayDefinition PipelineGatewayDefinition
The definition of a gateway pipeline to support CDC. Consists of following attributes:
health string
ingestionDefinition PipelineIngestionDefinition
lastModified number
latestUpdates PipelineLatestUpdate[]
libraries PipelineLibrary[]
blocks - Specifies pipeline code and required artifacts. Syntax resembles library configuration block with the addition of a special notebook & file library types that should have the path attribute. Right now only the notebook & file types are supported.
name string
A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
notifications PipelineNotification[]
photon boolean
A flag indicating whether to use Photon engine. The default value is false.
restartWindow PipelineRestartWindow
runAs PipelineRunAs
schema string
The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
serverless boolean
An optional flag indicating if serverless compute should be used for this DLT pipeline. Requires catalog to be set, as it could be used only with Unity Catalog.
state string
storage Changes to this property will trigger replacement. string
A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
target string
The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
trigger PipelineTrigger
url string
URL of the DLT pipeline on the given workspace.
allow_duplicate_names bool
Optional boolean flag. If false, deployment will fail if name conflicts with that of another pipeline. default is false.
budget_policy_id str
optional string specifying ID of the budget policy for this DLT pipeline.
catalog Changes to this property will trigger replacement. str
The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
cause str
channel str
optional name of the release channel for Spark version used by DLT pipeline. Supported values are: CURRENT (default) and PREVIEW.
cluster_id str
clusters Sequence[PipelineClusterArgs]
blocks - Clusters to run the pipeline. If none is specified, pipelines will automatically select a default cluster configuration for the pipeline. Please note that DLT pipeline clusters are supporting only subset of attributes as described in documentation. Also, note that autoscale block is extended with the mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for new, enhanced autoscaling algorithm, or LEGACY for old algorithm).
configuration Mapping[str, str]
An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
continuous bool
A flag indicating whether to run the pipeline continuously. The default value is false.
creator_user_name str
deployment PipelineDeploymentArgs
Deployment type of this pipeline. Supports following attributes:
development bool
A flag indicating whether to run the pipeline in development mode. The default value is false.
edition str
optional name of the product edition. Supported values are: CORE, PRO, ADVANCED (default). Not required when serverless is set to true.
event_log PipelineEventLogArgs
expected_last_modified int
filters PipelineFiltersArgs
Filters on which Pipeline packages to include in the deployed graph. This block consists of following attributes:
gateway_definition PipelineGatewayDefinitionArgs
The definition of a gateway pipeline to support CDC. Consists of following attributes:
health str
ingestion_definition PipelineIngestionDefinitionArgs
last_modified int
latest_updates Sequence[PipelineLatestUpdateArgs]
libraries Sequence[PipelineLibraryArgs]
blocks - Specifies pipeline code and required artifacts. Syntax resembles library configuration block with the addition of a special notebook & file library types that should have the path attribute. Right now only the notebook & file types are supported.
name str
A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
notifications Sequence[PipelineNotificationArgs]
photon bool
A flag indicating whether to use Photon engine. The default value is false.
restart_window PipelineRestartWindowArgs
run_as PipelineRunAsArgs
schema str
The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
serverless bool
An optional flag indicating if serverless compute should be used for this DLT pipeline. Requires catalog to be set, as it could be used only with Unity Catalog.
state str
storage Changes to this property will trigger replacement. str
A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
target str
The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
trigger PipelineTriggerArgs
url str
URL of the DLT pipeline on the given workspace.
allowDuplicateNames Boolean
Optional boolean flag. If false, deployment will fail if name conflicts with that of another pipeline. default is false.
budgetPolicyId String
optional string specifying ID of the budget policy for this DLT pipeline.
catalog Changes to this property will trigger replacement. String
The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
cause String
channel String
optional name of the release channel for Spark version used by DLT pipeline. Supported values are: CURRENT (default) and PREVIEW.
clusterId String
clusters List<Property Map>
blocks - Clusters to run the pipeline. If none is specified, pipelines will automatically select a default cluster configuration for the pipeline. Please note that DLT pipeline clusters are supporting only subset of attributes as described in documentation. Also, note that autoscale block is extended with the mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for new, enhanced autoscaling algorithm, or LEGACY for old algorithm).
configuration Map<String>
An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
continuous Boolean
A flag indicating whether to run the pipeline continuously. The default value is false.
creatorUserName String
deployment Property Map
Deployment type of this pipeline. Supports following attributes:
development Boolean
A flag indicating whether to run the pipeline in development mode. The default value is false.
edition String
optional name of the product edition. Supported values are: CORE, PRO, ADVANCED (default). Not required when serverless is set to true.
eventLog Property Map
expectedLastModified Number
filters Property Map
Filters on which Pipeline packages to include in the deployed graph. This block consists of following attributes:
gatewayDefinition Property Map
The definition of a gateway pipeline to support CDC. Consists of following attributes:
health String
ingestionDefinition Property Map
lastModified Number
latestUpdates List<Property Map>
libraries List<Property Map>
blocks - Specifies pipeline code and required artifacts. Syntax resembles library configuration block with the addition of a special notebook & file library types that should have the path attribute. Right now only the notebook & file types are supported.
name String
A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
notifications List<Property Map>
photon Boolean
A flag indicating whether to use Photon engine. The default value is false.
restartWindow Property Map
runAs Property Map
schema String
The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
serverless Boolean
An optional flag indicating if serverless compute should be used for this DLT pipeline. Requires catalog to be set, as it could be used only with Unity Catalog.
state String
storage Changes to this property will trigger replacement. String
A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
target String
The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
trigger Property Map
url String
URL of the DLT pipeline on the given workspace.
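
For example, a minimal TypeScript sketch of a serverless, Unity Catalog-backed pipeline that combines several of the inputs above (the pipeline name, catalog, schema, notebook path, and email address are illustrative placeholders, not values from this page):

import * as databricks from "@pulumi/databricks";

// Sketch only: serverless compute requires `catalog` (Unity Catalog), and
// setting `schema` puts the pipeline in direct publishing mode.
const serverlessPipeline = new databricks.Pipeline("serverless_pipeline", {
    name: "Sales Ingest",
    serverless: true,
    catalog: "main",            // placeholder catalog name
    schema: "sales",            // placeholder target schema
    continuous: false,
    libraries: [{
        notebook: { path: "/Pipelines/sales_ingest" },  // placeholder notebook path
    }],
    notifications: [{
        emailRecipients: ["owner@example.com"],
        alerts: ["on-update-failure", "on-update-fatal-failure"],
    }],
});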

Outputs

All input properties are implicitly available as output properties. Additionally, the Pipeline resource produces the following output properties:

Id string
The provider-assigned unique ID for this managed resource.
RunAsUserName string
Id string
The provider-assigned unique ID for this managed resource.
RunAsUserName string
id String
The provider-assigned unique ID for this managed resource.
runAsUserName String
id string
The provider-assigned unique ID for this managed resource.
runAsUserName string
id str
The provider-assigned unique ID for this managed resource.
run_as_user_name str
id String
The provider-assigned unique ID for this managed resource.
runAsUserName String
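
Like any other output, these can be exported from a Pulumi program. A minimal sketch (the resource and export names are placeholders):

import * as databricks from "@pulumi/databricks";

// Placeholder pipeline; any databricks.Pipeline instance exposes these outputs.
const demo = new databricks.Pipeline("demo", {
    name: "Demo Pipeline",
    libraries: [{ notebook: { path: "/Pipelines/demo" } }],
});

// Provider-assigned ID plus the additional output properties listed above.
export const pipelineId = demo.id;
export const pipelineUrl = demo.url;
export const pipelineRunAs = demo.runAsUserName;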

Look up Existing Pipeline Resource

Get an existing Pipeline resource's state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: PipelineState, opts?: CustomResourceOptions): Pipeline
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        allow_duplicate_names: Optional[bool] = None,
        budget_policy_id: Optional[str] = None,
        catalog: Optional[str] = None,
        cause: Optional[str] = None,
        channel: Optional[str] = None,
        cluster_id: Optional[str] = None,
        clusters: Optional[Sequence[PipelineClusterArgs]] = None,
        configuration: Optional[Mapping[str, str]] = None,
        continuous: Optional[bool] = None,
        creator_user_name: Optional[str] = None,
        deployment: Optional[PipelineDeploymentArgs] = None,
        development: Optional[bool] = None,
        edition: Optional[str] = None,
        event_log: Optional[PipelineEventLogArgs] = None,
        expected_last_modified: Optional[int] = None,
        filters: Optional[PipelineFiltersArgs] = None,
        gateway_definition: Optional[PipelineGatewayDefinitionArgs] = None,
        health: Optional[str] = None,
        ingestion_definition: Optional[PipelineIngestionDefinitionArgs] = None,
        last_modified: Optional[int] = None,
        latest_updates: Optional[Sequence[PipelineLatestUpdateArgs]] = None,
        libraries: Optional[Sequence[PipelineLibraryArgs]] = None,
        name: Optional[str] = None,
        notifications: Optional[Sequence[PipelineNotificationArgs]] = None,
        photon: Optional[bool] = None,
        restart_window: Optional[PipelineRestartWindowArgs] = None,
        run_as: Optional[PipelineRunAsArgs] = None,
        run_as_user_name: Optional[str] = None,
        schema: Optional[str] = None,
        serverless: Optional[bool] = None,
        state: Optional[str] = None,
        storage: Optional[str] = None,
        target: Optional[str] = None,
        trigger: Optional[PipelineTriggerArgs] = None,
        url: Optional[str] = None) -> Pipeline
func GetPipeline(ctx *Context, name string, id IDInput, state *PipelineState, opts ...ResourceOption) (*Pipeline, error)
public static Pipeline Get(string name, Input<string> id, PipelineState? state, CustomResourceOptions? opts = null)
public static Pipeline get(String name, Output<String> id, PipelineState state, CustomResourceOptions options)
resources:
  _:
    type: databricks:Pipeline
    get:
      id: ${id}
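
For example, in TypeScript the lookup might look like this (the logical name and pipeline ID below are placeholders; in practice the ID usually comes from configuration or another stack's output):

import * as databricks from "@pulumi/databricks";

// Look up a pipeline that already exists in the workspace by its ID.
const existing = databricks.Pipeline.get("existing-pipeline", "1234-567890-abcdefgh");

// The looked-up resource exposes the same properties as a managed one.
export const existingPipelineName = existing.name;
export const existingPipelineUrl = existing.url;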
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
The following state arguments are supported:
AllowDuplicateNames bool
Optional boolean flag. If false, deployment will fail if name conflicts with that of another pipeline. default is false.
BudgetPolicyId string
optional string specifying ID of the budget policy for this DLT pipeline.
Catalog Changes to this property will trigger replacement. string
The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
Cause string
Channel string
optional name of the release channel for Spark version used by DLT pipeline. Supported values are: CURRENT (default) and PREVIEW.
ClusterId string
Clusters List<PipelineCluster>
blocks - Clusters to run the pipeline. If none is specified, pipelines will automatically select a default cluster configuration for the pipeline. Please note that DLT pipeline clusters are supporting only subset of attributes as described in documentation. Also, note that autoscale block is extended with the mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for new, enhanced autoscaling algorithm, or LEGACY for old algorithm).
Configuration Dictionary<string, string>
An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
Continuous bool
A flag indicating whether to run the pipeline continuously. The default value is false.
CreatorUserName string
Deployment PipelineDeployment
Deployment type of this pipeline. Supports following attributes:
Development bool
A flag indicating whether to run the pipeline in development mode. The default value is false.
Edition string
optional name of the product edition. Supported values are: CORE, PRO, ADVANCED (default). Not required when serverless is set to true.
EventLog PipelineEventLog
ExpectedLastModified int
Filters PipelineFilters
Filters on which Pipeline packages to include in the deployed graph. This block consists of following attributes:
GatewayDefinition PipelineGatewayDefinition
The definition of a gateway pipeline to support CDC. Consists of following attributes:
Health string
IngestionDefinition PipelineIngestionDefinition
LastModified int
LatestUpdates List<PipelineLatestUpdate>
Libraries List<PipelineLibrary>
blocks - Specifies pipeline code and required artifacts. Syntax resembles library configuration block with the addition of a special notebook & file library types that should have the path attribute. Right now only the notebook & file types are supported.
Name string
A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
Notifications List<PipelineNotification>
Photon bool
A flag indicating whether to use Photon engine. The default value is false.
RestartWindow PipelineRestartWindow
RunAs PipelineRunAs
RunAsUserName string
Schema string
The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
Serverless bool
An optional flag indicating if serverless compute should be used for this DLT pipeline. Requires catalog to be set, as it could be used only with Unity Catalog.
State string
Storage Changes to this property will trigger replacement. string
A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
Target string
The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
Trigger PipelineTrigger
Url string
URL of the DLT pipeline on the given workspace.
AllowDuplicateNames bool
Optional boolean flag. If false, deployment will fail if name conflicts with that of another pipeline. default is false.
BudgetPolicyId string
optional string specifying ID of the budget policy for this DLT pipeline.
Catalog Changes to this property will trigger replacement. string
The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
Cause string
Channel string
optional name of the release channel for Spark version used by DLT pipeline. Supported values are: CURRENT (default) and PREVIEW.
ClusterId string
Clusters []PipelineClusterArgs
blocks - Clusters to run the pipeline. If none is specified, pipelines will automatically select a default cluster configuration for the pipeline. Please note that DLT pipeline clusters are supporting only subset of attributes as described in documentation. Also, note that autoscale block is extended with the mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for new, enhanced autoscaling algorithm, or LEGACY for old algorithm).
Configuration map[string]string
An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
Continuous bool
A flag indicating whether to run the pipeline continuously. The default value is false.
CreatorUserName string
Deployment PipelineDeploymentArgs
Deployment type of this pipeline. Supports following attributes:
Development bool
A flag indicating whether to run the pipeline in development mode. The default value is false.
Edition string
optional name of the product edition. Supported values are: CORE, PRO, ADVANCED (default). Not required when serverless is set to true.
EventLog PipelineEventLogArgs
ExpectedLastModified int
Filters PipelineFiltersArgs
Filters on which Pipeline packages to include in the deployed graph. This block consists of following attributes:
GatewayDefinition PipelineGatewayDefinitionArgs
The definition of a gateway pipeline to support CDC. Consists of following attributes:
Health string
IngestionDefinition PipelineIngestionDefinitionArgs
LastModified int
LatestUpdates []PipelineLatestUpdateArgs
Libraries []PipelineLibraryArgs
blocks - Specifies pipeline code and required artifacts. Syntax resembles library configuration block with the addition of a special notebook & file library types that should have the path attribute. Right now only the notebook & file types are supported.
Name string
A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
Notifications []PipelineNotificationArgs
Photon bool
A flag indicating whether to use Photon engine. The default value is false.
RestartWindow PipelineRestartWindowArgs
RunAs PipelineRunAsArgs
RunAsUserName string
Schema string
The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
Serverless bool
An optional flag indicating if serverless compute should be used for this DLT pipeline. Requires catalog to be set, as it could be used only with Unity Catalog.
State string
Storage Changes to this property will trigger replacement. string
A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
Target string
The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
Trigger PipelineTriggerArgs
Url string
URL of the DLT pipeline on the given workspace.
allowDuplicateNames Boolean
Optional boolean flag. If false, deployment will fail if name conflicts with that of another pipeline. default is false.
budgetPolicyId String
optional string specifying ID of the budget policy for this DLT pipeline.
catalog Changes to this property will trigger replacement. String
The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
cause String
channel String
optional name of the release channel for Spark version used by DLT pipeline. Supported values are: CURRENT (default) and PREVIEW.
clusterId String
clusters List<PipelineCluster>
blocks - Clusters to run the pipeline. If none is specified, pipelines will automatically select a default cluster configuration for the pipeline. Please note that DLT pipeline clusters are supporting only subset of attributes as described in documentation. Also, note that autoscale block is extended with the mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for new, enhanced autoscaling algorithm, or LEGACY for old algorithm).
configuration Map<String,String>
An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
continuous Boolean
A flag indicating whether to run the pipeline continuously. The default value is false.
creatorUserName String
deployment PipelineDeployment
Deployment type of this pipeline. Supports following attributes:
development Boolean
A flag indicating whether to run the pipeline in development mode. The default value is false.
edition String
optional name of the product edition. Supported values are: CORE, PRO, ADVANCED (default). Not required when serverless is set to true.
eventLog PipelineEventLog
expectedLastModified Integer
filters PipelineFilters
Filters on which Pipeline packages to include in the deployed graph. This block consists of following attributes:
gatewayDefinition PipelineGatewayDefinition
The definition of a gateway pipeline to support CDC. Consists of following attributes:
health String
ingestionDefinition PipelineIngestionDefinition
lastModified Integer
latestUpdates List<PipelineLatestUpdate>
libraries List<PipelineLibrary>
blocks - Specifies pipeline code and required artifacts. Syntax resembles library configuration block with the addition of a special notebook & file library types that should have the path attribute. Right now only the notebook & file types are supported.
name String
A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
notifications List<PipelineNotification>
photon Boolean
A flag indicating whether to use Photon engine. The default value is false.
restartWindow PipelineRestartWindow
runAs PipelineRunAs
runAsUserName String
schema String
The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
serverless Boolean
An optional flag indicating if serverless compute should be used for this DLT pipeline. Requires catalog to be set, as it could be used only with Unity Catalog.
state String
storage Changes to this property will trigger replacement. String
A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
target String
The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
trigger PipelineTrigger
url String
URL of the DLT pipeline on the given workspace.
allowDuplicateNames boolean
Optional boolean flag. If false, deployment will fail if name conflicts with that of another pipeline. default is false.
budgetPolicyId string
optional string specifying ID of the budget policy for this DLT pipeline.
catalog Changes to this property will trigger replacement. string
The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
cause string
channel string
optional name of the release channel for Spark version used by DLT pipeline. Supported values are: CURRENT (default) and PREVIEW.
clusterId string
clusters PipelineCluster[]
blocks - Clusters to run the pipeline. If none is specified, pipelines will automatically select a default cluster configuration for the pipeline. Please note that DLT pipeline clusters are supporting only subset of attributes as described in documentation. Also, note that autoscale block is extended with the mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for new, enhanced autoscaling algorithm, or LEGACY for old algorithm).
configuration {[key: string]: string}
An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
continuous boolean
A flag indicating whether to run the pipeline continuously. The default value is false.
creatorUserName string
deployment PipelineDeployment
Deployment type of this pipeline. Supports following attributes:
development boolean
A flag indicating whether to run the pipeline in development mode. The default value is false.
edition string
optional name of the product edition. Supported values are: CORE, PRO, ADVANCED (default). Not required when serverless is set to true.
eventLog PipelineEventLog
expectedLastModified number
filters PipelineFilters
Filters on which Pipeline packages to include in the deployed graph. This block consists of following attributes:
gatewayDefinition PipelineGatewayDefinition
The definition of a gateway pipeline to support CDC. Consists of following attributes:
health string
ingestionDefinition PipelineIngestionDefinition
lastModified number
latestUpdates PipelineLatestUpdate[]
libraries PipelineLibrary[]
blocks - Specifies pipeline code and required artifacts. Syntax resembles library configuration block with the addition of a special notebook & file library types that should have the path attribute. Right now only the notebook & file types are supported.
name string
A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
notifications PipelineNotification[]
photon boolean
A flag indicating whether to use Photon engine. The default value is false.
restartWindow PipelineRestartWindow
runAs PipelineRunAs
runAsUserName string
schema string
The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
serverless boolean
An optional flag indicating if serverless compute should be used for this DLT pipeline. Requires catalog to be set, as it could be used only with Unity Catalog.
state string
storage Changes to this property will trigger replacement. string
A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
target string
The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
trigger PipelineTrigger
url string
URL of the DLT pipeline on the given workspace.
allow_duplicate_names bool
Optional boolean flag. If false, deployment will fail if name conflicts with that of another pipeline. default is false.
budget_policy_id str
optional string specifying ID of the budget policy for this DLT pipeline.
catalog Changes to this property will trigger replacement. str
The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
cause str
channel str
optional name of the release channel for Spark version used by DLT pipeline. Supported values are: CURRENT (default) and PREVIEW.
cluster_id str
clusters Sequence[PipelineClusterArgs]
blocks - Clusters to run the pipeline. If none is specified, pipelines will automatically select a default cluster configuration for the pipeline. Please note that DLT pipeline clusters are supporting only subset of attributes as described in documentation. Also, note that autoscale block is extended with the mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for new, enhanced autoscaling algorithm, or LEGACY for old algorithm).
configuration Mapping[str, str]
An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
continuous bool
A flag indicating whether to run the pipeline continuously. The default value is false.
creator_user_name str
deployment PipelineDeploymentArgs
Deployment type of this pipeline. Supports following attributes:
development bool
A flag indicating whether to run the pipeline in development mode. The default value is false.
edition str
optional name of the product edition. Supported values are: CORE, PRO, ADVANCED (default). Not required when serverless is set to true.
event_log PipelineEventLogArgs
expected_last_modified int
filters PipelineFiltersArgs
Filters on which Pipeline packages to include in the deployed graph. This block consists of following attributes:
gateway_definition PipelineGatewayDefinitionArgs
The definition of a gateway pipeline to support CDC. Consists of following attributes:
health str
ingestion_definition PipelineIngestionDefinitionArgs
last_modified int
latest_updates Sequence[PipelineLatestUpdateArgs]
libraries Sequence[PipelineLibraryArgs]
blocks - Specifies pipeline code and required artifacts. Syntax resembles library configuration block with the addition of a special notebook & file library types that should have the path attribute. Right now only the notebook & file types are supported.
name str
A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
notifications Sequence[PipelineNotificationArgs]
photon bool
A flag indicating whether to use Photon engine. The default value is false.
restart_window PipelineRestartWindowArgs
run_as PipelineRunAsArgs
run_as_user_name str
schema str
The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
serverless bool
An optional flag indicating if serverless compute should be used for this DLT pipeline. Requires catalog to be set, as it could be used only with Unity Catalog.
state str
storage Changes to this property will trigger replacement. str
A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
target str
The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
trigger PipelineTriggerArgs
url str
URL of the DLT pipeline on the given workspace.
allowDuplicateNames Boolean
Optional boolean flag. If false, deployment will fail if name conflicts with that of another pipeline. default is false.
budgetPolicyId String
optional string specifying ID of the budget policy for this DLT pipeline.
catalog Changes to this property will trigger replacement. String
The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
cause String
channel String
optional name of the release channel for Spark version used by DLT pipeline. Supported values are: CURRENT (default) and PREVIEW.
clusterId String
clusters List<Property Map>
blocks - Clusters to run the pipeline. If none is specified, pipelines will automatically select a default cluster configuration for the pipeline. Please note that DLT pipeline clusters are supporting only subset of attributes as described in documentation. Also, note that autoscale block is extended with the mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for new, enhanced autoscaling algorithm, or LEGACY for old algorithm).
configuration Map<String>
An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
continuous Boolean
A flag indicating whether to run the pipeline continuously. The default value is false.
creatorUserName String
deployment Property Map
Deployment type of this pipeline. Supports following attributes:
development Boolean
A flag indicating whether to run the pipeline in development mode. The default value is false.
edition String
optional name of the product edition. Supported values are: CORE, PRO, ADVANCED (default). Not required when serverless is set to true.
eventLog Property Map
expectedLastModified Number
filters Property Map
Filters on which Pipeline packages to include in the deployed graph. This block consists of following attributes:
gatewayDefinition Property Map
The definition of a gateway pipeline to support CDC. Consists of following attributes:
health String
ingestionDefinition Property Map
lastModified Number
latestUpdates List<Property Map>
libraries List<Property Map>
blocks - Specifies pipeline code and required artifacts. Syntax resembles library configuration block with the addition of a special notebook & file library types that should have the path attribute. Right now only the notebook & file types are supported.
name String
A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
notifications List<Property Map>
photon Boolean
A flag indicating whether to use Photon engine. The default value is false.
restartWindow Property Map
runAs Property Map
runAsUserName String
schema String
The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
serverless Boolean
An optional flag indicating if serverless compute should be used for this DLT pipeline. Requires catalog to be set, as it could be used only with Unity Catalog.
state String
storage Changes to this property will trigger replacement. String
A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
target String
The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
trigger Property Map
url String
URL of the DLT pipeline on the given workspace.
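For reference, a minimal sketch of a Unity Catalog pipeline that sets catalog, schema, and serverless together, as described in the list above. The catalog, schema, and notebook path are illustrative placeholders, not values defined by the provider.

import * as databricks from "@pulumi/databricks";

// Minimal sketch: a serverless DLT pipeline publishing to Unity Catalog.
// "main", "dlt_demo", and the notebook path are illustrative placeholders.
const serverlessExample = new databricks.Pipeline("serverless_example", {
    name: "Serverless UC Pipeline",
    catalog: "main",        // conflicts with `storage`; required when serverless is true
    schema: "dlt_demo",     // direct publishing mode: default schema for pipeline tables
    serverless: true,       // serverless compute can be used only with Unity Catalog
    continuous: false,
    libraries: [{
        notebook: {
            path: "/Workspace/Users/someone@example.com/dlt_notebook",
        },
    }],
});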

Supporting Types

PipelineCluster, PipelineClusterArgs

PipelineClusterAutoscale, PipelineClusterAutoscaleArgs

MaxWorkers This property is required. int
MinWorkers This property is required. int
Mode string
MaxWorkers This property is required. int
MinWorkers This property is required. int
Mode string
maxWorkers This property is required. Integer
minWorkers This property is required. Integer
mode String
maxWorkers This property is required. number
minWorkers This property is required. number
mode string
max_workers This property is required. int
min_workers This property is required. int
mode str
maxWorkers This property is required. Number
minWorkers This property is required. Number
mode String
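As noted for the clusters argument above, the autoscale block carries the extra mode parameter. Below is a minimal sketch of one entry for a pipeline's clusters list using enhanced autoscaling; the label and worker counts are illustrative.

// One entry for a pipeline's `clusters` list, using the extended autoscale block.
const autoscaledCluster = {
    label: "default",
    autoscale: {
        minWorkers: 1,      // required
        maxWorkers: 4,      // required
        mode: "ENHANCED",   // ENHANCED (new algorithm) or LEGACY (old algorithm)
    },
};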

PipelineClusterAwsAttributes, PipelineClusterAwsAttributesArgs

PipelineClusterAzureAttributes, PipelineClusterAzureAttributesArgs

PipelineClusterAzureAttributesLogAnalyticsInfo, PipelineClusterAzureAttributesLogAnalyticsInfoArgs

PipelineClusterClusterLogConf, PipelineClusterClusterLogConfArgs

PipelineClusterClusterLogConfDbfs, PipelineClusterClusterLogConfDbfsArgs

Destination This property is required. string
Destination This property is required. string
destination This property is required. String
destination This property is required. string
destination This property is required. str
destination This property is required. String

PipelineClusterClusterLogConfS3, PipelineClusterClusterLogConfS3Args

Destination This property is required. string
CannedAcl string
EnableEncryption bool
EncryptionType string
Endpoint string
KmsKey string
Region string
Destination This property is required. string
CannedAcl string
EnableEncryption bool
EncryptionType string
Endpoint string
KmsKey string
Region string
destination This property is required. String
cannedAcl String
enableEncryption Boolean
encryptionType String
endpoint String
kmsKey String
region String
destination This property is required. string
cannedAcl string
enableEncryption boolean
encryptionType string
endpoint string
kmsKey string
region string
destination This property is required. str
canned_acl str
enable_encryption bool
encryption_type str
endpoint str
kms_key str
region str
destination This property is required. String
cannedAcl String
enableEncryption Boolean
encryptionType String
endpoint String
kmsKey String
region String
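A minimal sketch of a cluster entry that ships cluster logs to S3, assuming the log configuration is attached through a clusterLogConf block on the cluster (the property name is inferred from the type name above); bucket, path, and region are illustrative.

// One entry for a pipeline's `clusters` list with cluster logs written to S3.
const clusterWithS3Logs = {
    label: "default",
    numWorkers: 1,
    clusterLogConf: {
        s3: {
            destination: "s3://my-log-bucket/dlt-cluster-logs", // required; illustrative bucket
            region: "us-east-1",                                // illustrative region
            enableEncryption: true,
        },
    },
};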

PipelineClusterClusterLogConfVolumes, PipelineClusterClusterLogConfVolumesArgs

Destination This property is required. string
Destination This property is required. string
destination This property is required. String
destination This property is required. string
destination This property is required. str
destination This property is required. String

PipelineClusterGcpAttributes, PipelineClusterGcpAttributesArgs

PipelineClusterInitScript, PipelineClusterInitScriptArgs

abfss Property Map
dbfs Property Map

Deprecated: For init scripts use 'volumes', 'workspace' or cloud storage location instead of 'dbfs'.

file Property Map
gcs Property Map
s3 Property Map
volumes Property Map
workspace Property Map
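Since dbfs init-script locations are deprecated, here is a minimal sketch of a cluster entry with an init script stored in a Unity Catalog volume instead. The initScripts property name on the cluster is inferred from the type name above, and the volume path is illustrative.

// One entry for a pipeline's `clusters` list with a volume-backed init script.
const clusterWithInitScript = {
    label: "default",
    numWorkers: 1,
    initScripts: [{
        volumes: {
            destination: "/Volumes/main/default/scripts/install_deps.sh", // illustrative path
        },
    }],
};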

PipelineClusterInitScriptAbfss, PipelineClusterInitScriptAbfssArgs

Destination This property is required. string
Destination This property is required. string
destination This property is required. String
destination This property is required. string
destination This property is required. str
destination This property is required. String

PipelineClusterInitScriptDbfs, PipelineClusterInitScriptDbfsArgs

Destination This property is required. string
Destination This property is required. string
destination This property is required. String
destination This property is required. string
destination This property is required. str
destination This property is required. String

PipelineClusterInitScriptFile, PipelineClusterInitScriptFileArgs

Destination This property is required. string
Destination This property is required. string
destination This property is required. String
destination This property is required. string
destination This property is required. str
destination This property is required. String

PipelineClusterInitScriptGcs, PipelineClusterInitScriptGcsArgs

Destination This property is required. string
Destination This property is required. string
destination This property is required. String
destination This property is required. string
destination This property is required. str
destination This property is required. String

PipelineClusterInitScriptS3, PipelineClusterInitScriptS3Args

Destination This property is required. string
CannedAcl string
EnableEncryption bool
EncryptionType string
Endpoint string
KmsKey string
Region string
Destination This property is required. string
CannedAcl string
EnableEncryption bool
EncryptionType string
Endpoint string
KmsKey string
Region string
destination This property is required. String
cannedAcl String
enableEncryption Boolean
encryptionType String
endpoint String
kmsKey String
region String
destination This property is required. string
cannedAcl string
enableEncryption boolean
encryptionType string
endpoint string
kmsKey string
region string
destination This property is required. str
canned_acl str
enable_encryption bool
encryption_type str
endpoint str
kms_key str
region str
destination This property is required. String
cannedAcl String
enableEncryption Boolean
encryptionType String
endpoint String
kmsKey String
region String

PipelineClusterInitScriptVolumes, PipelineClusterInitScriptVolumesArgs

Destination This property is required. string
Destination This property is required. string
destination This property is required. String
destination This property is required. string
destination This property is required. str
destination This property is required. String

PipelineClusterInitScriptWorkspace, PipelineClusterInitScriptWorkspaceArgs

Destination This property is required. string
Destination This property is required. string
destination This property is required. String
destination This property is required. string
destination This property is required. str
destination This property is required. String

PipelineDeployment, PipelineDeploymentArgs

Kind string
The deployment method that manages the pipeline.
MetadataFilePath string
The path to the file containing metadata about the deployment.
Kind string
The deployment method that manages the pipeline.
MetadataFilePath string
The path to the file containing metadata about the deployment.
kind String
The deployment method that manages the pipeline.
metadataFilePath String
The path to the file containing metadata about the deployment.
kind string
The deployment method that manages the pipeline.
metadataFilePath string
The path to the file containing metadata about the deployment.
kind str
The deployment method that manages the pipeline.
metadata_file_path str
The path to the file containing metadata about the deployment.
kind String
The deployment method that manages the pipeline.
metadataFilePath String
The path to the file containing metadata about the deployment.

PipelineEventLog, PipelineEventLogArgs

Catalog string
The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
Name string
A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
Schema string
The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
Catalog string
The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
Name string
A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
Schema string
The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
catalog String
The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
name String
A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
schema String
The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
catalog string
The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
name string
A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
schema string
The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
catalog str
The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
name str
A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
schema str
The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
catalog String
The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
name String
A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
schema String
The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
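A minimal sketch of publishing the pipeline event log as a table via the eventLog block; the catalog, schema, and table name shown are illustrative placeholders.

import * as databricks from "@pulumi/databricks";

// Minimal sketch: a UC pipeline that also publishes its event log as a table.
const withEventLog = new databricks.Pipeline("event_log_example", {
    name: "Pipeline With Event Log",
    catalog: "main",          // illustrative catalog
    schema: "dlt_demo",       // illustrative schema
    serverless: true,
    eventLog: {
        catalog: "main",              // where the event log table is created (illustrative)
        schema: "monitoring",         // illustrative schema
        name: "pipeline_event_log",   // illustrative table name
    },
    libraries: [{ notebook: { path: "/Workspace/path/to/notebook" } }], // illustrative path
});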

PipelineFilters, PipelineFiltersArgs

Excludes List<string>
Paths to exclude.
Includes List<string>
Paths to include.
Excludes []string
Paths to exclude.
Includes []string
Paths to include.
excludes List<String>
Paths to exclude.
includes List<String>
Paths to include.
excludes string[]
Paths to exclude.
includes string[]
Paths to include.
excludes Sequence[str]
Paths to exclude.
includes Sequence[str]
Paths to include.
excludes List<String>
Paths to exclude.
includes List<String>
Paths to include.
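A minimal sketch of the filters block for a pipeline; the include and exclude paths are illustrative placeholders only.

// Illustrative filters block for a pipeline: include one package path, exclude a sub-path.
const filters = {
    includes: ["datasets.sales.*"],       // paths to include (illustrative)
    excludes: ["datasets.sales.scratch"], // paths to exclude (illustrative)
};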

PipelineGatewayDefinition, PipelineGatewayDefinitionArgs

ConnectionId Changes to this property will trigger replacement. string
Immutable. The Unity Catalog connection this gateway pipeline uses to communicate with the source.
ConnectionName string
GatewayStorageCatalog Changes to this property will trigger replacement. string
Required, Immutable. The name of the catalog for the gateway pipeline's storage location.
GatewayStorageName string
Required. The Unity Catalog-compatible naming for the gateway storage location. This is the destination for the data extracted by the gateway. The Delta Live Tables system automatically creates the storage location under the catalog and schema.
GatewayStorageSchema Changes to this property will trigger replacement. string
Required, Immutable. The name of the schema for the gateway pipeline's storage location.
ConnectionId Changes to this property will trigger replacement. string
Immutable. The Unity Catalog connection this gateway pipeline uses to communicate with the source.
ConnectionName string
GatewayStorageCatalog Changes to this property will trigger replacement. string
Required, Immutable. The name of the catalog for the gateway pipeline's storage location.
GatewayStorageName string
Required. The Unity Catalog-compatible naming for the gateway storage location. This is the destination for the data extracted by the gateway. The Delta Live Tables system automatically creates the storage location under the catalog and schema.
GatewayStorageSchema Changes to this property will trigger replacement. string
Required, Immutable. The name of the schema for the gateway pipeline's storage location.
connectionId Changes to this property will trigger replacement. String
Immutable. The Unity Catalog connection this gateway pipeline uses to communicate with the source.
connectionName String
gatewayStorageCatalog Changes to this property will trigger replacement. String
Required, Immutable. The name of the catalog for the gateway pipeline's storage location.
gatewayStorageName String
Required. The Unity Catalog-compatible naming for the gateway storage location. This is the destination for the data extracted by the gateway. The Delta Live Tables system automatically creates the storage location under the catalog and schema.
gatewayStorageSchema Changes to this property will trigger replacement. String
Required, Immutable. The name of the schema for the gateway pipeline's storage location.
connectionId Changes to this property will trigger replacement. string
Immutable. The Unity Catalog connection this gateway pipeline uses to communicate with the source.
connectionName string
gatewayStorageCatalog Changes to this property will trigger replacement. string
Required, Immutable. The name of the catalog for the gateway pipeline's storage location.
gatewayStorageName string
Required. The Unity Catalog-compatible naming for the gateway storage location. This is the destination for the data extracted by the gateway. The Delta Live Tables system automatically creates the storage location under the catalog and schema.
gatewayStorageSchema Changes to this property will trigger replacement. string
Required, Immutable. The name of the schema for the gateway pipeline's storage location.
connection_id Changes to this property will trigger replacement. str
Immutable. The Unity Catalog connection this gateway pipeline uses to communicate with the source.
connection_name str
gateway_storage_catalog Changes to this property will trigger replacement. str
Required, Immutable. The name of the catalog for the gateway pipeline's storage location.
gateway_storage_name str
Required. The Unity Catalog-compatible naming for the gateway storage location. This is the destination for the data extracted by the gateway. The Delta Live Tables system automatically creates the storage location under the catalog and schema.
gateway_storage_schema Changes to this property will trigger replacement. str
Required, Immutable. The name of the schema for the gateway pipeline's storage location.
connectionId Changes to this property will trigger replacement. String
Immutable. The Unity Catalog connection this gateway pipeline uses to communicate with the source.
connectionName String
gatewayStorageCatalog Changes to this property will trigger replacement. String
Required, Immutable. The name of the catalog for the gateway pipeline's storage location.
gatewayStorageName String
Required. The Unity Catalog-compatible naming for the gateway storage location. This is the destination for the data extracted by the gateway. The Delta Live Tables system automatically creates the storage location under the catalog and schema.
gatewayStorageSchema Changes to this property will trigger replacement. String
Required, Immutable. The name of the schema for the gateway pipeline's storage location.
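A minimal sketch of a gateway pipeline for CDC, wiring together the four attributes above; the connection ID and storage names are illustrative placeholders.

import * as databricks from "@pulumi/databricks";

// Minimal sketch: a CDC gateway pipeline. All values are placeholders.
const cdcGateway = new databricks.Pipeline("cdc_gateway", {
    name: "CDC Gateway Pipeline",
    catalog: "main",                              // illustrative catalog
    gatewayDefinition: {
        connectionId: "<uc-connection-id>",       // UC connection to the source system (placeholder)
        gatewayStorageCatalog: "main",            // catalog for the gateway storage location (illustrative)
        gatewayStorageSchema: "cdc_staging",      // schema for the gateway storage location (illustrative)
        gatewayStorageName: "sqlserver_gateway",  // UC-compatible name for extracted data (illustrative)
    },
});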

PipelineIngestionDefinition, PipelineIngestionDefinitionArgs

ConnectionName Changes to this property will trigger replacement. string
IngestionGatewayId Changes to this property will trigger replacement. string
Objects List<PipelineIngestionDefinitionObject>
TableConfiguration PipelineIngestionDefinitionTableConfiguration
ConnectionName Changes to this property will trigger replacement. string
IngestionGatewayId Changes to this property will trigger replacement. string
Objects []PipelineIngestionDefinitionObject
TableConfiguration PipelineIngestionDefinitionTableConfiguration
connectionName Changes to this property will trigger replacement. String
ingestionGatewayId Changes to this property will trigger replacement. String
objects List<PipelineIngestionDefinitionObject>
tableConfiguration PipelineIngestionDefinitionTableConfiguration
connectionName Changes to this property will trigger replacement. string
ingestionGatewayId Changes to this property will trigger replacement. string
objects PipelineIngestionDefinitionObject[]
tableConfiguration PipelineIngestionDefinitionTableConfiguration
connection_name Changes to this property will trigger replacement. str
ingestion_gateway_id Changes to this property will trigger replacement. str
objects Sequence[PipelineIngestionDefinitionObject]
table_configuration PipelineIngestionDefinitionTableConfiguration
connectionName Changes to this property will trigger replacement. String
ingestionGatewayId Changes to this property will trigger replacement. String
objects List<Property Map>
tableConfiguration Property Map

PipelineIngestionDefinitionObject, PipelineIngestionDefinitionObjectArgs

Report PipelineIngestionDefinitionObjectReport
Schema PipelineIngestionDefinitionObjectSchema
The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
Table PipelineIngestionDefinitionObjectTable
Report PipelineIngestionDefinitionObjectReport
Schema PipelineIngestionDefinitionObjectSchema
The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
Table PipelineIngestionDefinitionObjectTable
report PipelineIngestionDefinitionObjectReport
schema PipelineIngestionDefinitionObjectSchema
The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
table PipelineIngestionDefinitionObjectTable
report PipelineIngestionDefinitionObjectReport
schema PipelineIngestionDefinitionObjectSchema
The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
table PipelineIngestionDefinitionObjectTable
report PipelineIngestionDefinitionObjectReport
schema PipelineIngestionDefinitionObjectSchema
The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
table PipelineIngestionDefinitionObjectTable
report Property Map
schema Property Map
The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
table Property Map

PipelineIngestionDefinitionObjectReport, PipelineIngestionDefinitionObjectReportArgs

PipelineIngestionDefinitionObjectReportTableConfiguration, PipelineIngestionDefinitionObjectReportTableConfigurationArgs

primaryKeys List<String>
salesforceIncludeFormulaFields Boolean
scdType String
sequenceBies List<String>
primaryKeys List<String>
salesforceIncludeFormulaFields Boolean
scdType String
sequenceBies List<String>

PipelineIngestionDefinitionObjectSchema, PipelineIngestionDefinitionObjectSchemaArgs

PipelineIngestionDefinitionObjectSchemaTableConfiguration, PipelineIngestionDefinitionObjectSchemaTableConfigurationArgs

primaryKeys List<String>
salesforceIncludeFormulaFields Boolean
scdType String
sequenceBies List<String>
primaryKeys List<String>
salesforceIncludeFormulaFields Boolean
scdType String
sequenceBies List<String>

PipelineIngestionDefinitionObjectTable, PipelineIngestionDefinitionObjectTableArgs

PipelineIngestionDefinitionObjectTableTableConfiguration, PipelineIngestionDefinitionObjectTableTableConfigurationArgs

primaryKeys List<String>
salesforceIncludeFormulaFields Boolean
scdType String
sequenceBies List<String>
primaryKeys List<String>
salesforceIncludeFormulaFields Boolean
scdType String
sequenceBies List<String>

PipelineIngestionDefinitionTableConfiguration, PipelineIngestionDefinitionTableConfigurationArgs

primaryKeys List<String>
salesforceIncludeFormulaFields Boolean
scdType String
sequenceBies List<String>
primaryKeys List<String>
salesforceIncludeFormulaFields Boolean
scdType String
sequenceBies List<String>
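A minimal sketch of an ingestionDefinition whose pipeline-wide tableConfiguration applies the keys listed above to every ingested table. The connection name, key column, and SCD type value are illustrative placeholders, and the per-object entries are omitted.

import * as databricks from "@pulumi/databricks";

// Minimal sketch: a managed ingestion pipeline with a pipeline-level table configuration.
const managedIngestion = new databricks.Pipeline("managed_ingestion", {
    name: "Managed Ingestion Pipeline",
    catalog: "main",          // illustrative catalog
    schema: "ingested",       // illustrative schema
    ingestionDefinition: {
        connectionName: "<uc-connection-name>",   // placeholder connection name
        // objects: [...]                         // per-schema/table objects omitted here
        tableConfiguration: {
            primaryKeys: ["id"],                  // illustrative key column
            scdType: "SCD_TYPE_2",                // illustrative SCD type value
            salesforceIncludeFormulaFields: false,
        },
    },
});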

PipelineLatestUpdate, PipelineLatestUpdateArgs

CreationTime string
State string
UpdateId string
CreationTime string
State string
UpdateId string
creationTime String
state String
updateId String
creationTime string
state string
updateId string
creationTime String
state String
updateId String

PipelineLibrary, PipelineLibraryArgs

File PipelineLibraryFile
Jar string
Maven PipelineLibraryMaven
Notebook PipelineLibraryNotebook
Whl string

Deprecated: The 'whl' field is deprecated

File PipelineLibraryFile
Jar string
Maven PipelineLibraryMaven
Notebook PipelineLibraryNotebook
Whl string

Deprecated: The 'whl' field is deprecated

file PipelineLibraryFile
jar String
maven PipelineLibraryMaven
notebook PipelineLibraryNotebook
whl String

Deprecated: The 'whl' field is deprecated

file PipelineLibraryFile
jar string
maven PipelineLibraryMaven
notebook PipelineLibraryNotebook
whl string

Deprecated: The 'whl' field is deprecated

file Property Map
jar String
maven Property Map
notebook Property Map
whl String

Deprecated: The 'whl' field is deprecated

PipelineLibraryFile, PipelineLibraryFileArgs

Path string
Path string
path String
path string
path str
path String

PipelineLibraryMaven, PipelineLibraryMavenArgs

Coordinates This property is required. string
Exclusions List<string>
Repo string
Coordinates This property is required. string
Exclusions []string
Repo string
coordinates This property is required. String
exclusions List<String>
repo String
coordinates This property is required. string
exclusions string[]
repo string
coordinates This property is required. str
exclusions Sequence[str]
repo str
coordinates This property is required. String
exclusions List<String>
repo String
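A minimal sketch of a Maven library entry for a pipeline's libraries list; the coordinates, exclusion, and repository URL are illustrative, and only coordinates is required.

// One entry for a pipeline's `libraries` list, pulling a Maven artifact.
const mavenLibrary = {
    maven: {
        coordinates: "com.example:my-connector_2.12:1.2.3",  // required; illustrative coordinates
        exclusions: ["org.slf4j:slf4j-log4j12"],              // optional; illustrative exclusion
        repo: "https://repo.example.com/maven2/",             // optional custom repository (illustrative)
    },
};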

PipelineLibraryNotebook, PipelineLibraryNotebookArgs

Path string
Path string
path String
path string
path str
path String

PipelineNotification, PipelineNotificationArgs

Alerts List<string>
Non-empty list of alert types. The following alert types are currently supported; consult the documentation for the up-to-date list:

  • on-update-success - a pipeline update completes successfully.
  • on-update-failure - a pipeline update fails with a retryable error.
  • on-update-fatal-failure - a pipeline update fails with a non-retryable (fatal) error.
  • on-flow-failure - a single data flow fails.
EmailRecipients List<string>
Non-empty list of email addresses to notify.
Alerts []string
Non-empty list of alert types. The following alert types are currently supported; consult the documentation for the up-to-date list:

  • on-update-success - a pipeline update completes successfully.
  • on-update-failure - a pipeline update fails with a retryable error.
  • on-update-fatal-failure - a pipeline update fails with a non-retryable (fatal) error.
  • on-flow-failure - a single data flow fails.
EmailRecipients []string
Non-empty list of email addresses to notify.
alerts List<String>
Non-empty list of alert types. The following alert types are currently supported; consult the documentation for the up-to-date list:

  • on-update-success - a pipeline update completes successfully.
  • on-update-failure - a pipeline update fails with a retryable error.
  • on-update-fatal-failure - a pipeline update fails with a non-retryable (fatal) error.
  • on-flow-failure - a single data flow fails.
emailRecipients List<String>
Non-empty list of email addresses to notify.
alerts string[]
Non-empty list of alert types. The following alert types are currently supported; consult the documentation for the up-to-date list:

  • on-update-success - a pipeline update completes successfully.
  • on-update-failure - a pipeline update fails with a retryable error.
  • on-update-fatal-failure - a pipeline update fails with a non-retryable (fatal) error.
  • on-flow-failure - a single data flow fails.
emailRecipients string[]
Non-empty list of email addresses to notify.
alerts Sequence[str]
Non-empty list of alert types. The following alert types are currently supported; consult the documentation for the up-to-date list:

  • on-update-success - a pipeline update completes successfully.
  • on-update-failure - a pipeline update fails with a retryable error.
  • on-update-fatal-failure - a pipeline update fails with a non-retryable (fatal) error.
  • on-flow-failure - a single data flow fails.
email_recipients Sequence[str]
Non-empty list of email addresses to notify.
alerts List<String>
Non-empty list of alert types. The following alert types are currently supported; consult the documentation for the up-to-date list:

  • on-update-success - a pipeline update completes successfully.
  • on-update-failure - a pipeline update fails with a retryable error.
  • on-update-fatal-failure - a pipeline update fails with a non-retryable (fatal) error.
  • on-flow-failure - a single data flow fails.
emailRecipients List<String>
Non-empty list of email addresses to notify.

PipelineRestartWindow, PipelineRestartWindowArgs

StartHour This property is required. int
DaysOfWeeks List<string>
TimeZoneId string
StartHour This property is required. int
DaysOfWeeks []string
TimeZoneId string
startHour This property is required. Integer
daysOfWeeks List<String>
timeZoneId String
startHour This property is required. number
daysOfWeeks string[]
timeZoneId string
start_hour This property is required. int
days_of_weeks Sequence[str]
time_zone_id str
startHour This property is required. Number
daysOfWeeks List<String>
timeZoneId String
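A minimal sketch of a restartWindow block for a pipeline; the hour, days, and time zone shown are illustrative, and only startHour is required.

// Illustrative restartWindow block for a pipeline.
const restartWindow = {
    startHour: 2,                         // required; hour of the day when the window starts
    daysOfWeeks: ["SATURDAY", "SUNDAY"],  // illustrative day names
    timeZoneId: "UTC",                    // illustrative time zone ID
};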

PipelineRunAs, PipelineRunAsArgs

PipelineTrigger, PipelineTriggerArgs

PipelineTriggerCron, PipelineTriggerCronArgs

Import

The pipeline resource can be imported using the id of the pipeline:

bash

$ pulumi import databricks:index/pipeline:Pipeline this <pipeline-id>

To learn more about importing existing cloud resources, see Importing resources.

Package Details

Repository
databricks pulumi/pulumi-databricks
License
Apache-2.0
Notes
This Pulumi package is based on the databricks Terraform Provider.